blob_id (stringlengths 40-40) | directory_id (stringlengths 40-40) | path (stringlengths 5-283) | content_id (stringlengths 40-40) | detected_licenses (sequencelengths 0-41) | license_type (stringclasses, 2 values) | repo_name (stringlengths 7-96) | snapshot_id (stringlengths 40-40) | revision_id (stringlengths 40-40) | branch_name (stringclasses, 58 values) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 12.7k-662M, nullable) | star_events_count (int64, 0-35.5k) | fork_events_count (int64, 0-20.6k) | gha_license_id (stringclasses, 11 values) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (stringclasses, 43 values) | src_encoding (stringclasses, 9 values) | language (stringclasses, 1 value) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64, 7-5.88M) | extension (stringclasses, 30 values) | content (stringlengths 7-5.88M) | authors (sequencelengths 1-1) | author (stringlengths 0-73) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6eed4f68b4a484742a4e25d761a324ca9159e9c2 | ecd2b21b7bb6a47ffc45757021c6916ece3f7f93 | /K-State/CIS505/Finished-Assingments/Ex2/Ex2-Submit/heapmodule.py | cffaadb3337f3eb5a7842b11fa1bc13954bcd70e | [] | no_license | amgregoi/School | 312bc9449f5584713a475f0e8a64af0805169425 | effb6b6d0667a19437efd15d15a43cf95fda6ac2 | refs/heads/master | 2021-01-17T06:50:39.438326 | 2016-06-23T05:03:40 | 2016-06-23T05:03:40 | 47,589,088 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,336 | py |
### HEAP-STORAGE MODULE
"""The program's heap is a dictionary that maps handles to namespaces.
An object is itself a namespace (dictionary).
heap : { (HANDLE : NAMESPACE)+ }
where HANDLE = a string of digits
NAMESPACE = a dictionary that maps var names to ints:
{ (ID : INT)* }
Example:
heap = { "0": {"x":7, "y":1, "z":2} }
heap_count = 1
is an example heap, where handle "0" names a namespace
whose x field holds int 7, "y" field holds int 1,
and "z" holds int 2.
The above example heap was generated from this sample program:
int y = 1; int x = (6 + y); int z = y;
z = (z + y)
"""
heap = {}
heap_count = 0 # how many objects stored in the heap
ns = [] # This is the handle to the namespace in the heap that holds the
# program's global variables. See initializeHeap below.
### Maintenance functions:
def activeNS():
"""returns the handle of the namespace that holds the currently visible
program variables
"""
global ns
return ns[-1]
def initializeHeap():
"""resets the heap for a new program"""
global heap_count, heap, ns
heap_count = 0
heap = {}
ns.append(allocateNS()) # create namespace in heap for global variables
def printHeap():
"""prints contents of ns and heap"""
print "namespace =", ns
print "heap = {"
global heap
handles = heap.keys()
handles.sort()
for h in handles:
print " ", h, ":", heap[h]
print "}"
def allocateNS() : #allocates new namespace in heap
"""allocates new namespace in heap"""
global heap_count
newloc = "h" + str(heap_count) # generate handle of form, hn, where n is an int
heap[newloc] = {'parentns':'nil'}
heap_count = heap_count + 1
ns.append(newloc)
return newloc
def allocateClosure(ns, rval, il, cl): #allocates closure for procedures
global heap_count
global heap
print cl
print "\n\n\n"
newloc = "h" + str(heap_count)
heap[newloc] = ['proc', il, cl, ns]
declarevar(ns, rval, newloc)
heap_count += 1
def deallocateNS() : #deallocates current namespace
global ns
global heap
global heap_count
if len(ns) != 1:
namespace = ns.pop()
def isLValid(handle, field):
"""checks if (handle, field) is a valid L-value, that is, checks
that heap[handle] is a namespace and field is found in it.
returns True if the above holds true; returns False otherwise.
"""
return (handle in heap) and (field in heap[handle])
def lookupheap(handle): #returns value in heap in position 'handle'
if handle not in heap:
crash("handle not in heap")
return heap[handle]
def storeheap(handle, rval): #stores rval in heap in position 'handle'
if handle not in heap:
crash("handle not in heap")
heap[handle] = rval
def lookup(handle, field) :
"""looks up the value of (handle,field) in the heap
param: handle,field -- such that isLValid(handle, field)
returns: The function extracts the object at heap[handle],
indexes it with field, and returns (heap[handle])[field]
"""
current = handle
while True:
if isLValid(current, field):
return heap[current][field]
if heap[current]['parentns'] == 'nil':
crash("invalid lookup address: " + handle + " " + field)
current = heap[current]['parentns']
def declarevar(handle, field, rval) :
"""creates a new definition in the heap at (handle, field) and initializes
it with rval, provided that heap[handle][field] does not already exist!
(else crashes with a "redeclaration error")
params: handle, field, as described above
rval -- an int or a handle
"""
## WRITE ME
if field in heap[handle].keys(): # method to declare vars and store on heap
crash("redeclaration error")
heap[handle][field] = rval
def store(handle, field, rval) :
"""stores rval at heap[handle][field], provided that
(i) isLValid(handle,field)
(ii) the type of rval matches the type of what's already stored at
heap[handle][field]
(else crashes with a type-error message)
params: handle, field, as described above
rval -- an int or a handle
"""
## REVISE THE FOLLOWING CODE TO MATCH THE ABOVE DOCUMENTATION:
current = handle
global heap
current = handle
while True:
if isLValid(current, field):
if type(heap[current][field]) == type(rval):
heap[current][field] = rval
break; #if heap[c][f] is initialized breaks loop
else:
crash("type-error message")
if heap[current]['parentns'] == 'nil':
crash("field (parent) not declared")
current = heap[current]['parentns']
def crash(message) :
"""prints message and stops execution"""
print "Heap error: ", message, " Crash!"
printHeap()
raise Exception # stops the interpreter
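# --- Illustrative usage (editor's sketch, not part of the original submission) ---
# A hedged example of how the functions above are meant to be driven by the
# interpreter; the variable names "x" and "y" are assumptions for illustration.
def _demo_heap_usage():
    initializeHeap()
    h = activeNS()                          # handle of the global namespace
    declarevar(h, "y", 1)                   # int y = 1
    declarevar(h, "x", lookup(h, "y") + 6)  # int x = (6 + y)
    store(h, "y", 2)                        # y = 2
    printHeap()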
| [
"[email protected]"
] | |
9595472d083f5d6051f654065f02bd5e115c5b03 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /call_case/point/week.py | 04be7fe9699394beff5a98745c1fa176eb31c35f | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py |
#! /usr/bin/env python
def feel_public_man_on_same_number(str_arg):
work_old_person_about_person(str_arg)
print('important_fact')
def work_old_person_about_person(str_arg):
print(str_arg)
if __name__ == '__main__':
feel_public_man_on_same_number('early_way_or_fact')
| [
"[email protected]"
] | |
274d5b7edcf55ff421d4509772cb169cfd2fbe18 | 8a42be3f930d8a215394a96ad2e91c95c3b7ff86 | /Build/Instalation/ProtoBuf/MarathonTests/Marathon 1.1/ProtoBufEditor/TestCases/Basic1/FullEdit_Edit_Menu_3.py | bce361629eac5e19e1052f684c002d4d767bc045 | [] | no_license | java-tools/jrec | 742e741418c987baa4350390d126d74c0d7c4689 | 9ece143cdd52832804eca6f3fb4a1490e2a6f891 | refs/heads/master | 2021-09-27T19:24:11.979955 | 2017-11-18T06:35:31 | 2017-11-18T06:35:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,458 | py | useFixture(default)
def test():
from Modules import commonBits
java_recorded_version = '1.5.0_11'
### Check out ....
if window(commonBits.applicationName()):
select('FileNameTxtFld', commonBits.sampleDir() + 'Ams_LocDownload_20041228.bin')
#commonBits.setRecordLayout(select, 'ams Store')
click('Edit1')
select('LinesTbl', 'rows:[1,2,3,4,5,6,7,8,9],columns:[4|Loc_Name]')
select_menu('Data>>Table View #{Selected Records#}')
## select('Table2', 'rows:[1,2,3,4,5,6,7,8,9],columns:[4|Loc_Name]')
select('LinesTbl', 'rows:[2,3,4],columns:[3|Loc_Type]')
select_menu('Edit>>Cut Record#{s#}')
select('LinesTbl', 'cell:4|Loc_Name,2(Blacktown)')
rightclick('LinesTbl', '4|Loc_Name,2')
select('LinesTbl', 'cell:4|Loc_Name,0(VIC West Ad Support)')
rightclick('LinesTbl', '4|Loc_Name,0')
select_menu('Edit Record')
## select('Table1', 'cell:4|Loc_Name,0(VIC West Ad Support)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,3(VIC West Ad Support)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', 'cell:Data,3(VIC West Ad Support)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,5(Lot 2 Little Boundary Rd)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Content', '[[Brand_Id, 1, , TAR, TAR], [Loc_Nbr, 2, , 5850, 5850], [Loc_Type, 3, , DC, DC], [Loc_Name, 4, , VIC West Ad Support, VIC West Ad Support], [Loc_Addr_Ln1, 5, , , ], [Loc_Addr_Ln2, 6, , Lot 2 Little Boundary Rd, Lot 2 Little Boundary Rd], [Loc_Addr_Ln3, 7, , Laverton, Laverton], [Loc_Postcode, 8, , 3028, 3028], [Loc_State, 9, , VIC, VIC], [Loc_Actv_Ind, 10, , A, A]]')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,5(Lot 2 Little Boundary Rd)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,3(NSW North Sydney Ad Support)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', 'cell:Data,3(NSW North Sydney Ad Support)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5853)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Content', '[[Brand_Id, 1, , TAR, TAR], [Loc_Nbr, 2, , 5853, 5853], [Loc_Type, 3, , DC, DC], [Loc_Name, 4, , NSW North Sydney Ad Support, NSW North Sydney Ad Support], [Loc_Addr_Ln1, 5, , , ], [Loc_Addr_Ln2, 6, , , ], [Loc_Addr_Ln3, 7, , , ], [Loc_Postcode, 8, , , ], [Loc_State, 9, , , ], [Loc_Actv_Ind, 10, , A, A]]')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5853)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5033)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', 'cell:Data,1(5033)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Marayong)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Content', '[[Brand_Id, 1, , TAR, TAR], [Loc_Nbr, 2, , 5033, 5033], [Loc_Type, 3, , ST, ST], [Loc_Name, 4, , Blacktown, Blacktown], [Loc_Addr_Ln1, 5, , Marayong, Marayong], [Loc_Addr_Ln2, 6, , Dock 2, 11 Melissa Place, Dock 2, 11 Melissa Place], [Loc_Addr_Ln3, 7, , Marayong, Marayong], [Loc_Postcode, 8, , 2148, 2148], [Loc_State, 9, , NSW, NSW], [Loc_Actv_Ind, 10, , A, A]]')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Marayong)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Building B, Portside DC)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Content', '[[Brand_Id, 1, , TAR, TAR], [Loc_Nbr, 2, , 5035, 5035], [Loc_Type, 3, , ST, ST], [Loc_Name, 4, , Rockdale, Rockdale], [Loc_Addr_Ln1, 5, , Building B, Portside DC, Building B, Portside DC], [Loc_Addr_Ln2, 6, , 2-8 Mc Pherson Street, 2-8 Mc Pherson Street], [Loc_Addr_Ln3, 7, , Botany, Botany], [Loc_Postcode, 8, , 2019, 2019], [Loc_State, 9, , NSW, NSW], [Loc_Actv_Ind, 10, , A, A]]')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5035)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5035', 'Data,1')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5035)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5037)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5037', 'Data,1')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Westfield Shoppingtown)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Content', '[[Brand_Id, 1, , TAR, TAR], [Loc_Nbr, 2, , 5037, 5037], [Loc_Type, 3, , ST, ST], [Loc_Name, 4, , Miranda, Miranda], [Loc_Addr_Ln1, 5, , Westfield Shoppingtown, Westfield Shoppingtown], [Loc_Addr_Ln2, 6, , Cnr. Urunga Pde & The Kingsway, Cnr. Urunga Pde & The Kingsway], [Loc_Addr_Ln3, 7, , Miranda, Miranda], [Loc_Postcode, 8, , 2228, 2228], [Loc_State, 9, , NSW, NSW], [Loc_Actv_Ind, 10, , A, A]]')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Westfield Shoppingtown)')
click('BasicInternalFrameTitlePane$NoFocusButton2')
select('LinesTbl', 'cell:4|Loc_Name,2(Blacktown)')
select('LinesTbl', 'cell:4|Loc_Name,2(Blacktown)')
assert_p('LinesTbl', 'RowCount', '6')
select('LinesTbl', 'cell:4|Loc_Name,4(Miranda)')
select_menu('Edit>>Paste Record#{s#}')
select('LinesTbl', 'cell:4|Loc_Name,4(Miranda)')
rightclick('LinesTbl', '4|Loc_Name,4')
select_menu('Edit Record')
## select('Table1', 'cell:4|Loc_Name,4(Miranda)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5037)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', 'cell:Data,1(5037)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Westfield Shoppingtown)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', 'cell:Data,4(Westfield Shoppingtown)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Westfield Shoppingtown)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5052)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5866', 'Data,1')
### was 5052
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,3(WA Ad Support)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,3(WA Ad Support)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5015)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5015', 'Data,1')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,3(Bankstown)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,3(Bankstown)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5019)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', 'cell:Data,1(5019)')
### was 5052
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Penrith)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Penrith)')
click('BasicInternalFrameTitlePane$NoFocusButton2')
select('LinesTbl', 'cell:4|Loc_Name,5(WA Ad Support)')
select('LinesTbl', 'cell:4|Loc_Name,5(WA Ad Support)')
assert_p('LinesTbl', 'RowCount', '9')
### was 6
select('LinesTbl', 'cell:4|Loc_Name,6(Bankstown)')
select('LinesTbl', 'rows:[2,3,4],columns:[4|Loc_Name]')
select_menu('Edit>>Delete Record#{s#}')
select('LinesTbl', 'cell:4|Loc_Name,1(NSW North Sydney Ad Support)')
assert_p('LinesTbl', 'RowCount', '6')
select('LinesTbl', 'cell:4|Loc_Name,1(NSW North Sydney Ad Support)')
rightclick('LinesTbl', '4|Loc_Name,1')
select_menu('Edit Record')
## select('Table1', 'cell:4|Loc_Name,1(NSW North Sydney Ad Support)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5853)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5853', 'Data,1')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5853)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5866)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5866', 'Data,1')
### 5033
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5866)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5015)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5015', 'Data,1')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,5(Unit 2, 39-41 Allingham Street)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,5(Unit 2, 39-41 Allingham Street)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5019)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5019', 'Data,1')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Penrith)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,4(Penrith)')
click('BasicInternalFrameTitlePane$NoFocusButton2')
select('LinesTbl', 'cell:4|Loc_Name,2(WA Ad Support)')
select('LinesTbl', 'cell:4|Loc_Name,2(WA Ad Support)')
assert_p('LinesTbl', 'RowCount', '6')
select('LinesTbl', 'cell:4|Loc_Name,3(Bankstown)')
select('LinesTbl', 'cell:4|Loc_Name,1(NSW North Sydney Ad Support)')
select_menu('Edit>>Copy Record#{s#}')
select('LinesTbl', 'cell:3|Loc_Type,3(ST)')
select_menu('Edit>>Paste Record#{s#} Prior')
select('LinesTbl', 'cell:2|Loc_Nbr,2(5866)')
rightclick('LinesTbl', '2|Loc_Nbr,2')
select_menu('Edit Record')
## select('Table1', 'cell:2|Loc_Nbr,2(5866)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5866)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', 'DC', 'Data,2')
### was ST
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5866)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5853)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5853', 'Data,1')
### was 5035
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5853)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5015)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', 'cell:Data,1(5015)')
### was 5037
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,3(Bankstown)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,3(Bankstown)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5019)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', 'cell:Data,1(5019)')
### was 5019 ???
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5019)')
click('BasicInternalFrameTitlePane$NoFocusButton2')
select('LinesTbl', 'cell:4|Loc_Name,5(Penrith)')
select('LinesTbl', 'cell:4|Loc_Name,5(Penrith)')
select_menu('Edit>>Paste Record#{s#}')
select('LinesTbl', 'cell:4|Loc_Name,5(Penrith)')
rightclick('LinesTbl', '4|Loc_Name,5')
select_menu('Edit Record')
## select('Table1', 'cell:4|Loc_Name,5(Penrith)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5052)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5019', 'Data,1')
### was 5052
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5019)')
click('Right')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,1(5052)')
assert_p('BaseLineAsColumn$LineAsColTbl', 'Text', '5853', 'Data,1')
### was 5052
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,2(DC)')
select('BaseLineAsColumn$LineAsColTbl', 'cell:Data,2(DC)')
click('BasicInternalFrameTitlePane$NoFocusButton2')
select('LinesTbl', 'cell:4|Loc_Name,1(NSW North Sydney Ad Support)')
select('LinesTbl', 'rows:[1,2,3,4,5],columns:[4|Loc_Name]')
select_menu('Edit>>Delete Record#{s#}')
select('LinesTbl', 'cell:4|Loc_Name,1(NSW North Sydney Ad Support)')
assert_p('LinesTbl', 'RowCount', '3')
### was 6
select('LinesTbl', 'cell:4|Loc_Name,0(VIC West Ad Support)')
select('LinesTbl', 'cell:4|Loc_Name,0(VIC West Ad Support)')
click('BasicInternalFrameTitlePane$NoFocusButton2')
#click('WindowsInternalFrameTitlePane', 856, 12)
click('BasicInternalFrameTitlePane$NoFocusButton2')
if window('Save Changes to file: ' + commonBits.sampleDir() + 'Ams_LocDownload_20041228.bin'):
click('No')
close()
close()
| [
"bruce_a_martin@b856f413-25aa-4700-8b60-b3441822b2ec"
] | bruce_a_martin@b856f413-25aa-4700-8b60-b3441822b2ec |
0b8a24015738311184532fc1236d333df4fedb66 | 338f2c2243d9bd59ca61bb67b6573fe02d20a82c | /base.py | fb8a1d6705fc0314336401537c3886bc4c45a7e4 | [] | no_license | kakn/gilbert | 33f7d60ac5e986694ee8fa8482a5587ab91d337d | 158f037e1e9c71b573bca90d4a85b7e42880477e | refs/heads/main | 2023-02-05T05:27:34.977679 | 2020-12-28T17:01:17 | 2020-12-28T17:01:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,407 | py | """
░██████╗░██╗██╗░░░░░██████╗░███████╗██████╗░████████╗
██╔════╝░██║██║░░░░░██╔══██╗██╔════╝██╔══██╗╚══██╔══╝
██║░░██╗░██║██║░░░░░██████╦╝█████╗░░██████╔╝░░░██║░░░
██║░░╚██╗██║██║░░░░░██╔══██╗██╔══╝░░██╔══██╗░░░██║░░░
╚██████╔╝██║███████╗██████╦╝███████╗██║░░██║░░░██║░░░
░╚═════╝░╚═╝╚══════╝╚═════╝░╚══════╝╚═╝░░╚═╝░░░╚═╝░░░ v1.1
TODO:
- Unplagiarize
- Uncomment
- Find new data
- Change name to Rigby
- Improve loading animations
- Choose different neural network
- If below a certainty, say IDK
- Speech to text
* Implement "listening...." animation
* Try / except statement in listening
- Text to speech
From there, we can train the model on specific input data
Make it flexible for problems by variably optimizing neural network
Make it more general, open to problem solving rather than conversation
"""
# loading animation
import os
import time
import threading
import itertools
import sys
os.system('clear')
def animate(birdword):
for c in ['', '.', '..', '...', '..', '.']:
print('{b}'.format(b=birdword) + c)
time.sleep(0.5)
os.system('clear')
# import packages
def loader():
import nltk
#nltk.download('punkt')
#nltk.download('wordnet')
from nltk.stem import WordNetLemmatizer
lemmatizer = WordNetLemmatizer()
import json
import pickle
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout
from keras.optimizers import SGD
import random
# t = threading.Thread(name="load", target=loader)
# t.start()
# while t.isAlive():
# animate("packages")
# print("Done!")
# time.sleep(0.5)
# intialize training
print("Loading packages...")
import nltk
#nltk.download('punkt')
#nltk.download('wordnet')
from nltk.stem import WordNetLemmatizer
lemmatizer = WordNetLemmatizer()
import json
import pickle
import pyttsx3
import speech_recognition as sr
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout
from keras.optimizers import SGD
import random
import tensorflow as tf
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
#print("Done!")
#time.sleep(0.4)
#os.system('clear')
print("Loading training data...")
words= []
classes = []
documents = []
ignore_words = ['?', '!']
data_file = open('intents.json').read()
intents = json.loads(data_file)
for intent in intents['intents']:
for pattern in intent['patterns']:
# take each word and tokenize it
w = nltk.word_tokenize(pattern)
words.extend(w)
# adding documents
documents.append((w, intent['tag']))
# adding classes to our class list
if intent['tag'] not in classes:
classes.append(intent['tag'])
words = [lemmatizer.lemmatize(w.lower()) for w in words if w not in ignore_words]
# lemmatize means to turn a word into its base meaning, or its lemma
# this is similar to stemming, which reduces an inflected word down to its root form.
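# e.g. lemmatizer.lemmatize("dogs") -> "dog"; with a part-of-speech hint,
# lemmatizer.lemmatize("better", pos="a") -> "good"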
words = sorted(list(set(words)))
classes = sorted(list(set(classes)))
# print (len(documents), "documents")
# print (len(classes), "classes", classes)
# print (len(words), "unique lemmatized words", words)
pickle.dump(words,open('words.pkl','wb'))
pickle.dump(classes,open('classes.pkl','wb'))
# building deep learning model
training = []
output_empty = [0] * len(classes)
for doc in documents:
# initializing bag of words
bag = []
# list of tokenized words for the pattern
pattern_words = doc[0]
# lemmatize each word - create base word, in attempt to represent related words
pattern_words = [lemmatizer.lemmatize(word.lower()) for word in pattern_words]
# create our bag of words array with 1, if word match found in current pattern
for w in words:
bag.append(1) if w in pattern_words else bag.append(0)
# output is a '0' for each tag and '1' for current tag (for each pattern)
output_row = list(output_empty)
output_row[classes.index(doc[1])] = 1
training.append([bag, output_row])
# shuffle our features and turn into np.array
random.shuffle(training)
training = np.array(training)
# create train and test lists. X - patterns, Y - intents
train_x = list(training[:,0])
train_y = list(training[:,1])
#print("Done!")
time.sleep(0.4)
#os.system('clear')
# Intitializing Neural network
print("Intializing Neural Network...")
# Create model - 3 layers. First layer 128 neurons, second layer 64 neurons and 3rd output layer contains number of neurons
# equal to number of intents to predict output intent with softmax
model = Sequential()
model.add(Dense(128, input_shape=(len(train_x[0]),), activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(64, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(len(train_y[0]), activation='softmax'))
# Compile model. Stochastic gradient descent with Nesterov accelerated gradient gives good results for this model
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
model.fit(np.array(train_x), np.array(train_y), epochs=200, batch_size=5, verbose=False)
#print("Done!")
#time.sleep(0.4)
#os.system('clear')
# Preparation for GUI
print("Preparing GUI...")
intents = json.loads(open('intents.json').read())
words = pickle.load(open('words.pkl','rb'))
classes = pickle.load(open('classes.pkl','rb'))
def clean_up_sentence(sentence):
sentence_words = nltk.word_tokenize(sentence)
sentence_words = [lemmatizer.lemmatize(word.lower()) for word in sentence_words]
return sentence_words
# cleans up any sentence inputted
def bow(sentence, words, show_details=True):
# tokenize the pattern
sentence_words = clean_up_sentence(sentence)
# bag of words - matrix of N words, vocabulary matrix
bag = [0]*len(words)
for s in sentence_words:
for i,w in enumerate(words):
if w == s:
# assign 1 if current word is in the vocabulary position
bag[i] = 1
if show_details:
print ("found in bag: %s" % w)
return(np.array(bag))
# takes the sentences that are cleaned up and creates a bag of words that are used for predicting classes
def predict_class(sentence, model):
# filter out predictions below a threshold
p = bow(sentence, words,show_details=False)
res = model.predict(np.array([p]))[0]
ERROR_THRESHOLD = 0.25
results = [[i,r] for i,r in enumerate(res) if r>ERROR_THRESHOLD]
# sort by strength of probability
results.sort(key=lambda x: x[1], reverse=True)
return_list = []
for r in results:
return_list.append({"intent": classes[r[0]], "probability": str(r[1])})
return return_list
# error threshold of 0.25 to avoid too much overfitting.
# This function will output a list of intents and the probabilities, their likelihood of matching the correct intent
def getResponse(ints, intents_json):
tag = ints[0]['intent']
list_of_intents = intents_json['intents']
for i in list_of_intents:
if(i['tag']== tag):
result = random.choice(i['responses'])
break
return result
# takes the list outputted and checks the json file and outputs the most response with the highest probability.
def chatbot_response(msg):
ints = predict_class(msg, model)
res = getResponse(ints, intents)
return res
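# Illustrative call (editor's note): the exact tags and replies depend on the
# local intents.json, which is not shown here, so the values below are assumptions.
#   predict_class("hi there", model)  ->  e.g. [{'intent': 'greeting', 'probability': '0.97'}]
#   chatbot_response("hi there")      ->  e.g. "Hello! How can I help you?"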
#print("Done!")
#time.sleep(0.4)
#os.system('clear')
# GUI
def listen():
with sr.Microphone() as source2:
audio2 = r.listen(source2)
r = sr.Recognizer()
r.pause_threshold = 0.5
print("Calibrating microphone (5 seconds)...")
with sr.Microphone() as source2:
r.adjust_for_ambient_noise(source2, duration=5)
#print("Done!")
#time.sleep(0.4)
def gooey():
os.system('clear')
print("""
░██████╗░██╗██╗░░░░░██████╗░███████╗██████╗░████████╗
██╔════╝░██║██║░░░░░██╔══██╗██╔════╝██╔══██╗╚══██╔══╝
██║░░██╗░██║██║░░░░░██████╦╝█████╗░░██████╔╝░░░██║░░░
██║░░╚██╗██║██║░░░░░██╔══██╗██╔══╝░░██╔══██╗░░░██║░░░
╚██████╔╝██║███████╗██████╦╝███████╗██║░░██║░░░██║░░░
░╚═════╝░╚═╝╚══════╝╚═════╝░╚══════╝╚═╝░░╚═╝░░░╚═╝░░░ v1.1
""")
time.sleep(0.5)
print("Built by kakn 27/12/2020. Hit enter to reply to Gilbert \n")
engine = pyttsx3.init()
print("GILBERT: What is your name?")
engine.say("What is your name?")
engine.runAndWait()
enter = input()
if enter == "":
try:
with sr.Microphone() as source2:
#r.adjust_for_ambient_noise(source2, duration=0.2)
audio2 = r.listen(source2)
name = r.recognize_google(audio2)
except sr.UnknownValueError:
engine.say("I'm sorry, but that makes no fucking sense.")
print("GILBERT: I'm sorry, but that makes no fucking sense.")
engine.runAndWait()
gooey()
#print(name)
#time.sleep(0.5)
print("GILBERT: Hello, {n}".format(n=name))
engine.say("Hello, {n}".format(n=name))
engine.runAndWait()
convo = True
while convo == True:
try:
enter = input()
if enter == "":
with sr.Microphone() as source2:
#r.adjust_for_ambient_noise(source2, duration=0.2)
audio2 = r.listen(source2)
question = r.recognize_google(audio2)
print('{n}: '.format(n=name.upper()) + question)
if question == "terminate":
engine.say("No oh oh oh oh oh oh oh oh oh oh oh oh oh oh")
break
engine.runAndWait()
convo = False
answer = chatbot_response(question)
print('GILBERT: ' + answer)
engine.say(answer)
engine.runAndWait()
with open("data_stored.txt", "a") as text_file:
text_file.write("{n}: ".format(n=name.upper()) + question + '\n' + "GILBERT: " + answer + '\n' + '\n')
except sr.UnknownValueError:
print("GILBERT: I don't understand. Speak up, bitch.")
engine.say("I don't understand. Speak up, bitch.")
engine.runAndWait()
# running functions
gooey()
| [
"[email protected]"
] | |
43e7d1bdb613cf561d4e96e96936a5fe6c55911a | 4d99f1e9dea4f18494af0667cf1ea60a0f5c7a78 | /Files/write.py | b60a8857aa0fb13164d1c9cda4a87e3112aa7bc6 | [] | no_license | arshsaluja/Python-s | deaff42e9827105f3096e47b9d6535a0fdf848ff | 0c2621f1be12aae170b5ef07d65b3d752040bc3a | refs/heads/master | 2020-12-02T04:01:00.988895 | 2020-06-14T23:24:19 | 2020-06-14T23:24:19 | 230,881,375 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117 | py | openfile = open("mymoviescript.txt", "wt")
openfile.write('I wish I had an idea for a movie...\n')
openfile.close()
| [
"[email protected]"
] | |
1cdceab23d7b6534836ca39512729389aa317371 | 3d2252485728a26602643c5029198d2920335071 | /gen_train_data_test_data.py | 8d3ab94da2f26848db83bc568fc843956ab88873 | [
"MIT"
] | permissive | heronimus/bird_species_classification | deeb018b64092ad8f581cdcb73907e5f92089a5f | 46cd36f11604c906c5dcd9e6422bb8c100a6a0f6 | refs/heads/master | 2021-07-12T06:53:28.501292 | 2020-09-02T05:42:04 | 2020-09-02T05:42:04 | 195,226,180 | 0 | 0 | MIT | 2020-09-02T05:42:06 | 2019-07-04T11:06:55 | Python | UTF-8 | Python | false | false | 2,707 | py | import cv2
import numpy as np
from os.path import join
from os import listdir
from keras.utils import np_utils
species = [
"blasti",
"bonegl",
"brhkyt",
"cbrtsh",
"cmnmyn",
"gretit",
"hilpig",
"himbul",
"himgri",
"hsparo",
"indvul",
"jglowl",
"lbicrw",
"mgprob",
"rebimg",
"wcrsrt",
]
datapath = "./"
N_CLASSES = 16 # Number of classes
def gen_data():
"""Generate numpy files for training, validation and
testing.
"""
X_train = []
Y_train = []
X_valid = []
Y_valid = []
X_test = []
Y_test = []
count = 0
for bird_specie in species:
# Samples Location
train_data = join(datapath, "train/" + bird_specie)
val_data = join(datapath, "valid/" + bird_specie)
test_data = join(datapath, "test/" + bird_specie)
# Samples Files
train_files = listdir(train_data)
valid_files = listdir(val_data)
test_files = listdir(test_data)
for img_file in train_files:
im = join(train_data, img_file)
img = cv2.imread(im)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
img = cv2.resize(img, (416, 416))
X_train.append(img)
Y_train += [count]
for img_file in test_files:
im = join(test_data, img_file)
img = cv2.imread(im)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
img = cv2.resize(img, (416, 416))
X_test.append(img)
Y_test += [count]
for img_file in valid_files:
im = join(val_data, img_file)
img = cv2.imread(im)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
img = cv2.resize(img, (416, 416))
X_valid.append(img)
Y_valid += [count]
count += 1
X_train = np.asarray(X_train).astype("float32")
X_train /= 255
Y_train = np.asarray(Y_train)
X_valid = np.asarray(X_valid).astype("float32")
X_valid /= 255
Y_valid = np.asarray(Y_valid)
X_test = np.asarray(X_test).astype("float32")
X_test /= 255
Y_test = np.asarray(Y_test)
return X_train, Y_train, X_valid, Y_valid, X_test, Y_test
if __name__ == "__main__":
x_train, y_train, x_valid, y_valid, x_test, y_test = gen_data()
y_train = np_utils.to_categorical(y_train, N_CLASSES)
    y_valid = np_utils.to_categorical(y_valid, N_CLASSES)
y_test = np_utils.to_categorical(y_test, N_CLASSES)
np.save("X_train.npy", x_train)
np.save("Y_train.npy", y_train)
np.save("X_valid.npy", x_valid)
np.save("Y_valid.npy", y_valid)
np.save("X_test.npy", x_test)
np.save("Y_test.npy", y_test)
| [
"[email protected]"
] | |
432e27d6105ccfb6588961c2b1b300043b1c27f2 | 193c3cf11fe60dbea5d35dfa831edfadbecd7104 | /LightManager/preferences.py | 378e929effb97a827d195ee063bd070743be3746 | [] | no_license | jordanSautron/LightManager | 04ad4d8ada21124db74e1580cdaa2d09fc9c1f3b | 863c938ef34bc41a912b25bcb81b783bfdb4d117 | refs/heads/master | 2022-09-10T07:59:11.741566 | 2020-04-12T23:05:15 | 2020-04-12T23:05:15 | 254,673,792 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 828 | py | """
This module contains every addon's preferences
"""
import bpy
from bpy.props import EnumProperty
ADDON_NAME = __package__.split('.')[0]
class LightManagerPreferences(bpy.types.AddonPreferences):
bl_idname = ADDON_NAME
ui_mode: EnumProperty(
default='HEADER',
items=[
('HEADER', 'Header', 'Display addon in view 3d header'),
('PANEL', 'Right Panel', 'Display addon in view 3d right panel')
]
)
def draw(self, context):
layout = self.layout
col = layout.column(align=True)
col.label(text='UI display:')
col.row(align=True).prop(
self,
'ui_mode',
text='UI Display:',
expand=True
)
def get_pref():
return bpy.context.preferences.addons[ADDON_NAME].preferences
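# Illustrative usage from elsewhere in the addon (editor's sketch; assumes the
# addon is registered and enabled so its preferences entry exists):
#   if get_pref().ui_mode == 'HEADER':
#       ...  # draw the controls in the 3D view header rather than the side panel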
| [
"[email protected]"
] | |
50a2162bd21b21dae81fac4f1f37e28576c0ecdb | 1c390cd4fd3605046914767485b49a929198b470 | /leetcode/maximum-ascending-subarray-sum.py | 61a33fa7a8a41c75f746313b88415da2a7871dd3 | [] | no_license | wwwwodddd/Zukunft | f87fe736b53506f69ab18db674311dd60de04a43 | 03ffffee9a76e99f6e00bba6dbae91abc6994a34 | refs/heads/master | 2023-01-24T06:14:35.691292 | 2023-01-21T15:42:32 | 2023-01-21T15:42:32 | 163,685,977 | 7 | 8 | null | null | null | null | UTF-8 | Python | false | false | 306 | py | class Solution:
def maxAscendingSum(self, a: List[int]) -> int:
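        # One pass: s is the sum of the current strictly-ascending run and
        # z is the best run sum seen so far. (List comes from typing and is
        # pre-imported in the LeetCode environment.)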
n = len(a)
s = 0
z = 0
for i in range(n):
if i > 0 and a[i] > a[i-1]:
s += a[i]
else:
s = a[i]
z = max(z, s)
return z
| [
"[email protected]"
] | |
f6e08ba178e5acb150ddfe7f0ea4567e32438e47 | cd69f6cd58ed195bc8cce6e6a68bc7636c1c0081 | /src/Case2.py | f2b19e42fd939ab6c49458ed0df64d6ef69efcce | [
"Apache-2.0"
] | permissive | EstudoAAS/linear-algebra-refresher-course | 0441ec9c1c9a6525c37d96cc9506ea7c54aafd2f | cf5ba417e8bbbb8a6683161e4cd7397ebb384c00 | refs/heads/master | 2021-01-19T17:59:15.720415 | 2017-09-01T21:44:16 | 2017-09-01T21:44:16 | 101,103,076 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | import vector
def main():
print(vector.Vector([-0.221,7.437]).magnitude())
print(vector.Vector([5.581,-2.136]).normalized())
print(vector.Vector([8.813,-1.331,-6.247]).magnitude())
print(vector.Vector([1.996,3.108,-4.554]).normalized())
main() | [
"[email protected]"
] | |
e57ec5de902759565ff35ff3e79168c5e2bd870e | a8f48e380ed42bc406b8b94255566c3ce854d6df | /rainday/appConfig.py | 79c071a2da87196b6a5d21704b459e468d1f25ba | [] | no_license | yurkee/DayliyProject | b9646cddd8add1872a09cfc60ded0a5c81e2163b | 6eb0928573881dd6250f8bcf28e6e3ef0c336f85 | refs/heads/master | 2023-06-11T14:49:23.114208 | 2021-06-28T09:48:19 | 2021-06-28T09:48:19 | 380,981,561 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py | # coding:utf-8
"""
@Author : Cong
@Time : 2021/6/21 16:32
"""
configData = {}
with open('app.config', 'r') as file:
datas = file.readlines()
for line in datas:
if line.startswith('#'):
continue
key = line.split("=")[0]
value = line.split("=")[1].replace("\n", '')
configData[key] = value
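# Example app.config format this parser expects (editor's illustration; the
# key names below are made up):
#   # lines starting with '#' are ignored
#   host=127.0.0.1
#   port=8080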
if __name__ == '__main__':
print(configData) | [
"[email protected]"
] | |
c8b7abd781a2513ddfc4ee2f3794e0963f294ffd | 20e57914367e784560cc7eaa87056099c7ace564 | /plot_MS_collect_data.py | 4d191bf50c36ca0ef489947a53857b46d9cc383c | [] | no_license | zhengang-zhong/distributionally_robust_optimization | cea464dd629e6b90297fa275e37320cb108af37d | a5aa339140fb9adc23a05ec500b32f24f7feb3f3 | refs/heads/main | 2023-07-11T08:26:59.483997 | 2021-08-16T11:01:39 | 2021-08-16T11:01:39 | 330,395,144 | 22 | 4 | null | null | null | null | UTF-8 | Python | false | false | 5,072 | py | import numpy as np
import matplotlib.pyplot as plt
def plot_tex_setting():
    plt.rc('text.latex', preamble=r'\usepackage{amsmath}') # for \text{..}
plt.rcParams['text.usetex'] = True
# plt.rcParams['figure.subplot.bottom'] = .265
plt.rcParams['figure.subplot.bottom'] = .265
plt.rcParams['figure.subplot.left'] = .21
plt.rcParams['figure.subplot.top'] = .995
plt.rcParams['figure.subplot.right'] = .98
    plt.rcParams['figure.subplot.hspace'] = .5 # vertical spacing
    plt.rcParams['figure.subplot.wspace'] = .5 # horizontal spacing
plt.rcParams['font.size'] = 14
plt.rcParams['axes.labelsize'] = 13
if __name__ == "__main__":
N_loop = 50
delta_t = 0.1
i_state_ub = 0.4
N_sim = 70
x_tra_list1 = []
file_path = "/Users/zhengangzhong/Dropbox/PhD/documents/paper_writing/CDC2021/result/mass_spring_collect_data/mass_spring_collect_data_x_tra_N_sample=1.txt"
with open(file_path, "r") as f:
i = 0
for line in f:
current_data = line[:-1]
current_data = current_data[1:-1]
current_data = current_data.split(',')
current_data = [float(i) for i in current_data]
x_tra_list1.append(current_data)
f.close()
x_tra1_array = np.array(x_tra_list1)
x_tra1_75_per = np.percentile(x_tra1_array, 75,axis=0)
x_tra1_25_per = np.percentile(x_tra1_array, 25, axis=0)
x_tra1_ave = np.average(np.array(x_tra1_array), axis=0)
x_tra_list3 = []
file_path = "/Users/zhengangzhong/Dropbox/PhD/documents/paper_writing/CDC2021/result/mass_spring_collect_data/mass_spring_collect_data_x_tra_N_sample=3.txt"
with open(file_path, "r") as f:
i = 0
for line in f:
current_data = line[:-1]
current_data = current_data[1:-1]
current_data = current_data.split(',')
current_data = [float(i) for i in current_data]
x_tra_list3.append(current_data)
f.close()
x_tra3_array = np.array(x_tra_list3)
x_tra3_75_per = np.percentile(x_tra3_array, 75,axis=0)
x_tra3_25_per = np.percentile(x_tra3_array, 25, axis=0)
x_tra3_ave = np.average(np.array(x_tra3_array), axis=0)
x_tra_list5 = []
file_path = "/Users/zhengangzhong/Dropbox/PhD/documents/paper_writing/CDC2021/result/mass_spring_collect_data/mass_spring_collect_data_x_tra_N_sample=5.txt"
with open(file_path, "r") as f:
i = 0
for line in f:
current_data = line[:-1]
current_data = current_data[1:-1]
current_data = current_data.split(',')
current_data = [float(i) for i in current_data]
x_tra_list5.append(current_data)
f.close()
x_tra5_array = np.array(x_tra_list5)
x_tra5_75_per = np.percentile(x_tra5_array, 75,axis=0)
x_tra5_25_per = np.percentile(x_tra5_array, 25, axis=0)
x_tra5_ave = np.average(np.array(x_tra5_array), axis=0)
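    # Editor's note: the three reading blocks above differ only in the file
    # suffix (N_sample=1/3/5); a small helper along the lines of
    #   def load_trajectories(path):
    #       with open(path) as f:
    #           return np.array([[float(v) for v in line.strip()[1:-1].split(',')] for line in f])
    # would remove the repetition (sketch only, not used below).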
Nt = np.shape(x_tra1_ave[::2])[0]
t_plot = [delta_t * i for i in range(Nt)]
plot_x_list = [x_tra1_ave[::2], x_tra3_ave[::2],x_tra5_ave[::2]]
plot_v_list = [x_tra1_ave[1::2], x_tra3_ave[1::2], x_tra5_ave[1::2]]
plot_tex_setting()
fig = plt.figure(figsize=(9, 6))
spec = fig.add_gridspec(nrows=2, ncols=1, height_ratios=[1, 1])
ax1 = fig.add_subplot(spec[0, 0])
ax2 = fig.add_subplot(spec[1, 0])
legend = []
ax1.plot(t_plot, plot_x_list[0], label=r'{\fontsize{14}{8}\selectfont} $N_{init}=1$')
ax1.plot(t_plot, plot_x_list[1], label=r'{\fontsize{14}{8}\selectfont} $N_{init}=3$')
ax1.plot(t_plot, plot_x_list[2], label=r'{\fontsize{14}{8}\selectfont} $N_{init}=5$')
ax1.set_xlabel(r'{\fontsize{14}{8}\selectfont Time, }$ t [s]$ ')
ax1.set_ylabel(r'{\fontsize{14}{8}\selectfont Displacement, }$ x_1 [m]$ ')
ax1.fill_between(t_plot, x_tra1_25_per[0::2], x_tra1_75_per[0::2],alpha=0.2)
ax1.fill_between(t_plot, x_tra3_25_per[0::2], x_tra3_75_per[0::2],alpha=0.2)
ax1.fill_between(t_plot, x_tra5_25_per[0::2], x_tra5_75_per[0::2],alpha=0.2)
ax1.legend(loc="lower right", bbox_to_anchor=(0.94, 0))
ax1.set_xlim(0, N_sim * delta_t)
ax2.plot(t_plot, plot_v_list[0], label=r'{\fontsize{14}{8}\selectfont} $N_{init}=1$',)
ax2.plot(t_plot, plot_v_list[1], label=r'{\fontsize{14}{8}\selectfont} $N_{init}=3$',)
ax2.plot(t_plot, plot_v_list[2], label=r'{\fontsize{14}{8}\selectfont} $N_{init}=5$',)
ax2.set_xlabel(r'{\fontsize{14}{8}\selectfont Time, }$ t [s]$ ')
ax2.set_ylabel(r'{\fontsize{14}{8}\selectfont Velocity, }$ x_2 [m/s]$ ')
ax2.hlines(i_state_ub, 0, N_sim * delta_t, zorder=10, color="k")
ax2.set_xlim(0, N_sim * delta_t)
ax2.fill_between(t_plot, x_tra1_25_per[1::2], x_tra1_75_per[1::2],alpha=0.2)
ax2.fill_between(t_plot, x_tra3_25_per[1::2], x_tra3_75_per[1::2],alpha=0.2)
ax2.fill_between(t_plot, x_tra5_25_per[1::2], x_tra5_75_per[1::2],alpha=0.2)
fig.savefig("/Users/zhengangzhong/Dropbox/PhD/documents/paper_writing/CDC2021/result/plot9.pdf",
bbox_inches='tight')
plt.show() | [
"[email protected]"
] | |
b62e4aa5f5c4546fdfa04eda9e0a88bd93db0717 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02729/s489492294.py | a74a86a9ade12d70a71e31be302ab59ebafe9346 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | import math
def combinations_count(n, r):
return math.factorial(n) // (math.factorial(n - r) * math.factorial(r))
def main():
n, m = map(int, input().split())
if n < 2:
even = 0
else:
even = combinations_count(n, 2)
if m < 2:
odd = 0
else:
odd = combinations_count(m, 2)
print(even + odd)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
73bd7db44b6cc15d7dd9efc06d383f68b910b287 | 0448a662b1f16c1e9fae8c9c382bcfba69a5ac07 | /35.IniciarSesión.py | 292a7a6b2e5375b9761fc43f5e91874ab5b87f2e | [
"CC0-1.0"
] | permissive | JDKdevStudio/Taller30Abril | 4daace5668bd39dbe1662e126ea549ac34f20a62 | fe8094b6669899aae50154a6d380ce583531699a | refs/heads/main | 2023-04-16T17:06:59.953753 | 2021-04-30T15:30:03 | 2021-04-30T15:30:03 | 363,182,869 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | while True:
try:
usuario = str(input("Porfavor digite un usuario: "))
contra = str(input("Porfavor digite la contraseña: "))
if usuario == "carlos" and contra == "1234":
print("Bienvenido de nuevo carlos")
break
else:
print("Contraseña erronea intente de nuevo")
except:
print("Ingresa bien los numeros")
| [
"[email protected]"
] | |
28227e4dc68b0652a7a0a9a6a0219f7496c37833 | 4caa087dcb95a6a7dbe8cc49fde383e9f2aa4426 | /mmtrack/models/vid/fgfa.py | 58cd96d42546da1fd9ac8018f98712f781126e98 | [
"Apache-2.0"
] | permissive | open-mmlab/mmtracking | 1e55c69cc1a264b3c9546c19332a38e9621430ed | e79491ec8f0b8c86fda947fbaaa824c66ab2a991 | refs/heads/master | 2023-09-01T15:41:04.322684 | 2023-04-25T13:25:18 | 2023-04-25T13:25:18 | 291,213,368 | 3,263 | 604 | Apache-2.0 | 2023-08-26T04:05:00 | 2020-08-29T06:16:56 | Python | UTF-8 | Python | false | false | 15,851 | py | # Copyright (c) OpenMMLab. All rights reserved.
import warnings
import torch
from addict import Dict
from mmdet.core import bbox2result
from mmdet.models import build_detector
from mmtrack.core import flow_warp_feats
from ..builder import MODELS, build_aggregator, build_motion
from .base import BaseVideoDetector
@MODELS.register_module()
class FGFA(BaseVideoDetector):
"""Flow-Guided Feature Aggregation for Video Object Detection.
This video object detector is the implementation of `FGFA
<https://arxiv.org/abs/1703.10025>`_.
"""
def __init__(self,
detector,
motion,
aggregator,
pretrains=None,
init_cfg=None,
frozen_modules=None,
train_cfg=None,
test_cfg=None):
super(FGFA, self).__init__(init_cfg)
if isinstance(pretrains, dict):
warnings.warn('DeprecationWarning: pretrains is deprecated, '
'please use "init_cfg" instead')
motion_pretrain = pretrains.get('motion', None)
if motion_pretrain:
motion.init_cfg = dict(
type='Pretrained', checkpoint=motion_pretrain)
else:
motion.init_cfg = None
detector_pretrain = pretrains.get('detector', None)
if detector_pretrain:
detector.init_cfg = dict(
type='Pretrained', checkpoint=detector_pretrain)
else:
detector.init_cfg = None
self.detector = build_detector(detector)
self.motion = build_motion(motion)
self.aggregator = build_aggregator(aggregator)
self.train_cfg = train_cfg
self.test_cfg = test_cfg
if frozen_modules is not None:
self.freeze_module(frozen_modules)
def forward_train(self,
img,
img_metas,
gt_bboxes,
gt_labels,
ref_img,
ref_img_metas,
ref_gt_bboxes,
ref_gt_labels,
gt_instance_ids=None,
gt_bboxes_ignore=None,
gt_masks=None,
proposals=None,
ref_gt_instance_ids=None,
ref_gt_bboxes_ignore=None,
ref_gt_masks=None,
ref_proposals=None,
**kwargs):
"""
Args:
img (Tensor): of shape (N, C, H, W) encoding input images.
Typically these should be mean centered and std scaled.
img_metas (list[dict]): list of image info dict where each dict
has: 'img_shape', 'scale_factor', 'flip', and may also contain
'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'.
For details on the values of these keys see
`mmtrack/datasets/pipelines/formatting.py:VideoCollect`.
gt_bboxes (list[Tensor]): Ground truth bboxes for each image with
shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format.
gt_labels (list[Tensor]): class indices corresponding to each box.
ref_img (Tensor): of shape (N, 2, C, H, W) encoding input images.
Typically these should be mean centered and std scaled.
2 denotes there is two reference images for each input image.
ref_img_metas (list[list[dict]]): The first list only has one
element. The second list contains reference image information
dict where each dict has: 'img_shape', 'scale_factor', 'flip',
and may also contain 'filename', 'ori_shape', 'pad_shape', and
'img_norm_cfg'. For details on the values of these keys see
`mmtrack/datasets/pipelines/formatting.py:VideoCollect`.
ref_gt_bboxes (list[Tensor]): The list only has one Tensor. The
Tensor contains ground truth bboxes for each reference image
with shape (num_all_ref_gts, 5) in
[ref_img_id, tl_x, tl_y, br_x, br_y] format. The ref_img_id
start from 0, and denotes the id of reference image for each
key image.
ref_gt_labels (list[Tensor]): The list only has one Tensor. The
Tensor contains class indices corresponding to each reference
box with shape (num_all_ref_gts, 2) in
[ref_img_id, class_indice].
gt_instance_ids (None | list[Tensor]): specify the instance id for
each ground truth bbox.
gt_bboxes_ignore (None | list[Tensor]): specify which bounding
boxes can be ignored when computing the loss.
gt_masks (None | Tensor) : true segmentation masks for each box
used if the architecture supports a segmentation task.
proposals (None | Tensor) : override rpn proposals with custom
proposals. Use when `with_rpn` is False.
ref_gt_instance_ids (None | list[Tensor]): specify the instance id
for each ground truth bboxes of reference images.
ref_gt_bboxes_ignore (None | list[Tensor]): specify which bounding
boxes of reference images can be ignored when computing the
loss.
ref_gt_masks (None | Tensor) : True segmentation masks for each
box of reference image used if the architecture supports a
segmentation task.
ref_proposals (None | Tensor) : override rpn proposals with custom
proposals of reference images. Use when `with_rpn` is False.
Returns:
dict[str, Tensor]: a dictionary of loss components
"""
assert len(img) == 1, \
'fgfa video detectors only support 1 batch size per gpu for now.'
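        # Concatenate the key frame with each reference frame along the channel
        # dim so the flow network predicts key-to-reference optical flow for
        # every reference image in a single batch.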
flow_imgs = torch.cat((img, ref_img[:, 0]), dim=1)
for i in range(1, ref_img.shape[1]):
flow_img = torch.cat((img, ref_img[:, i]), dim=1)
flow_imgs = torch.cat((flow_imgs, flow_img), dim=0)
flows = self.motion(flow_imgs, img_metas)
all_imgs = torch.cat((img, ref_img[0]), dim=0)
all_x = self.detector.extract_feat(all_imgs)
x = []
for i in range(len(all_x)):
ref_x_single = flow_warp_feats(all_x[i][1:], flows)
agg_x_single = self.aggregator(all_x[i][[0]], ref_x_single)
x.append(agg_x_single)
losses = dict()
# Two stage detector
if hasattr(self.detector, 'roi_head'):
# RPN forward and loss
if self.detector.with_rpn:
proposal_cfg = self.detector.train_cfg.get(
'rpn_proposal', self.detector.test_cfg.rpn)
rpn_losses, proposal_list = \
self.detector.rpn_head.forward_train(
x,
img_metas,
gt_bboxes,
gt_labels=None,
gt_bboxes_ignore=gt_bboxes_ignore,
proposal_cfg=proposal_cfg)
losses.update(rpn_losses)
else:
proposal_list = proposals
roi_losses = self.detector.roi_head.forward_train(
x, img_metas, proposal_list, gt_bboxes, gt_labels,
gt_bboxes_ignore, gt_masks, **kwargs)
losses.update(roi_losses)
# Single stage detector
elif hasattr(self.detector, 'bbox_head'):
bbox_losses = self.detector.bbox_head.forward_train(
x, img_metas, gt_bboxes, gt_labels, gt_bboxes_ignore)
losses.update(bbox_losses)
else:
raise TypeError('detector must has roi_head or bbox_head.')
return losses
def extract_feats(self, img, img_metas, ref_img, ref_img_metas):
"""Extract features for `img` during testing.
Args:
img (Tensor): of shape (1, C, H, W) encoding input image.
Typically these should be mean centered and std scaled.
img_metas (list[dict]): list of image information dict where each
dict has: 'img_shape', 'scale_factor', 'flip', and may also
contain 'filename', 'ori_shape', 'pad_shape', and
'img_norm_cfg'. For details on the values of these keys see
`mmtrack/datasets/pipelines/formatting.py:VideoCollect`.
ref_img (Tensor | None): of shape (1, N, C, H, W) encoding input
reference images. Typically these should be mean centered and
std scaled. N denotes the number of reference images. There
may be no reference images in some cases.
ref_img_metas (list[list[dict]] | None): The first list only has
one element. The second list contains image information dict
where each dict has: 'img_shape', 'scale_factor', 'flip', and
may also contain 'filename', 'ori_shape', 'pad_shape', and
'img_norm_cfg'. For details on the values of these keys see
`mmtrack/datasets/pipelines/formatting.py:VideoCollect`. There
may be no reference images in some cases.
Returns:
list[Tensor]: Multi level feature maps of `img`.
"""
frame_id = img_metas[0].get('frame_id', -1)
assert frame_id >= 0
num_left_ref_imgs = img_metas[0].get('num_left_ref_imgs', -1)
frame_stride = img_metas[0].get('frame_stride', -1)
# test with adaptive stride
if frame_stride < 1:
if frame_id == 0:
self.memo = Dict()
self.memo.img = ref_img[0]
ref_x = self.detector.extract_feat(ref_img[0])
# 'tuple' object (e.g. the output of FPN) does not support
# item assignment
self.memo.feats = []
for i in range(len(ref_x)):
self.memo.feats.append(ref_x[i])
x = self.detector.extract_feat(img)
# test with fixed stride
else:
if frame_id == 0:
self.memo = Dict()
self.memo.img = ref_img[0]
ref_x = self.detector.extract_feat(ref_img[0])
# 'tuple' object (e.g. the output of FPN) does not support
# item assignment
self.memo.feats = []
# the features of img is same as ref_x[i][[num_left_ref_imgs]]
x = []
for i in range(len(ref_x)):
self.memo.feats.append(ref_x[i])
x.append(ref_x[i][[num_left_ref_imgs]])
elif frame_id % frame_stride == 0:
assert ref_img is not None
x = []
ref_x = self.detector.extract_feat(ref_img[0])
for i in range(len(ref_x)):
self.memo.feats[i] = torch.cat(
(self.memo.feats[i], ref_x[i]), dim=0)[1:]
x.append(self.memo.feats[i][[num_left_ref_imgs]])
self.memo.img = torch.cat((self.memo.img, ref_img[0]),
dim=0)[1:]
else:
assert ref_img is None
x = self.detector.extract_feat(img)
flow_imgs = torch.cat(
(img.repeat(self.memo.img.shape[0], 1, 1, 1), self.memo.img),
dim=1)
flows = self.motion(flow_imgs, img_metas)
agg_x = []
for i in range(len(x)):
agg_x_single = flow_warp_feats(self.memo.feats[i], flows)
if frame_stride < 1:
agg_x_single = torch.cat((x[i], agg_x_single), dim=0)
else:
agg_x_single[num_left_ref_imgs] = x[i]
agg_x_single = self.aggregator(x[i], agg_x_single)
agg_x.append(agg_x_single)
return agg_x
def simple_test(self,
img,
img_metas,
ref_img=None,
ref_img_metas=None,
proposals=None,
rescale=False):
"""Test without augmentation.
Args:
img (Tensor): of shape (1, C, H, W) encoding input image.
Typically these should be mean centered and std scaled.
img_metas (list[dict]): list of image information dict where each
dict has: 'img_shape', 'scale_factor', 'flip', and may also
contain 'filename', 'ori_shape', 'pad_shape', and
'img_norm_cfg'. For details on the values of these keys see
`mmtrack/datasets/pipelines/formatting.py:VideoCollect`.
ref_img (list[Tensor] | None): The list only contains one Tensor
of shape (1, N, C, H, W) encoding input reference images.
Typically these should be mean centered and std scaled. N
denotes the number for reference images. There may be no
reference images in some cases.
ref_img_metas (list[list[list[dict]]] | None): The first and
second list only has one element. The third list contains
image information dict where each dict has: 'img_shape',
'scale_factor', 'flip', and may also contain 'filename',
'ori_shape', 'pad_shape', and 'img_norm_cfg'. For details on
the values of these keys see
`mmtrack/datasets/pipelines/formatting.py:VideoCollect`. There
may be no reference images in some cases.
proposals (None | Tensor): Override rpn proposals with custom
proposals. Use when `with_rpn` is False. Defaults to None.
rescale (bool): If False, then returned bboxes and masks will fit
the scale of img, otherwise, returned bboxes and masks
will fit the scale of original image shape. Defaults to False.
Returns:
dict[str : list(ndarray)]: The detection results.
"""
if ref_img is not None:
ref_img = ref_img[0]
if ref_img_metas is not None:
ref_img_metas = ref_img_metas[0]
x = self.extract_feats(img, img_metas, ref_img, ref_img_metas)
# Two stage detector
if hasattr(self.detector, 'roi_head'):
if proposals is None:
proposal_list = self.detector.rpn_head.simple_test_rpn(
x, img_metas)
else:
proposal_list = proposals
outs = self.detector.roi_head.simple_test(
x, proposal_list, img_metas, rescale=rescale)
# Single stage detector
elif hasattr(self.detector, 'bbox_head'):
            outs = self.detector.bbox_head(x)
            bbox_list = self.detector.bbox_head.get_bboxes(
                *outs, img_metas, rescale=rescale)
# skip post-processing when exporting to ONNX
if torch.onnx.is_in_onnx_export():
return bbox_list
outs = [
                bbox2result(det_bboxes, det_labels, self.detector.bbox_head.num_classes)
for det_bboxes, det_labels in bbox_list
]
else:
raise TypeError('detector must has roi_head or bbox_head.')
results = dict()
results['det_bboxes'] = outs[0]
if len(outs) == 2:
results['det_masks'] = outs[1]
return results
def aug_test(self, imgs, img_metas, **kwargs):
"""Test function with test time augmentation."""
raise NotImplementedError
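# Illustrative construction (editor's sketch, not taken from the mmtracking
# configs): an FGFA model is normally built from a config dict through the
# MODELS registry imported above. The sub-config contents below are assumptions;
# the maintained configs live under configs/vid/fgfa/ in the repository.
#   cfg = dict(
#       type='FGFA',
#       detector=dict(...),                       # e.g. a two-stage detector config
#       motion=dict(type='FlowNetSimple', ...),   # optical-flow network
#       aggregator=dict(type='EmbedAggregator', ...))
#   model = MODELS.build(cfg)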
| [
"[email protected]"
] | |
625dda6943acbebfb71c78a141b1df9c79d5f2b3 | 39e3091637c76d28c23d55499d1d4faa641a6c38 | /python/fe4.py | 6d7dc964f2ddc82330fca75f19e8aa8a4c8e52d7 | [
"MIT"
] | permissive | hyolin97/bigdata_web | 146bdcee609cc6c9c05188d4c02673b6b32d5fa3 | bcbce9e4d9fd3996f7e3c834596167c41741f350 | refs/heads/master | 2021-06-28T01:52:13.830050 | 2019-03-31T13:56:16 | 2019-03-31T13:56:16 | 119,010,114 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py | select,answer,numStr,num1,num2=0,0,"",0,0
select=int(input("1.수식 계산기 2. 두수 아이 합계:"))
if select==1:
numStr=input("***수식을 입력하세요:")
answer=eval(numStr)
print("%s 결과는 %5.1f입니다"%(numStr,answer))
elif select==2:
num1=int(input("***첫번째숫자를 입력하시오:"))
num2=int(input("***두번째숫자를 입력하시오:"))
for i in range(num1,num2+1):
answer=answer+i
print("%d+....+%d는 %d입니다."%(num1,num2,answer))
else:
print("1또는 2만 입력해야합니다.")
| [
"[email protected]"
] | |
ee140f97e2454be1575f79772d7548dcb581ff0a | 7dfe15213459ed945c4dd909e239045e74463b0a | /utils/transferPointsUv.py | eddf66ee1bbc331a027f3168cf385a7a947cad7d | [] | no_license | jonntd/mayadev-2 | 5314a088665313d851175d5cc30b856ae120070b | 1bd54f143acb7ecc1e99fa0a4fb4c3db63fa248d | refs/heads/master | 2023-03-17T21:09:21.114253 | 2020-12-23T23:23:24 | 2020-12-23T23:23:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,132 | py | import maya.api.OpenMaya as om
driver = 'pSphereShape2'
driven = 'pSphereShape1'
mSel = om.MSelectionList()
mSel.add(driver)
mSel.add(driven)
driverDag = mSel.getDagPath(0)
drivenDag = mSel.getDagPath(1)
driverMFnMesh = om.MFnMesh(driverDag)
driverVtxMIt = om.MItMeshVertex(driverDag)
drivenMFnMesh = om.MFnMesh(drivenDag)
drivenVtxMIt = om.MItMeshVertex(drivenDag)
driverUvs = driverMFnMesh.getUVs()
sortedDriverUvs = map(list, zip(driverUvs[0], driverUvs[1]))
transferSpace = om.MSpace.kObject
# mapped - driven - driver
mapped_vertices = []
while not drivenVtxMIt.isDone():
if drivenVtxMIt.getUV() in sortedDriverUvs:
while not driverVtxMIt.isDone():
if drivenVtxMIt.getUV() == driverVtxMIt.getUV():
drivenIndex = int(drivenVtxMIt.index())
driverIndex = int(driverVtxMIt.index())
mapped_vertices.append([driverIndex, drivenIndex])
pos = driverVtxMIt.position(om.MSpace.kWorld)
drivenVtxMIt.setPosition(pos, om.MSpace.kWorld)
driverVtxMIt.next()
driverVtxMIt.reset()
drivenVtxMIt.next()
| [
"[email protected]"
] | |
03a7d6b0a953d17284f0c7c33996a38e495e7aa0 | 9e992de6e5a8e47b59ea575ae67e57058bf45b3c | /iotDjango/settings.py | 47bc57e192ec7d66de1c92e61f21c2ecf0803972 | [] | no_license | santyLB9925/iotDjango | ff340c40df1c164eaf0d28ec1c0d4204860da1a0 | 8e0d1d0e1401c07ea756f76746402276bdefb170 | refs/heads/master | 2020-05-24T14:40:51.334308 | 2019-05-18T04:30:51 | 2019-05-18T04:30:51 | 187,314,012 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,097 | py | """
Django settings for iotDjango project.
Generated by 'django-admin startproject' using Django 2.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'uef&3(h&g9&zxo@(=ujr_$z#0%262h_pd2ihnu!ei7+0kd3@mo'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'iotDjango.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'iotDjango.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
d0fce14c0a6bd8572d815aa2b3327b1fe6206278 | f6d4a8086cbd11b0e7c2e3f75a2fb0880bc47f68 | /Kiraya/apps.py | b8d7c51b0f4fbbf2ebf1e64a9114055db2e331b6 | [] | no_license | seandeviniii/Kiraya | babfc39152cce80989c99ce69322a93f4eee1617 | 93de56dbc4e775196e03331f139879609ca2ede7 | refs/heads/master | 2020-03-06T15:27:03.745324 | 2018-04-08T09:57:15 | 2018-04-08T09:57:15 | 126,956,184 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | from django.apps import AppConfig
class KirayaConfig(AppConfig):
name = 'Kiraya'
| [
"[email protected]"
] | |
a147c5361d4cee8cbedb8350f9740a4b2fbca6bc | 1b211b2625b53813398babe048e88988aff3633e | /rltorch/algs/q_learning/DQNTarget/agent.py | 84136a3c21113ef357083d7f045b44c7771f8af0 | [
"MIT"
] | permissive | Jjschwartz/rltorch | 07b115767ed3aa39d516ebbeeeea9e3f111fdd5e | eeb2ad955f018d768db98c4a2be5da96a75579f6 | refs/heads/master | 2021-12-14T01:06:49.155976 | 2021-11-29T03:02:20 | 2021-11-29T03:02:20 | 238,176,896 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,905 | py | import torch
from rltorch.algs.q_learning.DQN.model import DQN
from rltorch.algs.q_learning.DQN.agent import DQNAgent
class DQNTargetAgent(DQNAgent):
def __init__(self, name="DQNTarget", **kwargs):
super().__init__(name, **kwargs)
# Target Neural Network related attributes
self.target_dqn = DQN(self.obs_dim,
kwargs["hidden_sizes"],
self.num_actions).to(self.device)
self.target_update_freq = kwargs["target_update_freq"]
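    # one DQN optimization step: TD targets are computed with the separate target
    # network, which is only refreshed every target_update_freq updates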
def optimize(self):
if self.steps_done % self.network_update_freq != 0:
return None
if self.steps_done < self.start_steps:
return 0, 0, 0, 0
batch = self.replay.sample_batch(self.batch_size)
s_batch, a_batch, next_s_batch, r_batch, d_batch = batch
# get q_vals for each state and the action performed in that state
q_vals_raw = self.dqn(s_batch)
q_vals = q_vals_raw.gather(1, a_batch).squeeze()
# get target q val = max val of next state
with torch.no_grad():
target_q_val_raw = self.target_dqn(next_s_batch)
target_q_val = target_q_val_raw.max(1)[0]
target = r_batch + self.discount*(1-d_batch)*target_q_val
loss = self.loss_fn(q_vals, target)
# optimize the model
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
self.updates_done += 1
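        # hard update: copy the online network weights into the frozen target network
        # every target_update_freq optimization steps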
if self.updates_done % self.target_update_freq == 0:
self.update_target_net()
q_vals_max = q_vals_raw.max(1)[0]
mean_v = q_vals_max.mean().item()
max_v = q_vals.max().item()
mean_td_error = (target - q_vals).abs().mean().item()
return loss.item(), mean_v, max_v, mean_td_error
def update_target_net(self):
self.target_dqn.load_state_dict(self.dqn.state_dict())
| [
"[email protected]"
] | |
1f8cf8e7bda22df267f367e3dbd60608936c57a8 | ede210ecd1d89ce3dc54755c7bdc0f1dcfc88037 | /core/features/process_data.py | 65c5a0c7d142e3268e2c6ef5058642696a517578 | [] | no_license | LonelyDaoist/mlflow | 0acb82d467e029451eec16696d8467af1c270603 | a2c155590af8e83a506f5aeda31625b96c147cc1 | refs/heads/main | 2023-05-08T17:26:11.488039 | 2021-05-28T00:49:38 | 2021-05-28T00:49:38 | 370,650,193 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | import os
import sys
import pandas as pd
import numpy as np
import mlflow
PATH = os.environ["PYTHONPATH"]
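# merges the red and white wine CSVs, tags each row with its wine type, shuffles
# the rows and logs the merged file as an MLflow run artifact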
def process(raw_data):
red = pd.read_csv(f"{raw_data}/red_wine.csv",sep=";")
white = pd.read_csv(f"{raw_data}/white_wine.csv",sep=";")
red["type"] = 1
white["type"] = 2
data = pd.concat([red,white])
data = data.sample(frac=1)
data.to_csv(f"{PATH}/data/processed/tmp/data.csv",index=False)
with mlflow.start_run():
mlflow.log_artifacts(f"{PATH}/data/processed/tmp")
if __name__ == "__main__":
raw_data = sys.argv[1]
process(raw_data)
| [
"[email protected]"
] | |
163dc1b4f9a05cf24422cac3ced608c4c4e768f8 | 60163aa3ba64492218082fb9cf980944a32c43ae | /omega_miya/utils/Omega_Base/tables.py | ebb5ddb4b3c7c63c8f9a61726498fa95d2c97d46 | [
"Python-2.0",
"MIT"
] | permissive | 58563528/omega-miyabot | 63322ce2140d8d04b310741e81a38f48d562b3ef | d0c04bf06c193d90ad968973c924b01b7bdd735d | refs/heads/main | 2023-07-04T09:42:38.017986 | 2021-07-30T06:56:18 | 2021-07-30T06:56:18 | 392,531,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,014 | py | import nonebot
from sqlalchemy import Sequence, ForeignKey
from sqlalchemy import Column, Integer, BigInteger, String, DateTime
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
global_config = nonebot.get_driver().config
TABLE_PREFIX = global_config.db_table_prefix
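# SQLAlchemy declarative models for the bot database; every table name carries the
# configurable TABLE_PREFIX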
# declarative base class for all data tables
Base = declarative_base()
# system status table, stores runtime state parameters
class OmegaStatus(Base):
__tablename__ = f'{TABLE_PREFIX}status'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
    # table structure
id = Column(Integer, Sequence('omega_status_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
name = Column(String(32), nullable=False, index=True, unique=True, comment='参数名称')
status = Column(Integer, nullable=False, comment='参数值')
info = Column(String(128), nullable=True, comment='参数说明')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
def __init__(self, name, status, info, created_at=None, updated_at=None):
self.name = name
self.status = status
self.info = info
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<OmegaStatus(name='{self.name}', status='{self.status}', info='{self.info}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# user (member) table
class User(Base):
__tablename__ = f'{TABLE_PREFIX}users'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
    # table structure
id = Column(Integer, Sequence('users_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
qq = Column(BigInteger, nullable=False, index=True, unique=True, comment='QQ号')
nickname = Column(String(64), nullable=False, comment='昵称')
is_friend = Column(Integer, nullable=False, comment='是否为好友(已弃用)')
aliasname = Column(String(64), nullable=True, comment='自定义名称')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
    # declare foreign-key relationships
has_friends = relationship('Friends', back_populates='user_friend', uselist=False,
cascade="all, delete", passive_deletes=True)
has_skills = relationship('UserSkill', back_populates='user_skill',
cascade="all, delete", passive_deletes=True)
in_which_groups = relationship('UserGroup', back_populates='user_groups',
cascade="all, delete", passive_deletes=True)
vocation = relationship('Vocation', back_populates='vocation_for_user', uselist=False,
cascade="all, delete", passive_deletes=True)
user_auth = relationship('AuthUser', back_populates='auth_for_user', uselist=False,
cascade="all, delete", passive_deletes=True)
users_sub_what = relationship('UserSub', back_populates='users_sub',
cascade="all, delete", passive_deletes=True)
def __init__(self, qq, nickname, is_friend=0, aliasname=None, created_at=None, updated_at=None):
self.qq = qq
self.nickname = nickname
self.is_friend = is_friend
self.aliasname = aliasname
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<User(qq='{self.qq}', nickname='{self.nickname}', aliasname='{self.aliasname}', " \
f"is_friend='{self.is_friend}', created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# friends table
class Friends(Base):
__tablename__ = f'{TABLE_PREFIX}friends'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
    # table structure
id = Column(Integer, Sequence('friends_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
user_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}users.id'), nullable=False)
nickname = Column(String(64), nullable=False, comment='昵称')
remark = Column(String(64), nullable=True, comment='备注')
private_permissions = Column(Integer, nullable=False, comment='是否启用私聊权限')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
user_friend = relationship('User', back_populates='has_friends')
def __init__(self, user_id, nickname, remark=None, private_permissions=0, created_at=None, updated_at=None):
self.user_id = user_id
self.nickname = nickname
self.remark = remark
self.private_permissions = private_permissions
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<Friends(user_id='{self.user_id}', nickname='{self.nickname}', remark='{self.remark}', " \
f"private_permissions='{self.private_permissions}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# skills table
class Skill(Base):
__tablename__ = f'{TABLE_PREFIX}skills'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('skills_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
name = Column(String(64), nullable=False, index=True, unique=True, comment='技能名称')
description = Column(String(64), nullable=True, comment='技能介绍')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
avaiable_skills = relationship('UserSkill', back_populates='skill_used',
cascade="all, delete", passive_deletes=True)
def __init__(self, name, description=None, created_at=None, updated_at=None):
self.name = name
self.description = description
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<Skill(name='{self.name}', description='{self.description}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# user-skill association table
class UserSkill(Base):
__tablename__ = f'{TABLE_PREFIX}users_skills'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('users_skills_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
user_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}users.id'), nullable=False)
skill_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}skills.id'), nullable=False)
skill_level = Column(Integer, nullable=False, comment='技能等级')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
user_skill = relationship('User', back_populates='has_skills')
skill_used = relationship('Skill', back_populates='avaiable_skills')
def __init__(self, user_id, skill_id, skill_level, created_at=None, updated_at=None):
self.user_id = user_id
self.skill_id = skill_id
self.skill_level = skill_level
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<UserSkill(user_id='{self.user_id}', skill_id='{self.skill_id}', skill_level='{self.skill_level}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# QQ group table
class Group(Base):
__tablename__ = f'{TABLE_PREFIX}groups'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('groups_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
name = Column(String(64), nullable=False, comment='qq群名称')
group_id = Column(BigInteger, nullable=False, index=True, unique=True, comment='qq群号')
notice_permissions = Column(Integer, nullable=False, comment='通知权限')
command_permissions = Column(Integer, nullable=False, comment='命令权限')
permission_level = Column(Integer, nullable=False, comment='权限等级, 越大越高')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
avaiable_groups = relationship('UserGroup', back_populates='groups_have_users',
cascade="all, delete", passive_deletes=True)
sub_what = relationship('GroupSub', back_populates='groups_sub',
cascade="all, delete", passive_deletes=True)
group_auth = relationship('AuthGroup', back_populates='auth_for_group', uselist=False,
cascade="all, delete", passive_deletes=True)
group_box = relationship('GroupEmailBox', back_populates='box_for_group',
cascade="all, delete", passive_deletes=True)
def __init__(self, name, group_id, notice_permissions, command_permissions,
permission_level, created_at=None, updated_at=None):
self.name = name
self.group_id = group_id
self.notice_permissions = notice_permissions
self.command_permissions = command_permissions
self.permission_level = permission_level
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<Group(name='{self.name}', group_id='{self.group_id}', " \
f"notice_permissions='{self.notice_permissions}', command_permissions='{self.command_permissions}', " \
f"permission_level='{self.permission_level}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# user-group association table
class UserGroup(Base):
__tablename__ = f'{TABLE_PREFIX}users_groups'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('users_groups_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
user_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}users.id'), nullable=False)
group_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}groups.id'), nullable=False)
user_group_nickname = Column(String(64), nullable=True, comment='用户群昵称')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
user_groups = relationship('User', back_populates='in_which_groups')
groups_have_users = relationship('Group', back_populates='avaiable_groups')
def __init__(self, user_id, group_id, user_group_nickname=None, created_at=None, updated_at=None):
self.user_id = user_id
self.group_id = group_id
self.user_group_nickname = user_group_nickname
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<UserGroup(user_id='{self.user_id}', group_id='{self.group_id}', " \
f"user_group_nickname='{self.user_group_nickname}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# user authorization table
class AuthUser(Base):
__tablename__ = f'{TABLE_PREFIX}auth_user'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('auth_user_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
user_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}users.id'), nullable=False)
auth_node = Column(String(128), nullable=False, index=True, comment='授权节点, 由插件检查')
allow_tag = Column(Integer, nullable=False, comment='授权标签')
deny_tag = Column(Integer, nullable=False, comment='拒绝标签')
auth_info = Column(String(128), nullable=True, comment='授权信息备注')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
auth_for_user = relationship('User', back_populates='user_auth')
def __init__(self, user_id, auth_node, allow_tag=0, deny_tag=0, auth_info=None, created_at=None, updated_at=None):
self.user_id = user_id
self.auth_node = auth_node
self.allow_tag = allow_tag
self.deny_tag = deny_tag
self.auth_info = auth_info
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<AuthUser(user_id='{self.user_id}', auth_node='{self.auth_node}', " \
f"allow_tag='{self.allow_tag}', deny_tag='{self.deny_tag}', auth_info='{self.auth_info}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# group authorization table
class AuthGroup(Base):
__tablename__ = f'{TABLE_PREFIX}auth_group'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('auth_group_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
group_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}groups.id'), nullable=False)
auth_node = Column(String(128), nullable=False, index=True, comment='授权节点, 由插件检查')
allow_tag = Column(Integer, nullable=False, comment='授权标签')
deny_tag = Column(Integer, nullable=False, comment='拒绝标签')
auth_info = Column(String(128), nullable=True, comment='授权信息备注')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
auth_for_group = relationship('Group', back_populates='group_auth')
def __init__(self, group_id, auth_node, allow_tag=0, deny_tag=0, auth_info=None, created_at=None, updated_at=None):
self.group_id = group_id
self.auth_node = auth_node
self.allow_tag = allow_tag
self.deny_tag = deny_tag
self.auth_info = auth_info
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<AuthGroup(group_id='{self.group_id}', auth_node='{self.auth_node}', " \
f"allow_tag='{self.allow_tag}', deny_tag='{self.deny_tag}', auth_info='{self.auth_info}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# mailbox table
class EmailBox(Base):
__tablename__ = f'{TABLE_PREFIX}email_box'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('email_box_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
address = Column(String(128), nullable=False, index=True, unique=True, comment='邮箱地址')
server_host = Column(String(128), nullable=False, comment='IMAP服务器地址')
protocol = Column(String(16), nullable=False, comment='协议')
port = Column(Integer, nullable=False, comment='服务器端口')
password = Column(String(256), nullable=False, comment='密码')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
used_box = relationship('GroupEmailBox', back_populates='has_box',
cascade="all, delete", passive_deletes=True)
def __init__(self, address: str, server_host: str, password: str,
protocol: str = 'imap', port: int = 993, created_at=None, updated_at=None):
self.address = address
self.server_host = server_host
self.protocol = protocol
self.port = port
self.password = password
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<EmailBox(address='{self.address}', server_host='{self.server_host}', " \
f"protocol='{self.protocol}', port='{self.port}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# group mailbox table
class GroupEmailBox(Base):
__tablename__ = f'{TABLE_PREFIX}group_email_box'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('group_email_box_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
email_box_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}email_box.id'), nullable=False)
group_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}groups.id'), nullable=False)
box_info = Column(String(64), nullable=True, comment='群邮箱信息,暂空备用')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
box_for_group = relationship('Group', back_populates='group_box')
has_box = relationship('EmailBox', back_populates='used_box')
def __init__(self, email_box_id, group_id, box_info=None, created_at=None, updated_at=None):
self.email_box_id = email_box_id
self.group_id = group_id
self.box_info = box_info
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<GroupEmailBox(email_box_id='{self.email_box_id}', group_id='{self.group_id}', " \
f"box_info='{self.box_info}', created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# email table
class Email(Base):
__tablename__ = f'{TABLE_PREFIX}emails'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
    # table structure
id = Column(Integer, Sequence('emails_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
mail_hash = Column(String(128), nullable=False, index=True, unique=True, comment='邮件hash')
date = Column(String(128), nullable=False, comment='时间')
header = Column(String(128), nullable=False, comment='标题')
sender = Column(String(128), nullable=False, comment='发件人')
to = Column(String(1024), nullable=False, comment='收件人')
body = Column(String(4096), nullable=True, comment='正文')
html = Column(String(8192), nullable=True, comment='html正文')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
def __init__(self, mail_hash, date, header, sender, to, body, html, created_at=None, updated_at=None):
self.mail_hash = mail_hash
self.date = date
self.header = header
self.sender = sender
self.to = to
self.body = body
self.html = html
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<Email(mail_hash='{self.mail_hash}', date='{self.date}', " \
f"header='{self.header}', sender='{self.sender}', to='{self.to}', " \
f"body='{self.body}', html='{self.html}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# event history table
class History(Base):
__tablename__ = f'{TABLE_PREFIX}history'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
    # table structure
id = Column(Integer, Sequence('history_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
time = Column(BigInteger, nullable=False, comment='事件发生的时间戳')
self_id = Column(BigInteger, nullable=False, comment='收到事件的机器人QQ号')
post_type = Column(String(64), nullable=False, comment='事件类型')
detail_type = Column(String(64), nullable=False, comment='消息/通知/请求/元事件类型')
sub_type = Column(String(64), nullable=True, comment='子事件类型')
event_id = Column(BigInteger, nullable=True, comment='事件id, 消息事件为message_id')
group_id = Column(BigInteger, nullable=True, comment='群号')
user_id = Column(BigInteger, nullable=True, comment='发送者QQ号')
user_name = Column(String(64), nullable=True, comment='发送者名称')
raw_data = Column(String(4096), nullable=True, comment='原始事件内容')
msg_data = Column(String(4096), nullable=True, comment='经处理的事件内容')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
def __init__(self, time, self_id, post_type, detail_type, sub_type=None, event_id=None,
group_id=None, user_id=None, user_name=None, raw_data=None, msg_data=None,
created_at=None, updated_at=None):
self.time = time
self.self_id = self_id
self.post_type = post_type
self.detail_type = detail_type
self.sub_type = sub_type
self.event_id = event_id
self.group_id = group_id
self.user_id = user_id
self.user_name = user_name
self.raw_data = raw_data
self.msg_data = msg_data
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<History(time='{self.time}', self_id='{self.self_id}', post_type='{self.post_type}', " \
f"detail_type='{self.detail_type}', sub_type='{self.sub_type}', event_id='{self.event_id}', " \
f"group_id='{self.group_id}', user_id='{self.user_id}', user_name='{self.user_name}', " \
f"raw_data='{self.raw_data}', msg_data='{self.msg_data}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# subscription table
class Subscription(Base):
__tablename__ = f'{TABLE_PREFIX}subscription'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('subscription_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
    # subscription type: 0 reserved, 1 live room, 2 dynamics, 8 Pixivision
sub_type = Column(Integer, nullable=False, comment='订阅类型,0暂留,1直播间,2动态')
sub_id = Column(Integer, nullable=False, index=True, comment='订阅id,直播为直播间房间号,动态为用户uid')
up_name = Column(String(64), nullable=False, comment='up名称')
live_info = Column(String(64), nullable=True, comment='相关信息,暂空备用')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
be_sub = relationship('GroupSub', back_populates='sub_by', cascade="all, delete", passive_deletes=True)
be_sub_users = relationship('UserSub', back_populates='sub_by_users', cascade="all, delete", passive_deletes=True)
def __init__(self, sub_type, sub_id, up_name, live_info=None, created_at=None, updated_at=None):
self.sub_type = sub_type
self.sub_id = sub_id
self.up_name = up_name
self.live_info = live_info
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<Subscription(sub_type='{self.sub_type}', sub_id='{self.sub_id}', up_name='{self.up_name}', " \
f"live_info='{self.live_info}', created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# QQ group subscription table
class GroupSub(Base):
__tablename__ = f'{TABLE_PREFIX}groups_subs'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('groups_subs_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
sub_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}subscription.id'), nullable=False)
group_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}groups.id'), nullable=False)
group_sub_info = Column(String(64), nullable=True, comment='群订阅信息,暂空备用')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
groups_sub = relationship('Group', back_populates='sub_what')
sub_by = relationship('Subscription', back_populates='be_sub')
def __init__(self, sub_id, group_id, group_sub_info=None, created_at=None, updated_at=None):
self.sub_id = sub_id
self.group_id = group_id
self.group_sub_info = group_sub_info
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<GroupSub(sub_id='{self.sub_id}', group_id='{self.group_id}', " \
f"group_sub_info='{self.group_sub_info}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# friend (private user) subscription table
class UserSub(Base):
__tablename__ = f'{TABLE_PREFIX}users_subs'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('users_subs_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
sub_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}subscription.id'), nullable=False)
user_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}users.id'), nullable=False)
user_sub_info = Column(String(64), nullable=True, comment='用户订阅信息,暂空备用')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
users_sub = relationship('User', back_populates='users_sub_what')
sub_by_users = relationship('Subscription', back_populates='be_sub_users')
def __init__(self, sub_id, user_id, user_sub_info=None, created_at=None, updated_at=None):
self.sub_id = sub_id
self.user_id = user_id
self.user_sub_info = user_sub_info
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<UserSub(sub_id='{self.sub_id}', user_id='{self.user_id}', " \
f"user_sub_info='{self.user_sub_info}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# Bilibili dynamics table
class Bilidynamic(Base):
__tablename__ = f'{TABLE_PREFIX}bili_dynamics'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
    # table structure
id = Column(Integer, Sequence('bili_dynamics_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
uid = Column(Integer, nullable=False, index=True, comment='up的uid')
dynamic_id = Column(BigInteger, nullable=False, index=True, unique=True, comment='动态的id')
dynamic_type = Column(Integer, nullable=False, comment='动态的类型')
content = Column(String(4096), nullable=False, comment='动态内容')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
def __init__(self, uid, dynamic_id, dynamic_type, content, created_at=None, updated_at=None):
self.uid = uid
self.dynamic_id = dynamic_id
self.dynamic_type = dynamic_type
self.content = content
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<Bilidynamic(uid='{self.uid}', dynamic_id='{self.dynamic_id}', " \
f"dynamic_type='{self.dynamic_type}', content='{self.content}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# vacation (leave) table
class Vocation(Base):
__tablename__ = f'{TABLE_PREFIX}vocations'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('vocations_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
user_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}users.id'), nullable=False)
status = Column(Integer, nullable=False, comment='请假状态 0-空闲 1-请假 2-工作中')
stop_at = Column(DateTime, nullable=True, comment='假期结束时间')
reason = Column(String(64), nullable=True, comment='请假理由')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
vocation_for_user = relationship('User', back_populates='vocation')
def __init__(self, user_id, status, stop_at=None, reason=None, created_at=None, updated_at=None):
self.user_id = user_id
self.status = status
self.stop_at = stop_at
self.reason = reason
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<Vocation(user_id='{self.user_id}', status='{self.status}', stop_at='{self.stop_at}', " \
f"reason='{self.reason}', created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# Pixiv tag table
class PixivTag(Base):
__tablename__ = f'{TABLE_PREFIX}pixiv_tag'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('pixiv_tag_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
tagname = Column(String(128), nullable=False, index=True, unique=True, comment='tag名称')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
has_illusts = relationship('PixivT2I', back_populates='tag_has_illusts',
cascade="all, delete", passive_deletes=True)
def __init__(self, tagname, created_at=None, updated_at=None):
self.tagname = tagname
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<PixivTag(tagname='{self.tagname}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# Pixiv illustration table
class Pixiv(Base):
__tablename__ = f'{TABLE_PREFIX}pixiv_illusts'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
    # table structure
id = Column(Integer, Sequence('upixiv_illusts_id_seq'), primary_key=True, nullable=False, index=True, unique=True)
pid = Column(Integer, nullable=False, index=True, unique=True, comment='pid')
uid = Column(Integer, nullable=False, index=True, comment='uid')
title = Column(String(128), nullable=False, index=True, comment='title')
uname = Column(String(128), nullable=False, index=True, comment='author')
nsfw_tag = Column(Integer, nullable=False, comment='nsfw标签, 0=safe, 1=setu. 2=r18')
tags = Column(String(1024), nullable=False, comment='tags')
url = Column(String(1024), nullable=False, comment='url')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
has_tags = relationship('PixivT2I', back_populates='illust_tags',
cascade="all, delete", passive_deletes=True)
def __init__(self, pid, uid, title, uname, nsfw_tag, tags, url, created_at=None, updated_at=None):
self.pid = pid
self.uid = uid
self.title = title
self.uname = uname
self.nsfw_tag = nsfw_tag
self.tags = tags
self.url = url
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<Pixiv(pid='{self.pid}', uid='{self.uid}', title='{self.title}', uname='{self.uname}', " \
f"nsfw_tag='{self.nsfw_tag}', tags='{self.tags}', url='{self.url}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# Pixiv illustration-tag association table
class PixivT2I(Base):
__tablename__ = f'{TABLE_PREFIX}pixiv_tag_to_illusts'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
id = Column(Integer, Sequence('pixiv_tag_to_illusts_id_seq'),
primary_key=True, nullable=False, index=True, unique=True)
illust_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}pixiv_illusts.id'), nullable=False)
tag_id = Column(Integer, ForeignKey(f'{TABLE_PREFIX}pixiv_tag.id'), nullable=False)
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
illust_tags = relationship('Pixiv', back_populates='has_tags')
tag_has_illusts = relationship('PixivTag', back_populates='has_illusts')
def __init__(self, illust_id, tag_id, created_at=None, updated_at=None):
self.illust_id = illust_id
self.tag_id = tag_id
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<PixivT2I(illust_id='{self.illust_id}', tag_id='{self.tag_id}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# Pixivision article table
class Pixivision(Base):
__tablename__ = f'{TABLE_PREFIX}pixivision_article'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
    # table structure
id = Column(Integer, Sequence('pixivision_article_id_seq'),
primary_key=True, nullable=False, index=True, unique=True)
aid = Column(Integer, nullable=False, index=True, unique=True, comment='aid')
title = Column(String(256), nullable=False, comment='title')
description = Column(String(1024), nullable=False, comment='description')
tags = Column(String(1024), nullable=False, comment='tags')
illust_id = Column(String(1024), nullable=False, comment='tags')
url = Column(String(1024), nullable=False, comment='url')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
def __init__(self, aid, title, description, tags, illust_id, url, created_at=None, updated_at=None):
self.aid = aid
self.title = title
self.description = description
self.tags = tags
self.illust_id = illust_id
self.url = url
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<Pixivision(aid='{self.aid}', title='{self.title}', description='{self.description}', " \
f"tags='{self.tags}', illust_id='{self.illust_id}', url='{self.url}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
# cooldown event table
class CoolDownEvent(Base):
__tablename__ = f'{TABLE_PREFIX}cool_down_event'
__table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8mb4'}
    # table structure
id = Column(Integer, Sequence('cool_down_event_id_seq'),
primary_key=True, nullable=False, index=True, unique=True)
event_type = Column(String(16), nullable=False, index=True, comment='冷却事件类型/global/plugin/group/user')
stop_at = Column(DateTime, nullable=False, comment='冷却结束时间')
plugin = Column(String(64), nullable=True, index=True, comment='plugin事件对应插件名')
group_id = Column(BigInteger, nullable=True, index=True, comment='group事件对应group_id')
user_id = Column(BigInteger, nullable=True, index=True, comment='user事件对应user_id')
description = Column(String(128), nullable=True, comment='事件描述')
created_at = Column(DateTime, nullable=True)
updated_at = Column(DateTime, nullable=True)
def __init__(self, event_type, stop_at, plugin=None, group_id=None, user_id=None, description=None,
created_at=None, updated_at=None):
self.event_type = event_type
self.stop_at = stop_at
self.plugin = plugin
self.group_id = group_id
self.user_id = user_id
self.description = description
self.created_at = created_at
self.updated_at = updated_at
def __repr__(self):
return f"<CoolDownEvent(event_type='{self.event_type}', stop_at='{self.stop_at}', plugin='{self.plugin}'," \
f"group_id='{self.group_id}', user_id='{self.user_id}', description='{self.description}', " \
f"created_at='{self.created_at}', updated_at='{self.updated_at}')>"
| [
"[email protected]"
] | |
74629208c15fdeab63fc87df6a43365b04f6f8ef | f318085a59b061fce733fda34d5587129b6d2047 | /examen/problema-2.py | 0c2c2c367a0cf4d1b9bdac664b37a2fb7714c69d | [] | no_license | ochesto/esi-ce5303 | 7b933667f35e978b7541770097860ed8d8dec3c7 | aaf0f8c38c628d5bd041fbb2850dd9af6d1a7ccc | refs/heads/master | 2020-07-08T14:59:44.923818 | 2019-11-15T03:26:23 | 2019-11-15T03:26:23 | 203,707,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,017 | py | # Fabian Astorga Cerdas
# Ernesto Ulate Ramirez
from threading import Thread
import queue
import time
# inputs / outputs
input_array = [0, 0, 1, 0, 3]
output_array = []
# global variables
amount_zeroes = 0
ak = queue.Queue()
bk = queue.Queue()
ck = queue.Queue()
dk = queue.Queue()
ek = queue.Queue()
fk = queue.Queue()
gk = queue.Queue()
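# the queues model wires between the concurrent stages: ak feeds raw values to the
# zero detector, bk signals each detected zero to the counter, ek carries non-zero
# values to the writer, and ck/dk/fk/gk act as delay/control lines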
# read data from input_array
def read():
cont = 0
    while not dk.empty() or not gk.empty():
if len(input_array) > cont:
ak.put(input_array[cont])
cont += 1
# increment the running count of zeroes
def count():
global amount_zeroes
while True:
        if not bk.empty():
bk.get()
amount_zeroes += 1
# write results in output_array
def write():
global amount_zeroes
while True:
        if not ek.empty():
data = ek.get()
output_array.append(amount_zeroes)
output_array.append(data)
amount_zeroes = 0
# reset amount_zeroes
def reset():
global amount_zeroes
while True:
        if not fk.empty():
fk.get()
amount_zeroes = 0
gk.put(1)
# detect zeros in the array
def detect0():
while True:
        if not ak.empty():
data = ak.get()
time.sleep(1)
if data == 0:
bk.put(1)
time.sleep(1)
else:
ek.put(data)
# insert delay
def delay0():
dk.put(0)
while True:
        if not ck.empty():
dk.put(ck.get())
def main():
print("Input: ", input_array)
thread_delay0 = Thread(target=delay0)
thread_detect0 = Thread(target=detect0)
thread_count = Thread(target=count)
thread_read = Thread(target=read)
thread_write = Thread(target=write)
thread_reset = Thread(target=reset)
thread_delay0.start()
thread_read.start()
thread_detect0.start()
thread_count.start()
thread_write.start()
thread_reset.start()
time.sleep(10)
print("Output: ", output_array)
main() | [
"[email protected]"
] | |
ffeef5d33acdb08a6571a823192f8c0613dc75bc | fef8c4dacdd1a19bd4ef9982b47d2c1fe77a4ea4 | /src/pygone.py | 09f8bafb62dab48f67e00425657760120f115d88 | [] | no_license | Aloril/pygone | 6548f0769c93fd0513561656000317e37e78f923 | 2eaad99c5e5e0088237f1a9b523dca87e3e502ac | refs/heads/master | 2022-12-17T09:24:20.518632 | 2020-09-24T11:50:57 | 2020-09-24T11:50:57 | 298,381,790 | 0 | 0 | null | 2020-09-24T19:57:14 | 2020-09-24T19:57:13 | null | UTF-8 | Python | false | false | 29,585 | py | #!/usr/bin/env pypy3
import math, sys, time
PIECEPOINTS = {'p': 100, 'r': 480, 'n': 280, 'b': 320, 'q': 960, 'k': 6e4}
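# piece-square tables: a positional bonus for every piece type on every square,
# laid out from White's point of view; base material values are folded in below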
ALLPSQT = {
'p': [[0]*8,
[78, 83, 86, 73, 102, 82, 85, 90],
[7, 29, 21, 44, 40, 31, 44, 7],
[-17, 16, -2, 15, 14, 0, 15, -13],
[-26, 3, 10, 9, 6, 1, 0, -23],
[-22, 9, 5, -11, -10, -2, 3, -19],
[-31, 8, -7, -37, -36, -14, 3, -31],
[0]*8],
'n': [[-66, -53, -75, -75, -10, -55, -58, -70],
[-3, -6, 100, -36, 4, 62, -4, -14],
[10, 67, 1, 74, 73, 27, 62, -2],
[24, 24, 45, 37, 33, 41, 25, 17],
[-1, 5, 31, 21, 22, 35, 2, 0],
[-18, 10, 13, 22, 18, 15, 11, -14],
[-23, -15, 2, 0, 2, 0, -23, -20],
[-74, -23, -26, -24, -19, -35, -22, -69]],
'b': [[-59, -78, -82, -76, -23, -107, -37, -50],
[-11, 20, 35, -42, -39, 31, 2, -22],
[-9, 39, -32, 41, 52, -10, 28, -14],
[25, 17, 20, 34, 26, 25, 15, 10],
[13, 10, 17, 23, 17, 16, 0, 7],
[14, 25, 24, 15, 8, 25, 20, 15],
[19, 20, 11, 6, 7, 6, 20, 16],
[-7, 2, -15, -12, -14, -15, -10, -10]],
'r': [[35, 29, 33, 4, 37, 33, 56, 50],
[55, 29, 56, 67, 55, 62, 34, 60],
[19, 35, 28, 33, 45, 27, 25, 15],
[0, 5, 16, 13, 18, -4, -9, -6],
[-28, -35, -16, -21, -13, -29, -46, -30],
[-42, -28, -42, -25, -25, -35, -26, -46],
[-53, -38, -31, -26, -29, -43, -44, -53],
[-30, -24, -18, 5, -2, -18, -31, -32]],
'q': [[6, 1, -8, -104, 69, 24, 88, 26],
[14, 32, 60, -10, 20, 76, 57, 24],
[-2, 43, 32, 60, 72, 63, 43, 2],
[1, -16, 22, 17, 25, 20, -13, -6],
[-14, -15, -2, -5, -1, -10, -20, -22],
[-30, -6, -13, -11, -16, -11, -16, -27],
[-36, -18, 0, -19, -15, -15, -21, -38],
[-39, -30, -31, -13, -31, -36, -34, -42]],
'k': [[4, 54, 47, -99, -99, 60, 83, -62],
[-32, 10, 45, 56, 56, 55, 10, 3],
[-62, 12, -57, 44, -67, 28, 37, -31],
[-55, 50, 11, -4, -19, 13, 0, -49],
[-55, -43, -52, -28, -51, -47, -8, -50],
[-47, -42, -43, -79, -64, -32, -29, -32],
[-4, 3, -14, -50, -57, -18, 13, 4],
[22, 30, -3, -14, 6, -1, 40, 26]]
}
for tpiece, table in ALLPSQT.items():
for trow in range(8):
for tcolumn in range(8):
ALLPSQT[tpiece][trow][tcolumn] += PIECEPOINTS[tpiece]
WHITE_PIECES = ['P', 'R', 'N', 'B', 'Q', 'K']
BLACK_PIECES = ['p', 'r', 'n', 'b', 'q', 'k']
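# transposition-table entry flags: EXACT score, UPPER bound (fail-low), LOWER bound (fail-high)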
EXACT = 1
UPPER = 2
LOWER = 3
def letter_to_number(letter):
return abs((ord(letter) - 96) - 1)
def number_to_letter(number):
return chr(number + 96)
def print_to_terminal(letter):
print(letter, flush=True)
def get_perf_counter():
return time.perf_counter()
def print_stats(v_depth, v_score, v_time, v_nodes, v_nps, v_pv):
print_to_terminal("info depth " + v_depth + " score cp " + v_score + " time " + v_time + " nodes " + v_nodes + " nps " + v_nps + " pv " + v_pv)
class Board:
# represent the board state as it is
board_state = []
played_move_count = 0
move_list = []
white_valid_moves = []
black_valid_moves = []
capture_moves = []
white_attack_squares = []
black_attack_squares = []
white_castling = [True, True]
black_castling = [True, True]
white_king_position = 'e1'
black_king_position = 'e8'
rolling_score = 0
def reset(self):
# reset board to default state
self.set_default_board_state()
self.played_move_count = 0
self.move_list = []
self.white_valid_moves = []
self.black_valid_moves = []
self.capture_moves = []
self.white_attack_squares = []
self.black_attack_squares = []
self.white_castling = [True, True]
self.black_castling = [True, True]
self.white_king_position = 'e1'
self.black_king_position = 'e8'
self.rolling_score = 0
def set_default_board_state(self):
self.board_state = [['r', 'n', 'b', 'q', 'k', 'b', 'n', 'r'],
['p']*8,
['-']*8,
['-']*8,
['-']*8,
['-']*8,
['P']*8,
['R', 'N', 'B', 'Q', 'K', 'B', 'N', 'R']]
def set_board_state(self, board_state):
self.board_state = board_state
def apply_move(self, uci_coordinate):
from_letter_number = letter_to_number(uci_coordinate[0:1])
from_number = abs(int(uci_coordinate[1:2]) - 8)
to_letter_number = letter_to_number(uci_coordinate[2:3])
to_number = abs(int(uci_coordinate[3:4]) - 8)
from_piece = self.board_state[from_number][from_letter_number]
to_piece = self.board_state[to_number][to_letter_number]
if from_piece == 'K':
self.white_king_position = uci_coordinate[2:4]
elif from_piece == 'k':
self.black_king_position = uci_coordinate[2:4]
is_white = self.played_move_count % 2 == 0
self.board_state[to_number][to_letter_number] = from_piece
self.board_state[from_number][from_letter_number] = '-'
promote = ""
if len(uci_coordinate) > 4:
promote = uci_coordinate[4:5]
if from_piece in ('P', 'p') and to_piece == '-' and uci_coordinate[0:1] != uci_coordinate[2:3]:
self.board_state[from_number][from_letter_number] = '-'
self.board_state[to_number][to_letter_number] = from_piece
self.board_state[from_number][to_letter_number] = '-'
elif (from_piece in ('K', 'k') and uci_coordinate in ('e1g1', 'e1c1', 'e8g8', 'e8c8')):
self.board_state[from_number][from_letter_number] = '-'
if uci_coordinate[2] == 'g':
self.board_state[to_number][to_letter_number + 1] = '-'
self.board_state[from_number][from_letter_number + 1] = 'R' if from_piece == 'K' else 'r'
else:
self.board_state[to_number][to_letter_number - 2] = '-'
self.board_state[from_number][from_letter_number - 1] = 'R' if from_piece == 'K' else 'r'
self.board_state[to_number][to_letter_number] = from_piece
else:
if promote != "":
self.board_state[to_number][to_letter_number] = promote.upper() if is_white else promote
return [from_piece, to_piece]
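    # incremental move evaluation: piece-square delta for the moving piece plus the
    # table value of any captured piece, adjusted for castling rook moves and promotions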
def calculate_score(self, uci_coordinate):
if uci_coordinate is None:
return 0
is_white = self.played_move_count % 2 == 0
offset = 0 if is_white else 7
from_letter_number = letter_to_number(uci_coordinate[0:1])
from_number = abs(int(uci_coordinate[1:2]) - 8)
to_letter_number = letter_to_number(uci_coordinate[2:3])
to_number = abs(int(uci_coordinate[3:4]) - 8)
from_piece = self.board_state[from_number][from_letter_number]
to_piece = self.board_state[to_number][to_letter_number]
local_score = ALLPSQT[from_piece.lower()][abs(to_number - offset)][abs(to_letter_number - offset)] - ALLPSQT[from_piece.lower()][abs(from_number - offset)][abs(from_letter_number - offset)]
if to_piece != '-':
local_score += ALLPSQT[to_piece.lower()][abs(to_number - offset)][abs(to_letter_number - offset)]
if (from_piece in ('K', 'k') and uci_coordinate in ('e1g1', 'e1c1', 'e8g8', 'e8c8')):
if uci_coordinate[2] == 'g':
local_score += ALLPSQT['r'][abs(to_number - offset)][abs(to_letter_number - 1 - offset)] - ALLPSQT['r'][abs(to_number - offset)][abs(to_letter_number + 1 - offset)]
else:
local_score += ALLPSQT['r'][abs(to_number - offset)][abs(to_letter_number + 1 - offset)] - ALLPSQT['r'][abs(to_number - offset)][abs(to_letter_number - 2 - offset)]
if len(uci_coordinate) > 4:
local_score += ALLPSQT['q'][abs(to_number - offset)][abs(to_letter_number - offset)] - ALLPSQT['p'][abs(to_number - offset)][abs(to_letter_number - offset)]
return local_score
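    # returns a copy of the board with the move applied; with calculate_next it also
    # regenerates moves and negates rolling_score so the score follows the side to move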
def make_move(self, uci_coordinate, calculate_next=False):
board = Board()
board.played_move_count = self.played_move_count
board.board_state = [x[:] for x in self.board_state]
board.white_valid_moves = self.white_valid_moves.copy()
board.black_valid_moves = self.black_valid_moves.copy()
board.white_attack_squares = self.white_attack_squares.copy()
board.black_attack_squares = self.black_attack_squares.copy()
board.move_list = self.move_list.copy()
board.white_castling = self.white_castling.copy()
board.black_castling = self.black_castling.copy()
board.white_king_position = self.white_king_position
board.black_king_position = self.black_king_position
# should calc score before moving
board.rolling_score = self.rolling_score + self.calculate_score(uci_coordinate)
if uci_coordinate is not None:
if 'e1' in uci_coordinate:
board.white_castling = [False, False]
if 'a1' in uci_coordinate:
board.white_castling[0] = False
if 'h1' in uci_coordinate:
board.white_castling[1] = False
if 'e8' in uci_coordinate:
board.black_castling = [False, False]
if 'a8' in uci_coordinate:
board.black_castling[0] = False
if 'h8' in uci_coordinate:
board.black_castling[1] = False
board.apply_move(uci_coordinate)
board.move_list.append(uci_coordinate)
board.played_move_count += 1
if calculate_next:
board.get_valid_moves()
board.rolling_score = -board.rolling_score
return board
def str_board(self):
s_board = ''
for i in range(8):
for j in range(8):
s_board += self.board_state[i][j]
return s_board + str(self.played_move_count % 2 == 0)
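    # pseudo-legal move generation for the side to move; also records capture moves
    # and attacked squares (king safety is only verified at the root in Search.search)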
def get_valid_moves(self, previous_turn=False):
is_white = self.played_move_count % 2 == 0
if previous_turn:
is_white = not is_white
valid_moves = []
self.capture_moves = []
attack_squares = []
if is_white:
self.white_valid_moves = []
self.white_attack_squares = []
else:
self.black_valid_moves = []
self.black_attack_squares = []
eval_state = self.board_state
for row in range(8):
for column in range(8):
piece = eval_state[row][column]
if piece == "-" or (is_white and piece in BLACK_PIECES) or (not is_white and piece in WHITE_PIECES):
continue
start_coordinate = number_to_letter(column + 1) + str(abs(row - 8))
if piece.lower() == 'k':
king_moves = {
1: {'column': (column + 0), 'row': (row + 1)},
2: {'column': (column + 0), 'row': (row - 1)},
3: {'column': (column + 1), 'row': (row + 0)},
4: {'column': (column - 1), 'row': (row + 0)},
5: {'column': (column + 1), 'row': (row + 1)},
6: {'column': (column + 1), 'row': (row - 1)},
7: {'column': (column - 1), 'row': (row + 1)},
8: {'column': (column - 1), 'row': (row - 1)},
}
if is_white:
if self.white_castling[1] and start_coordinate == 'e1' and ''.join(eval_state[7][5:8]) == '--R' and \
not set(['e1', 'f1', 'g1']).issubset(set(self.black_attack_squares)):
valid_moves.append(start_coordinate + 'g1')
if self.white_castling[0] and start_coordinate == 'e1' and ''.join(eval_state[7][0:4]) == 'R---' and \
not set(['e1', 'd1', 'c1']).issubset(set(self.black_attack_squares)):
valid_moves.append(start_coordinate + 'c1')
else:
if self.black_castling[1] and start_coordinate == 'e8' and ''.join(eval_state[0][5:8]) == '--r' and \
not set(['e8', 'f8', 'g8']).issubset(set(self.white_attack_squares)):
valid_moves.append(start_coordinate + 'g8')
if self.black_castling[0] and start_coordinate == 'e8' and ''.join(eval_state[0][0:4]) == 'r---' and \
not set(['e8', 'd8', 'c8']).issubset(set(self.white_attack_squares)):
valid_moves.append(start_coordinate + 'c8')
for _, k_move in king_moves.items():
if k_move['column'] in range(8) and k_move['row'] in range(8):
eval_piece = eval_state[k_move['row']][k_move['column']]
if is_white:
can_capture = (eval_piece != '-' and eval_piece.islower())
else:
can_capture = (eval_piece != '-' and eval_piece.isupper())
dest = number_to_letter(k_move['column'] + 1) + str(abs(k_move['row'] - 8))
if eval_piece == '-' or can_capture:
valid_moves.append(start_coordinate + dest)
if can_capture:
self.capture_moves.append(start_coordinate + dest)
attack_squares.append(dest)
if piece.lower() in ('b', 'r', 'q'):
all_moves = {
# rook/queen
1: {'column': column, 'row': (row - 1), 'colIncrement': 0, 'rowIncrement': -1},
2: {'column': column, 'row': (row + 1), 'colIncrement': 0, 'rowIncrement': 1},
3: {'column': (column - 1), 'row': row, 'colIncrement': -1, 'rowIncrement': 0},
4: {'column': (column + 1), 'row': row, 'colIncrement': 1, 'rowIncrement': 0},
# bish/queen
5: {'column': (column - 1), 'row': (row - 1), 'colIncrement': -1, 'rowIncrement': -1},
6: {'column': (column + 1), 'row': (row + 1), 'colIncrement': 1, 'rowIncrement': 1},
7: {'column': (column - 1), 'row': (row + 1), 'colIncrement': -1, 'rowIncrement': 1},
8: {'column': (column + 1), 'row': (row - 1), 'colIncrement': 1, 'rowIncrement': -1},
}
for key, a_move in all_moves.items():
if (key <= 4 and piece.lower() == 'b') or (key >= 5 and piece.lower() == 'r'):
continue
temp_row = a_move['row']
temp_col = a_move['column']
while temp_row in range(8) and temp_col in range(8):
eval_piece = eval_state[temp_row][temp_col]
can_capture = (is_white and eval_piece in BLACK_PIECES) or (not is_white and eval_piece in WHITE_PIECES)
if eval_piece == '-' or can_capture:
dest = number_to_letter(temp_col + 1) + str(abs(temp_row - 8))
valid_moves.append(start_coordinate + dest)
attack_squares.append(dest)
if can_capture:
self.capture_moves.append(start_coordinate + dest)
break
else:
break
temp_row += a_move['rowIncrement']
temp_col += a_move['colIncrement']
if piece.lower() == 'n':
night_moves = {
1: {'column': (column + 1), 'row': (row - 2)},
2: {'column': (column - 1), 'row': (row - 2)},
3: {'column': (column + 2), 'row': (row - 1)},
4: {'column': (column - 2), 'row': (row - 1)},
5: {'column': (column + 1), 'row': (row + 2)},
6: {'column': (column - 1), 'row': (row + 2)},
7: {'column': (column + 2), 'row': (row + 1)},
8: {'column': (column - 2), 'row': (row + 1)}
}
for _, n_move in night_moves.items():
if n_move['column'] in range(8) and n_move['row'] in range(8):
eval_piece = eval_state[n_move['row']][n_move['column']]
if is_white:
can_capture = (eval_piece != '-' and eval_piece.islower())
else:
can_capture = (eval_piece != '-' and eval_piece.isupper())
if eval_piece == '-' or can_capture:
dest = number_to_letter(n_move['column'] + 1) + str(abs(n_move['row'] - 8))
valid_moves.append(start_coordinate + dest)
if can_capture:
self.capture_moves.append(start_coordinate + dest)
attack_squares.append(dest)
if piece.lower() == 'p':
if is_white:
if row > 1 and eval_state[row - 1][column] == '-':
valid_moves.append(start_coordinate + number_to_letter(column + 1) + str(abs(row - 9)))
if row == 6 and eval_state[row - 1][column] == '-' and eval_state[row - 2][column] == '-':
valid_moves.append(start_coordinate + number_to_letter(column + 1) + str(abs(row - 10)))
if row == 1 and eval_state[row - 1][column] == '-':
valid_moves.append(start_coordinate + number_to_letter(column + 1) + str(abs(row - 9)) + 'q')
if ((column - 1) >= 0 and (row - 1) >= 0) or ((column + 1) < 8 and (row - 1) >= 0):
prom = ''
if row == 1:
prom = 'q'
if (column - 1) >= 0:
dest = number_to_letter(column) + str(abs(row - 9))
if eval_state[row - 1][column - 1] == '-' or eval_state[row - 1][column - 1].islower():
if eval_state[row - 1][column - 1] != '-':
valid_moves.append(start_coordinate + dest + prom)
self.capture_moves.append(start_coordinate + dest + prom)
attack_squares.append(dest)
if (column + 1) < 8:
dest = number_to_letter(column + 2) + str(abs(row - 9))
if eval_state[row - 1][column + 1] == '-' or eval_state[row - 1][column + 1].islower():
if eval_state[row - 1][column + 1] != '-':
valid_moves.append(start_coordinate + dest + prom)
self.capture_moves.append(start_coordinate + dest + prom)
attack_squares.append(dest)
else:
if row < 6 and eval_state[row + 1][column] == '-':
valid_moves.append(start_coordinate + number_to_letter(column + 1) + str(abs(row - 7)))
if row == 1 and eval_state[row + 1][column] == '-' and eval_state[row + 2][column] == '-':
valid_moves.append(start_coordinate + number_to_letter(column + 1) + str(abs(row - 6)))
if row == 6 and eval_state[row + 1][column] == '-':
valid_moves.append(start_coordinate + number_to_letter(column + 1) + str(abs(row - 7)) + 'q')
if ((column - 1) >= 0 and (row + 1) < 8) or ((column + 1) < 8 and (row + 1) < 8):
prom = ''
if row == 6:
prom = 'q'
if (column + 1) < 8:
dest = number_to_letter(column + 2) + str(abs(row - 7))
if eval_state[row + 1][column + 1] == '-' or eval_state[row + 1][column + 1].isupper():
if eval_state[row + 1][column + 1] != '-':
valid_moves.append(start_coordinate + dest + prom)
self.capture_moves.append(start_coordinate + dest + prom)
attack_squares.append(dest)
if (column - 1) >= 0:
dest = number_to_letter(column) + str(abs(row - 7))
if eval_state[row + 1][column - 1] == '-' or eval_state[row + 1][column - 1].isupper():
if eval_state[row + 1][column - 1] != '-':
valid_moves.append(start_coordinate + dest + prom)
self.capture_moves.append(start_coordinate + dest + prom)
attack_squares.append(dest)
if is_white:
self.white_valid_moves = valid_moves
self.white_attack_squares = attack_squares
else:
self.black_valid_moves = valid_moves
self.black_attack_squares = attack_squares
return valid_moves
def in_check(self):
if self.played_move_count % 2 != 0:
return self.white_king_position in self.black_attack_squares
return self.black_king_position in self.white_attack_squares
class Search:
v_nodes = 0
v_tthits = 0
v_depth = 0
end_time = 0
tt_bucket = {}
def reset(self):
# reset to base state
self.v_nodes = 0
self.v_tthits = 0
self.tt_bucket = {}
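    # iterative deepening: searches depth 1, 2, ... up to the requested depth and
    # prints UCI "info" output after each completed iteration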
def iterative_search(self, local_board, v_depth, move_time):
start_time = get_perf_counter()
self.end_time = get_perf_counter() + move_time
alpha = -1e8
beta = 1e8
iterative_score = -1e8
iterative_move = None
# self.v_depth = v_depth
self.v_depth = 0
while v_depth > 0:
self.v_depth += 1
v_depth -= 1
(iterative_score, iterative_move) = self.search(local_board, self.v_depth, alpha, beta)
# alpha = max(alpha, iterative_score)
elapsed_time = math.ceil(get_perf_counter() - start_time)
v_nps = math.ceil(self.v_nodes / elapsed_time)
print_stats(str(self.v_depth), str(math.ceil(iterative_score)), str(elapsed_time), str(self.v_nodes), str(v_nps), iterative_move)
return [iterative_score, iterative_move]
def search(self, local_board, v_depth, alpha, beta):
global_score = -1e8
chosen_move = None
local_score = -1e8
is_white = local_board.played_move_count % 2 == 0
v_depth = max(v_depth, 1)
for s_move in sorted(local_board.get_valid_moves(), key=local_board.calculate_score, reverse=is_white):
self.v_nodes += 1
temp_board = local_board.make_move(s_move, True)
if temp_board.in_check():
continue
local_score = -self.pvs(temp_board, -beta, -alpha, v_depth - 1)
if local_score >= global_score:
global_score = local_score
chosen_move = s_move
print_to_terminal("info nodes " + str(self.v_nodes))
return [global_score, chosen_move]
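    # principal variation search (negamax): probes the transposition table, tries a
    # null-window search first and re-searches with the full window on a fail-high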
def pvs(self, local_board, alpha, beta, v_depth):
if v_depth < 1:
return self.q_search(local_board, alpha, beta, 6)
if local_board.rolling_score <= -50000:
return -70000
original_alpha = alpha
tt_entry = self.tt_lookup(local_board)
if tt_entry['tt_depth'] >= v_depth:
if tt_entry['tt_flag'] == EXACT:
self.v_nodes += 1
return tt_entry['tt_value']
if tt_entry['tt_flag'] == LOWER:
alpha = max(alpha, tt_entry['tt_value'])
elif tt_entry['tt_flag'] == UPPER:
beta = min(beta, tt_entry['tt_value'])
if alpha >= beta:
self.v_nodes += 1
return tt_entry['tt_value']
local_score = -1e8
for s_move in local_board.get_valid_moves():
self.v_nodes += 1
temp_board = local_board.make_move(s_move)
local_score = -self.pvs(temp_board, -alpha - 1, -alpha, v_depth - 1)
if alpha < local_score < beta:
local_score = -self.pvs(temp_board, -beta, -local_score, v_depth - 1)
alpha = max(alpha, local_score)
if alpha >= beta:
break
tt_entry['tt_value'] = alpha
if alpha <= original_alpha:
tt_entry['tt_flag'] = UPPER
elif alpha >= beta:
tt_entry['tt_flag'] = LOWER
else:
tt_entry['tt_flag'] = EXACT
tt_entry['tt_depth'] = v_depth
self.store_tt(local_board, tt_entry)
return alpha
def q_search(self, local_board, alpha, beta, v_depth):
if v_depth <= 0:
return local_board.rolling_score
if local_board.rolling_score >= beta:
return beta
alpha = max(local_board.rolling_score, alpha)
local_board.get_valid_moves()
local_score = -1e8
for s_move in local_board.capture_moves:
self.v_nodes += 1
local_score = -self.q_search(local_board.make_move(s_move), -beta, -alpha, v_depth - 1)
if local_score >= beta:
return beta
alpha = max(local_score, alpha)
return alpha
def tt_lookup(self, local_board):
board_string = local_board.str_board()
if board_string not in self.tt_bucket:
self.tt_bucket[board_string] = {
'tt_depth': 0,
'tt_value': -1e5,
'tt_flag': 2
}
return self.tt_bucket[board_string]
def store_tt(self, local_board, tt_entry):
board_string = local_board.str_board()
if len(self.tt_bucket) > 1e7:
self.tt_bucket.clear()
self.tt_bucket[board_string] = tt_entry
def main():
searcher = Search()
game_board = Board()
game_board.reset()
while 1:
try:
line = input()
if line == "quit":
sys.exit()
elif line == "uci":
print_to_terminal("pygone 1.1\nuciok")
elif line == "ucinewgame":
game_board.reset()
searcher.reset()
elif line == "isready":
print_to_terminal("readyok")
elif line.startswith("position"):
moves = line.split()
game_board.reset()
for position_move in moves[3:]:
game_board = game_board.make_move(position_move)
game_board.get_valid_moves(True)
elif line.startswith("go"):
white_time = 1e8
black_time = 1e8
go_depth = 6
input_depth = 0
args = line.split()
for key, arg in enumerate(args):
if arg == 'wtime':
white_time = int(args[key + 1])
elif arg == 'btime':
black_time = int(args[key + 1])
elif arg == 'depth':
go_depth = int(args[key + 1])
elif arg == 'infinite':
input_depth = 30
time_move_calc = max(40 - game_board.played_move_count, 2)
move_time = 1e8
is_white = game_board.played_move_count % 2 == 0
if is_white:
move_time = white_time / (time_move_calc * 1e3)
else:
move_time = black_time / (time_move_calc * 1e3)
if move_time < 25:
go_depth = 5
if move_time < 15:
go_depth = 4
if move_time < 5:
go_depth = 3
if move_time <= 2:
go_depth = 2
move_time = 2
go_depth = max(input_depth, go_depth)
searcher.v_nodes = 0
searcher.v_tthits = 0
(_, s_move) = searcher.iterative_search(game_board, go_depth, move_time)
print_to_terminal("bestmove " + s_move)
except (KeyboardInterrupt, SystemExit):
print_to_terminal('quit')
sys.exit()
except Exception as exc:
print_to_terminal(exc)
raise
main()
| [
"[email protected]"
] | |
c5778b848e1a4633f1101a6b345d0ef4a74f2d8b | efaee41c5662b25af1c1eefbeeb3efc136096665 | /backend/server/apps/endpoints/models.py | ce9c4211bc0f299352e904cc91cf723b0b716a54 | [] | no_license | edgarbasto/machine-learning-webservice | ad93ddd5b667658f9ebfc7c3234debe1c4ef494c | 1deacaaf3dba29cbc5f7c089e5b8d46641fc09b9 | refs/heads/master | 2020-09-13T10:00:57.652982 | 2019-12-06T17:28:07 | 2019-12-06T17:28:07 | 222,735,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,776 | py | from django.db import models
# Create your models here.
class Endpoint(models.Model):
'''
The Endpoint object represents ML API endpoint.
'''
name = models.CharField(max_length=128)
owner = models.CharField(max_length=128)
created_at = models.DateTimeField(auto_now_add=True, blank=True)
class MLAlgorithm(models.Model):
'''
The MLAlgorithm represents the ML algorithm object.
'''
name = models.CharField(max_length=128)
description = models.CharField(max_length=1000)
code = models.CharField(max_length=100000)
version = models.CharField(max_length=128)
owner = models.CharField(max_length=128)
created_at = models.DateTimeField(auto_now_add=True, blank=True)
parent_endpoint = models.ForeignKey(Endpoint, on_delete=models.CASCADE)
class MLAlgorithmStatus(models.Model):
'''
The MLAlgorithmStatus represents the status of the MLAlgorithm which can change over time.
'''
status = models.CharField(max_length=128)
active = models.BooleanField()
created_by = models.CharField(max_length=128)
created_at = models.DateTimeField(auto_now_add=True, blank=True)
parent_mlalgorithm = models.ForeignKey(MLAlgorithm, on_delete=models.CASCADE, related_name= 'status')
class MLRequest(models.Model):
'''
The MLRequest will keep information about all requests to ML algorithms.
'''
input_data = models.CharField(max_length=100000)
full_response = models.CharField(max_length=100000)
response = models.CharField(max_length=100000)
feedback = models.CharField(max_length=100000, blank=True, null=True)
created_at = models.DateTimeField(auto_now_add=True, blank=True)
parent_mlalgorithm = models.ForeignKey(MLAlgorithm, on_delete=models.CASCADE)
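# Illustrative sketch (not part of the original models file): the four models above form
# a chain - an Endpoint groups MLAlgorithm versions, each algorithm accumulates
# MLAlgorithmStatus rows over time, and every prediction served is logged as an
# MLRequest. The helper below shows one plausible way the objects fit together; all
# field values are made-up placeholders, only the model and field names come from above.
def _example_registration():
    endpoint = Endpoint.objects.create(name="classifier", owner="example-owner")
    algorithm = MLAlgorithm.objects.create(
        name="random forest",
        description="example algorithm",
        code="...",
        version="0.0.1",
        owner="example-owner",
        parent_endpoint=endpoint,
    )
    MLAlgorithmStatus.objects.create(
        status="production", active=True, created_by="example-owner", parent_mlalgorithm=algorithm
    )
    return MLRequest.objects.create(
        input_data="{}", full_response="{}", response="{}", parent_mlalgorithm=algorithm
    )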
| [
"[email protected]"
] | |
767056e6660c9bdda0475c00bac87e823eca2023 | 729243a020efed22445849c5cd95e78506f9845d | /Semana04/Multiprocess/mpprog04.py | c6500fcd68472c271798a46ffc0d79607a84c8cd | [] | no_license | yuri-almeid/SEII-YuriLimaAlmeida | 6f031667943f469827bcb89db968d5b7a0188c2f | 81fbf275fcc74a99d8b3630c953aece416546416 | refs/heads/main | 2023-08-10T19:49:18.162469 | 2021-10-07T11:02:25 | 2021-10-07T11:02:25 | 347,435,913 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | import concurrent.futures
import multiprocessing
import time
start = time.perf_counter()
def do_something(seconds):
    print(f'sleeping for {seconds} second(s)...')
time.sleep(seconds)
return 'done sleeping'
# run the executor
with concurrent.futures.ProcessPoolExecutor() as executor:
f1 = executor.submit(do_something,1)
print(f1.result())
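# Illustrative extension (an assumption, not part of the original exercise): submit
# several tasks at once and collect results as they finish with as_completed().
# Note this extra work is also counted by the elapsed-time print below.
with concurrent.futures.ProcessPoolExecutor() as executor:
    futures = [executor.submit(do_something, sec) for sec in (3, 2, 1)]
    for future in concurrent.futures.as_completed(futures):
        print(future.result())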
finish = time.perf_counter()
print(f'Finished in {round(finish-start,3)} second(s)') | [
"[email protected]"
] | |
564fa4166165e29ab7d5d089ff642ae892e463ee | 824d98a9d8bb129073e5816962ddf31494580201 | /iLQRController.py | f32326dbb6b8cd7ff4604448216ee7707d9216bd | [] | no_license | rhester5/dynamics_and_control_sim | 9f804b164372c14b9baa507cd39fe4e333c829d3 | adaab02e0e0b8cfa5453593c8728936d57ae3a87 | refs/heads/main | 2023-05-23T22:48:25.923238 | 2021-06-17T17:10:08 | 2021-06-17T17:10:08 | 377,906,217 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,863 | py | import sys
sys.path.append('../ilqr/ilqr')
from ilqr import iLQR
from iLQRCost import iLQR_cost
import numpy as np
# class iLQR_Controller():
# def __init__(self, dynamics, trajectory, actions, linear, num_steps,):
# self.dynamics = dynamics
# self.trajectory = trajectory
# self.actions = actions
# self.linear = linear
# self.num_steps = num_steps
# def __call__(self, x, sp, k):
# # if not k%self.num_steps:
# # print(k)
# # traj = self.trajectory[k:(k//self.num_steps+1)*self.num_steps, :]
# # traj = self.trajectory[k:k+self.num_steps, :]
# if k+self.num_steps < self.trajectory.shape[0]:
# traj = self.trajectory[k:k+self.num_steps, :]
# us_init = self.actions[k:k+self.num_steps, :]
# horizon = self.num_steps
# else:
# traj = self.trajectory[k:, :]
# us_init = self.actions[k:, :]
# horizon = self.actions.shape[0]-k
# # print(self.trajectory.shape, self.actions.shape, traj.shape, us_init.shape)
# x0 = x # traj[0, :]
# goal = traj[-1, 0:3]
# cost = iLQR_cost(goal, self.linear)
# ilqr = iLQR(self.dynamics, cost, horizon) # traj.shape[0])
# # us_init = self.actions[k:(k//self.num_steps+1)*self.num_steps, :]
# # us_init = self.actions[k:k+self.num_steps, :]
# xs, us = ilqr.fit(x0, us_init)
# return us[0]
# ok it's stupid to re-optimize the whole trajectory (at least in python)
# I should do an initial optimization over the whole trajectory
# break it into chunks
# and re-optimize every chunk
# how do I go from one chunk to the next though without it fucking up though?
# yeah so far this doesn't work, re-optimizing the entirety of the remaining trajectory works best
# but how to do it quickly? re-optimizing every 50 steps was the smallest number of steps I could do
# re-optimizing every 10 steps was taking forever
# but the best I can do is equivalent to 2 Hz and it was not good enough, really need it to be like 40 Hz probably
# is this what tedrake was talking about? the solvers haven't caught up yet
# or maybe it would be quicker if the dynamics were hard coded instead of auto differentiated?
# obviously it's not recomputing the dynamics every time, but whatever theano object it plugs into for
# the Jacobian might take longer than just a numpy array (but that seems unlikely since all the
# autodiff and tensor stuff was designed for deep learning where training time is very important)
# ooo wait one thing I'm clearly doing wrong is setting x0 to where it's supposed to be, not where it is... let's try that
# eh that's better but x position is still a mess and the z tracking is nowhere near as good as the "re-optimize everything every 50 steps"
# 20 steps is better but still not satisfactory
# or what if we do a receding horizon? e.g. always re-optimize the next 10 steps
# struggling with the receding horizon because actions and trajectory are different length, could that have been causing problems elsewhere?
# ok receding horizon is trash, at least with 10 time steps, wonder what happens if I reduce the time step length (and increase the number of time steps looked at a time?)
# hey that actually worked pretty well, like really well with a 20 step horizon
# or honestly reducing time step length and re-optimizing the entire trajectory might be the best
# ok yeah that's almost literally perfect
# I'm very happy
# so in conclusion, re-optimizing the entire trajectory as you go rather than re-optimizing a receding horizon is better and can be done sufficiently quickly at 20 Hz instead of 100 Hz
# oh wait shit nvm I'm re-optimizing the entire trajectory every 10 time steps which in this case is... every 1/2 second lol but for some reason it works really well when it didn't before?
# anyway whatever, we're gucci, the x position control isn't perfect but it's not bad
# oof let's try it on an x step
# ok hype it works very well, z is good during the x step response
# now I'm curious what happens if the geometric controller is given the iLQR trajectory
# and I'm curious if I can get iLQR to follow some crazy ass trajectories
# need to figure out how to formulate them because the way I was making the circle did not work at all
# and then once I can make complicated trajectories instead of just steps I can initialize those trajectories with a plannnnnnerrrrrr
# and if I can get the geometric controller working better than the base iLQR then I'll have all of the parts of the pipeline that I'm interested in
# (plan -> traj opt -> control -> state estimation)
# and then I can either do it in C++ or think about research directions or shit like that
# also try turning up the noise on the kalman filter
# need to reimplement the geometric controller
# also wondering if including linear and angular acceleration in the dynamics model/trajectory/state would be helpful
# I mean you can get dv/dt and dw/dt directly from the velocity/angular velocity differences and the time step length
# so 2 main pieces now are:
# - get iLQR to generate trajectories of any shape
# - get geometric controller to follow those trajectories (look at implementation again based on kdc report)
# ^ and subject to increasing amounts of noise
class iLQR_Controller():
def __init__(self, dynamics, actions, cost, us_init, K):
self.dynamics = dynamics
self.actions = actions
self.cost = cost
self.us_init = us_init
self.K = K
def __call__(self, x, sp, next_sp, k):
if k > 0 and not k % 10:
ilqr = iLQR(self.dynamics, self.cost, self.K-k)
xs, us = ilqr.fit(x, self.us_init[k:, :])
self.actions = np.zeros(self.actions.shape)
self.actions[k:, :] = us
return us[0]
else:
return self.actions[k]
def set_gains(self, gains):
raise TypeError('iLQR Controller does not have gains')
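# Minimal usage sketch (an assumption about the surrounding sim code, based only on the
# calls above): solve the full horizon once for a warm start, then let the controller
# re-optimize the remaining trajectory every 10 steps as described in the notes.
# `step_dynamics` is hypothetical and left commented out.
def _example_control_loop(dynamics, cost, x0, us_init, K):
    xs, us = iLQR(dynamics, cost, K).fit(x0, us_init)  # initial full-horizon solve
    controller = iLQR_Controller(dynamics, us, cost, us, K)
    x = x0
    for k in range(K):
        u = controller(x, None, None, k)  # sp/next_sp are ignored by this controller
        # x = step_dynamics(x, u)  # hypothetical: advance the simulated quadrotor
    return us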
# class iLQR_Controller():
# def __init__(self, actions):
# self.actions = actions
# def __call__(self, x, sp, k):
# return self.actions[k] | [
"[email protected]"
] | |
59a5d48378d296750f67db8bbb5961de3268bce6 | 4a10c668331cd32fb0efc4de3c0e2608fd48d47d | /pipeline_simulation/scripts/summarize_simulation_stats_all.py | 8de7984bd79a7168d6bc146f7959a8f7140d8e75 | [] | no_license | joannahard/Genome_Biology_2019 | 312df456c15447cac6cd7809bd89638ffbf54324 | 51f70d6e6ee9927af160153e5fb787fc847b097f | refs/heads/master | 2020-04-27T17:38:48.094704 | 2019-03-11T12:10:29 | 2019-03-11T12:10:29 | 174,530,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,785 | py | import pandas as pd
import os, sys, math
from argparse import ArgumentParser
parser = ArgumentParser(description='Merge all stats from lira,conbase,monovar & sccaller')
# [Required input]
parser.add_argument('-o', '--outprefix', metavar='outprefix', help='Outprefix', required=True)
parser.add_argument('-i', '--infiles', metavar='infiles', help='Input stats files', required=True, nargs="+")
args = parser.parse_args()
##################################################
# check inputs:
outCB = pd.DataFrame()
outMV = pd.DataFrame()
outSCC = pd.DataFrame()
outL = pd.DataFrame()
for infile in args.infiles:
if not os.path.exists(infile):
print("Error! No such file "+ infile)
sys.exit(1)
data = pd.read_csv(infile, sep = ",", index_col = 0)
sim_name = infile.split("/")[1]
settings = sim_name.split("_")[1:] # ['snv1', 'eal0.5', 'ado0.5']
snv = float(settings[0].replace("snv",""))
eal = float(settings[1].replace("eal",""))
ado = float(settings[2].replace("ado",""))
data.loc["fSNV"] = [snv,snv,snv,snv]
data.loc["fEAL"] = [eal,eal,eal,eal]
data.loc["fADO"] = [ado,ado,ado,ado]
outCB[sim_name] = data["conbase"]
outMV[sim_name] = data["monovar"]
outL[sim_name] = data["lira"]
outSCC[sim_name] = data["sccaller"]
#print(out)
outCB.to_csv(args.outprefix + "_conbase.csv")
print("Conbase stats written to "+ args.outprefix + "_conbase.csv")
outMV.to_csv(args.outprefix + "_monovar.csv")
print("Monovar stats written to "+ args.outprefix + "_monovar.csv")
outL.to_csv(args.outprefix + "_lira.csv")
print("Lira stats written to "+ args.outprefix + "_lira.csv")
outSCC.to_csv(args.outprefix + "_sccaller.csv")
print("SCCaller stats written to "+ args.outprefix + "_sccaller.csv")
| [
"[email protected]"
] | |
f3bf30a1633fb85c491ee3d011f3937fe306d3bd | a6fae33cdf3d3cb0b0d458c2825a8d8cc010cd25 | /l3/z2/.history/population_20200522202231.py | ffc71c3c452970dfdc5d316b5285cc18dab8386d | [] | no_license | Qabrix/optimization_amh | 12aab7c7980b38812ec38b7e494e82452a4176b4 | 6a4f5b897a4bef25f6e2acf535ba20ace7351689 | refs/heads/main | 2022-12-28T10:57:00.064130 | 2020-10-17T22:57:27 | 2020-10-17T22:57:27 | 304,983,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,819 | py | import random
import numpy as np
from utils import decision
class Inhabitant:
def __init__(self, gene, value=0):
self.gene = gene
        self.value = value
def __iter__(self):
for char in self.gene:
yield char
def __len__(self):
return len(self.gene)
def __getitem__(self, item):
return self.gene[item]
def get_str_gene(self, up):
return "".join(self.gene[:up])
class Population:
def __init__(self, population_size, all_puzzle_keys, starter_words):
self.all_puzzle_keys = all_puzzle_keys
self.best_res = None
self.population_size = population_size
self.generation = self._gen_generation(starter_words)
def __iter__(self):
for inhabitant in self.generation:
yield inhabitant
def _random_word(self):
return random.sample(self.all_puzzle_keys, len(self.all_puzzle_keys))
def _gen_generation(self, starter_words):
min_size = min([len(word) for word in starter_words])
max_size = max([len(word) for word in starter_words])
generation = []
for word in starter_words:
generation.append(Inhabitant(list(word)))
for _ in range(len(starter_words), self.population_size):
word = self._random_word()[: random.randint(min_size, max_size)]
generation.append(Inhabitant(word))
return generation
def sorted_generation(self):
return sorted(self.generation, key=lambda x: x.value, reverse=True)
def make_selection(self, elite_percentage, percentage=0.75):
selection = []
sorted_generation = self.sorted_generation()
selection_size = int(self.population_size * percentage)
elite_size = int(elite_percentage * selection_size)
for inhabitant in sorted_generation[:elite_size]:
selection.append(inhabitant)
if elite_size - selection_size < 0:
for inhabitant in sorted_generation[elite_size - selection_size :]:
selection.append(inhabitant)
return selection
def _check_if_correct(self, word):
possible_chars = self.all_puzzle_keys.copy()
for char in word:
if char in possible_chars:
possible_chars.remove(char)
else:
return False
return True
def recombinate(self, elite_percentage=0.6):
selection = self.make_selection(elite_percentage)
permutation = np.random.permutation(len(selection))
new_generation = []
new_generation.append(Inhabitant(selection[0].gene.copy()))
new_generation.append(Inhabitant(selection[1].gene.copy()))
for i in range(1, len(permutation)):
pivot = random.randint(
0,
min(
len(selection[permutation[i % len(permutation)]]),
len(selection[permutation[(i + 1) % len(permutation)]]),
)
// 2,
)
if len(selection[permutation[i % len(permutation)]]) > 1:
pivot2 = random.randint(len(selection[permutation[i % len(permutation)]])//2, len(selection[permutation[i % len(permutation)]])-1)
else:
pivot2 = 1
new_word = selection[permutation[i % len(permutation)]][:pivot2]
if self._check_if_correct(new_word):
new_generation.append(Inhabitant(new_word))
else:
new_generation.append(
Inhabitant(selection[permutation[i % len(permutation)]].gene)
)
if len(selection[permutation[(i+1) % len(permutation)]]) > 2:
pivot2 = random.randint(0, len(selection[permutation[(i+1) % len(permutation)]])-2)
else:
pivot2 = 1
new_word = selection[permutation[(i+1) % len(permutation)]][pivot2:]
if self._check_if_correct(new_word):
new_generation.append(Inhabitant(new_word))
else:
new_generation.append(
Inhabitant(selection[permutation[(i + 1) % len(permutation)]].gene)
)
self.generation = new_generation
def mutate(
self,
min_swap_probability=0.2,
max_swap_probability=0.5,
inverse_probability=0.001,
random_probability=0.05,
shift_probability=0.001,
insert_probability=0.9,
replace_probability=0.5,
):
swap_probability = random.uniform(min_swap_probability, max_swap_probability)
for inhabitant in self.generation[1:]:
if decision(insert_probability):
insert_amount = random.randint(1, 3)
if decision(0.5): # remove decision
if(len(inhabitant)+insert_amount < len(self.all_puzzle_keys)):
possible_chars = self._random_word()
for char in inhabitant.gene:
if char in possible_chars:
possible_chars.remove(char)
if decision(0.33):
inhabitant.gene += possible_chars[:insert_amount]
elif decision(0.5):
inhabitant.gene = possible_chars[:insert_amount] + inhabitant.gene
else:
insert_index = random.randint(1, len(inhabitant.gene))
inhabitant.gene = inhabitant.gene[:insert_index] + possible_chars[:insert_amount] + inhabitant.gene[insert_index:]
else:
if(len(inhabitant)-insert_amount > 0):
if len(inhabitant) - insert_amount > 0:
if decision(0.33):
inhabitant.gene = inhabitant.gene[insert_amount:]
elif decision(0.5):
inhabitant.gene = inhabitant.gene[:-insert_amount]
else:
remove_index = random.randint(1, len(inhabitant.gene)-insert_amount)
inhabitant.gene = (
inhabitant.gene[:remove_index]
+ inhabitant.gene[remove_index+insert_amount:]
)
elif decision(random_probability):
inhabitant.gene = self._random_word()
else:
if decision(shift_probability):
shift_range = random.randint(1, 3)
for _ in range(shift_range + 1):
inhabitant.gene = [inhabitant.gene[-1]] + inhabitant.gene[:-1]
if decision(replace_probability):
replace_index = random.randint(0, len(inhabitant.gene)-1)
possible_chars = self._random_word()
for char in inhabitant.gene:
if char in possible_chars:
possible_chars.remove(char)
if possible_chars:
inhabitant.gene[replace_index] = possible_chars[0]
else:
for i in range(len(inhabitant.gene) // 2):
if decision(swap_probability):
random_id = random.randint(0, len(inhabitant) - 1)
inhabitant.gene[i], inhabitant.gene[random_id] = (
inhabitant.gene[random_id],
inhabitant.gene[i],
)
if decision(inverse_probability):
inhabitant.gene = inhabitant.gene[::-1] | [
"[email protected]"
] | |
48346c248586b26277996331954eb2e6c4b6b052 | 14d846c87eada5c39f05a9e5d9c5d8d1638dae37 | /test/provider/test_requests_provider.py | bfc10abf08c8836de2b4249484ea6776ce55a778 | [] | no_license | ShaneLee/windscreen | ec738b2b085e1b2ab11304300f2ede742446ee8a | f6f25b7429c64861ce9d53f10c975ef73a188981 | refs/heads/master | 2023-06-25T10:41:06.464216 | 2021-07-24T13:45:55 | 2021-07-24T13:45:55 | 388,843,443 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | import unittest
from src.provider.requests_provider import RequestsProvider
class TestRequestsProvider(unittest.TestCase):
def test_get_requests(self):
self.assertIsNotNone(RequestsProvider().get())
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
a803771bd0a74ff59e8144cfac65ca2320281bb0 | 741c769a5d38b8ba563fa399d5358b9146d6989d | /fakeRequests.py | 953b6b9ce4b4956ababa5ad5bd53b19916c6d0be | [] | no_license | RogerMonteiro124/Python | 54397b1182028619ead8bb491bbb432e2a11d1a4 | 0606032bf1598441ee0bc3f63aca80183e403021 | refs/heads/master | 2021-01-19T10:17:09.122070 | 2018-04-06T20:36:28 | 2018-04-06T20:36:28 | 87,849,093 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,228 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#Developed by Roger Monteiro
#Github: https://github.com/RogerMonteiro124
import os
import urllib
from urllib import FancyURLopener
from random import *
def getUrl():
os.system("setterm -foreground green")
    url=raw_input('Target site address:\n>_http://')
return str(url)
def getOrigem():
os.system("setterm -foreground green")
    prx=raw_input('Origin (proxy) address:\n>_')
return str(prx)
def conectaSite(url,prx,user):
Origem = {"http" : "http://"+prx}
urllib.URLopener.version = user
os.system("setterm -foreground red")
resposta = urllib.urlopen(url,proxies=Origem).read()
return str(resposta)
def Banner():
os.system("clear")
os.system("setterm -foreground red")
print '''
·▄▄▄ ▄▄▄· ▄ •▄ ▄▄▄ . ▄▄▄ ▄▄▄ ..▄▄▄ ▄• ▄▌▄▄▄ ..▄▄ · ▄▄▄▄▄.▄▄ ·
▐▄▄·▐█ ▀█ █▌▄▌▪▀▄.▀· ▀▄ █·▀▄.▀·▐▀•▀█ █▪██▌▀▄.▀·▐█ ▀. •██ ▐█ ▀.
██▪ ▄█▀▀█ ▐▀▀▄·▐▀▀▪▄ ▐▀▀▄ ▐▀▀▪▄█▌·.█▌█▌▐█▌▐▀▀▪▄▄▀▀▀█▄ ▐█.▪▄▀▀▀█▄
██▌.▐█ ▪▐▌▐█.█▌▐█▄▄▌ ▐█•█▌▐█▄▄▌▐█▪▄█·▐█▄█▌▐█▄▄▌▐█▄▪▐█ ▐█▌·▐█▄▪▐█
▀▀▀ ▀ ▀ ·▀ ▀ ▀▀▀ .▀ ▀ ▀▀▀ ·▀▀█. ▀▀▀ ▀▀▀ ▀▀▀▀ ▀▀▀ ▀▀▀▀
'''
Help()
def Help():
os.system("setterm -foreground white")
print '''
    Usage: python fakeRequests.py
    The web address is the target page you want to access
    with a fake UserAgent.
'''
def main():
Banner()
    opt=input('''[1] - Use a specific UserAgent\n
    [2] - Use a random UserAgent\n>_''')
dic = {
1:'Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)',
2:'Mozilla/5.0 (compatible; MSIE 10.0; Macintosh; Intel Mac OS X 10_7_3; Trident/6.0)',
3:'Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.2; SV1; .NET CLR 3.3.69573; WOW64; en-US)',
4:'Opera/9.80 (X11; Linux i686; U; ru) Presto/2.8.131 Version/11.11',
5:'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.2 (KHTML, like Gecko) Chrome/22.0.1216.0 Safari/537.2',
6:'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.13 (KHTML, like Gecko) Chrome/24.0.1290.1 Safari/537.13',
7:'Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11',
8:'Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1',
9:'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:15.0) Gecko/20100101 Firefox/15.0.1',
10:'Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25'
}
if opt ==1:
for x in dic:
print '[',x,'] ',dic[x]+'\n'
        userOpt=input('Option\n>_')
user=dic[userOpt]
url='http://'+getUrl()
prx=getOrigem()
print conectaSite(url,prx,user)
elif opt ==2:
        user=dic[randint(1,10)]  # dict keys run 1-10; randint(1,11) could raise KeyError
url='http://'+getUrl()
prx=getOrigem()
print conectaSite(url,prx,user)
else:
print "Opção invalida"
main()
main()
| [
"[email protected]"
] | |
8d004556ac7cdf7846fd6ce3fd624d9082919774 | 904fd885e9c9c629246aa2c8b533745604bc55c2 | /09. Decorators/01. Lab/02. Vowel Filter.py | 3b6a2c9851b59f96f8639f74dd1600609de38c28 | [] | no_license | Tuchev/Python-OOP---June---2021 | 3cc5e9206f910262d567f4f151bb6cd1b17779fe | 782efd1bde9a4177d6fcd3a33c85a48d7c8991fd | refs/heads/master | 2023-07-07T05:31:27.384073 | 2021-08-11T12:00:51 | 2021-08-11T12:00:51 | 387,717,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 266 | py | def vowel_filter(function):
def wrapper():
letters = function()
return [letter for letter in letters if letter.lower() in "aouei"]
return wrapper
@vowel_filter
def get_letters():
return ["a", "b", "c", "d", "e"]
print(get_letters())
| [
"[email protected]"
] | |
66583f834ce0817cdac30dd67385d1cb0b84b683 | 03e3138f99f275d15d41a5c5bfb212f85d64d02e | /source/res/scripts/client/gui/Scaleform/locale/CREW_OPERATIONS.py | a4684bacfdaafe4122ed2edf2f9193f51aec4da7 | [] | no_license | TrenSeP/WorldOfTanks-Decompiled | e428728e7901146d0b599d02c930d70532232a97 | 1faa748acec1b7e435b657fd054ecba23dd72778 | refs/heads/1.4.1 | 2020-04-27T08:07:49.813023 | 2019-03-05T17:37:06 | 2019-03-05T17:37:06 | 174,159,837 | 1 | 0 | null | 2019-03-06T14:33:33 | 2019-03-06T14:24:36 | Python | UTF-8 | Python | false | false | 3,029 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/Scaleform/locale/CREW_OPERATIONS.py
class CREW_OPERATIONS(object):
RETURN_WARNING_MEMBERSINBATTLE_TOOLTIP = '#crew_operations:return/warning/membersInBattle/tooltip'
RETURN_WARNING_MEMBERDEMOBILIZED_TOOLTIP = '#crew_operations:return/warning/memberDemobilized/tooltip'
DROPINBARRACK_WARNING_NOSPACE_TOOLTIP = '#crew_operations:dropInBarrack/warning/noSpace/tooltip'
WINDOW_TITLE = '#crew_operations:window/title'
CREWOPERATIONS_BTN_TOOLTIP_HEADER = '#crew_operations:crewOperations/btn/tooltip/header'
CREWOPERATIONS_BTN_TOOLTIP_BODY = '#crew_operations:crewOperations/btn/tooltip/body'
RETRAIN_TITLE = '#crew_operations:retrain/title'
RETRAIN_DESCRIPTION = '#crew_operations:retrain/description'
RETRAIN_BUTTON_LABEL = '#crew_operations:retrain/button/label'
RETRAIN_ERROR_NOCREW = '#crew_operations:retrain/error/noCrew'
RETRAIN_ERROR_ALREADYRETRAINED = '#crew_operations:retrain/error/alreadyRetrained'
RETURN_TITLE = '#crew_operations:return/title'
RETURN_DESCRIPTION = '#crew_operations:return/description'
RETURN_BUTTON_LABEL = '#crew_operations:return/button/label'
RETURN_ERROR_NOPREVIOUS = '#crew_operations:return/error/noPrevious'
RETURN_ERROR_ALLDEMOBILIZED = '#crew_operations:return/error/allDemobilized'
RETURN_ERROR_ALREADYONPLACES = '#crew_operations:return/error/alreadyOnPlaces'
RETURN_WARNING_MEMBERDEMOBILIZED_TOOLTIP_HEADER = '#crew_operations:return/warning/memberDemobilized/tooltip/header'
RETURN_WARNING_MEMBERDEMOBILIZED_TOOLTIP_BODY = '#crew_operations:return/warning/memberDemobilized/tooltip/body'
RETURN_WARNING_NOSPACE_TOOLTIP_HEADER = '#crew_operations:return/warning/noSpace/tooltip/header'
RETURN_WARNING_NOSPACE_TOOLTIP_BODY = '#crew_operations:return/warning/noSpace/tooltip/body'
RETURN_WARNING_MEMBERSINBATTLE_TOOLTIP_HEADER = '#crew_operations:return/warning/membersInBattle/tooltip/header'
RETURN_WARNING_MEMBERSINBATTLE_TOOLTIP_BODY = '#crew_operations:return/warning/membersInBattle/tooltip/body'
DROPINBARRACK_TITLE = '#crew_operations:dropInBarrack/title'
DROPINBARRACK_DESCRIPTION = '#crew_operations:dropInBarrack/description'
DROPINBARRACK_BUTTON_LABEL = '#crew_operations:dropInBarrack/button/label'
DROPINBARRACK_ERROR_NOCREW = '#crew_operations:dropInBarrack/error/noCrew'
DROPINBARRACK_WARNING_NOSPACE_TOOLTIP_HEADER = '#crew_operations:dropInBarrack/warning/noSpace/tooltip/header'
DROPINBARRACK_WARNING_NOSPACE_TOOLTIP_BODY = '#crew_operations:dropInBarrack/warning/noSpace/tooltip/body'
ROLECHANGE_WINDOWTITLE = '#crew_operations:roleChange/windowTitle'
ROLECHANGE_ACCEPTBUTTON = '#crew_operations:roleChange/acceptButton'
ROLECHANGE_VEHICLESELECTLABEL = '#crew_operations:roleChange/vehicleSelectlabel'
ROLECHANGE_ROLESELECTLABEL = '#crew_operations:roleChange/roleSelectLabel'
ROLECHANGE_FOOTERINFO = '#crew_operations:roleChange/footerInfo'
| [
"[email protected]"
] | |
3533c8b4a56628c86921a0853ec4dbe46f9fdc31 | 6f3389c93cf1057bca5398940932561c19dbec1d | /Solving Club/휴강기간/이진수2.py | 845a4033c4f3f9f7848517c39d2134794469ec92 | [] | no_license | Jeonseoghyeon/APR | 0af9ac1b4ba666a97d78b92e3e599c5a8bc87acc | 7a3822913b84ae6ecf80c8e35c7c8e400981d1fe | refs/heads/master | 2020-12-23T13:55:24.194463 | 2020-06-30T07:00:23 | 2020-06-30T07:00:23 | 237,172,582 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 487 | py | import sys
sys.stdin = open("이진수2_input.txt","r")
T = int(input())
for tc in range(1,T+1):
N = float(input())
i = 0
result = ""
while i !=13:
i+=1
N *= 2
if N == 0:
print("#{} {}".format(tc,result))
break
else:
if N >=1:
N -=1
result+= '1'
else:
result+= '0'
if N != 0:
print("#{} {}".format(tc,'overflow'))
| [
"[email protected]"
] | |
80da78c8d02078a86dfcc60853d66eadc2c25c7d | 404df203a3fbcde272856db26835381400a3f896 | /venv/bin/pip3.8 | ee6fd9c5ab18215234ab7856026729d7f6b6370d | [] | no_license | fayza-khan/DecoratorsGenerators_Questions | ad6858d5bac7b5ab81ed69514d5a7b5134bf45c5 | d4bf2393e68ac8b685efbd2df4ae6c47670ddd3b | refs/heads/master | 2022-12-23T10:32:12.280386 | 2020-10-03T16:23:07 | 2020-10-03T16:23:07 | 300,924,606 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | 8 | #!/Users/test/Desktop/python_pycharm/decorators/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.8'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.8')()
)
| [
"[email protected]"
] | |
4bba01aef14bab61d523392feb74baf039036236 | d8f672acc944ab09d63d938682b4890d2de70039 | /crcapp/admin.py | 91e922513591a90655d8181b1f609438ad9d2948 | [
"Unlicense"
] | permissive | tuckj3569/mywebsite | 28c971c919e7c82f37965d62c05c0bfb50ff0d24 | 1123d00380634207a03281d121792dda7a6c0136 | refs/heads/master | 2022-05-30T08:24:31.427905 | 2018-08-31T02:03:10 | 2018-08-31T02:03:10 | 145,658,376 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 531 | py | from django.contrib import admin
# Register your models here.
from .models import Car,Orders,Store,Customer
@admin.register(Car)
class CarAdmin(admin.ModelAdmin):
list_display=['Car_MakeName','Car_Model','Car_Series','Car_SeriesYear','Car_PriceNew']
@admin.register(Orders)
class OrdersAdmin(admin.ModelAdmin):
list_display=['Order_ID']
@admin.register(Store)
class StoreAdmin(admin.ModelAdmin):
list_display=['Store_Name']
@admin.register(Customer)
class CustomerAdmin(admin.ModelAdmin):
list_display=['Customer_ID'] | [
"[email protected]"
] | |
d5fb01fc5f5ddc9d417b7f00ddc7f123c601ec92 | ba7f4e6241244a0e5d58e3a66c128774a344f25c | /projectGNT/gnt/migrations/0002_auto_20171214_1153.py | d5e7a4d0a65a917482b36247229be9b9faa0af17 | [] | no_license | joelranjithjebanesan7/Give-and-Take-API | 83158ba1af2603b523f112d0f577534fdbb9b377 | a646b09b93479b5066fa69ca0db8a83829b0407f | refs/heads/master | 2021-08-31T11:21:09.934108 | 2017-12-21T05:32:06 | 2017-12-21T05:32:06 | 114,759,456 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 520 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-14 11:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('gnt', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='giveoffer',
name='giver',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='gnt.Profile'),
),
]
| [
"[email protected]"
] | |
d85b393ecf305e5519f1c7e9e3090bf56c993193 | 991143820bf4746c681f380039f7311c76024a94 | /wb5m3.py | 46b7eb0ff31e17e0a6bae74c5ba842c03bbd0184 | [] | no_license | ddaannddrree/baseball | 60be947cd2fef24546850c64034c17130ec07367 | d03858619aec96d438d9ac41dcd434baa53d1590 | refs/heads/master | 2020-12-29T02:21:41.812361 | 2017-06-01T05:06:37 | 2017-06-01T05:06:37 | 38,767,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,527 | py | #!/usr/bin/python
import urllib
import urllib2
import xml.etree.ElementTree as ET
import datetime
from bs4 import BeautifulSoup
import re
import time
from datetime import date
from datetime import timedelta
from datetime import datetime
import string
import collections
import copy
import pdb
import sys
import traceback
from gd2functions import *
from wbhelpers import *
import os.path
codehome = "/home/eddie7/code/"
#codehome = "/Users/martin/Baseball/WhiskeyBall/Code/"
monthfolder = "wb5m3/"
def getVictoryPoints(ts):
#return a list of dictonaries with the season-point scores
#PA, BB%-K%, wOBA, wSB
#IP, K%-BB%, FIP, LOB%
cats = makeScoringNames(10)
fnames_for_scoring = ['team']
fnames_for_scoring.extend(cats)
fnames_for_scoring.append('total')
tsc = copy.deepcopy(ts)
vsc = []
for tsii in tsc:
tsii['total'] = 0
for lab in fnames_for_scoring[1:len(fnames_for_scoring)-1]:
nteams = len(tsc)
for tt in range(0,nteams):
dascore = scoreSingle([x[lab] for x in ts],tt)
tsc[tt][lab] = dascore
tsc[tt]['total'] = tsc[tt]['total'] + dascore
return tsc
def GameInRange(gid,start,end):
gamedate = gid[0:10]
if gamedate >= start and gamedate <= end:
return True
return False
def pythag(r,ra):
#1/(1+[runs allowed/runs scored]^2)
if r == 0:
if ra == 0:
return 0.5
else:
return 0
else:
return 1/(1+(float(ra)/float(r))**2)
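# Worked example of the formula above: pythag(4, 2) = 1/(1 + (2/4)**2) = 1/1.25 = 0.8,
# i.e. a team scoring twice what it allows gets an expected winning fraction of 0.8;
# pythag(0, 0) falls back to 0.5 and pythag(0, ra>0) to 0 via the branches above.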
def makeScoringNames(N):
zinlist = ['inn'+str(i+1) for i in range(N)]
zinlist[-1] = 'innX'
return zinlist
def getFilledTeams(date1,date2):
ts = getTeams()
(ress) = CompileRangeGames(date1,date2)
labels = ['inning_runs','inning_runs_against']
for t in ts:
#print 'doing', t['team_name']
zlist = range(0,10)
zinlist = makeScoringNames(len(zlist))
mysum=lambda team,label,idx,llist:sum([int(x[label][idx]) for x in llist if x['team'] == team])
stats = {}
#need to do the teams separately!!!
# get player stats
i=0
t['inning_recs'] = {}
for tm in t['team_teams']:
t['inning_recs'][tm] = {}
myir = t['inning_recs'][tm]
for lbl in labels:
myir[lbl] = [mysum(tm,lbl,i,ress) for i in zlist]
myir['pythag'] = [pythag(myir['inning_runs'][i],myir['inning_runs_against'][i]) for i in zlist]
t['inning_recs']['joint'] = [t['inning_recs'][t['team_teams'][0]]['pythag'][i] + t['inning_recs'][t['team_teams'][1]]['pythag'][i] for i in zlist]
for i in range(10):
t[zinlist[i]] = t['inning_recs']['joint'][i]
return ts,ress
def printTeamTeamInnings(ts,tm):
print ts['team'] + ',' + tm + ',runs, ' + str(ts['inning_recs'][tm]['inning_runs'])
print ts['team'] + ',' + tm + ',ra, ' + str(ts['inning_recs'][tm]['inning_runs_against'])
print ts['team'] + ',' + tm + ',pythag, ' + str(ts['inning_recs'][tm]['pythag'])
def printTeamInnings(ts):
printTeamTeamInnings(ts,ts['team_teams'][0])
printTeamTeamInnings(ts,ts['team_teams'][1])
def printAllTeams(ts):
for t in ts:
printTeamInnings(t)
def printFilesForTeams(ts,press,bress):
for t in ts:
bff = open(codehome + monthfolder + t['team_name'].replace(" ","") + '_batters.csv','wb')
pff = open(codehome + monthfolder + t['team_name'].replace(" ","") + '_pitchers.csv','wb')
bre = getBattingRawEvents(bress,t)
pre = getPitchingRawEvents(press,t)
printDictListCSV(bff,bre)
printDictListCSV(pff,pre)
bff.close()
pff.close()
def getTeams():
team_names = ['Drumpfallacious','No-Talent Ass Clowns', 'Portlandia Misfits', 'The Rube', 'Paly Players', 'Dr. Watson', 'Buena Vista Bottoms', 'Damnedest of the Nice']
players = loadCSVDict(codehome + 'players2016.csv')
team_teams = [['lan','tba'],['sea','det'],['tex','was'],['sfn','hou'],['bal','nya'],['nyn','tor'],['cle','sln'],['bos','kca']]
mscores = [[7,7],[3.5,8],[3.5,4],[5,5],[1,1],[2,3],[6,6],[8,2]]
mtotal = [sum(x) for x in mscores]
teams = []
i=0
for team_name in team_names:
teams.append({'team_name':team_name, 'team':team_name, 'team_teams':team_teams[i], 'mscores':mscores[i], 'mtotal':mtotal[i]})
i=i+1
return teams
def CleanStats(sts):
sns = makeScoringNames(10)
for s in sts:
for n in sns:
s[n] = round(s[n] * 50,1)
return sts
def OutputTablesToFile(filename,ts,ress):
tls = ts
vps = getVictoryPoints(ts)
svps = sorted(vps,key=lambda k: k['total'],reverse=True)
sts = [x for (x,y) in sorted(zip(ts,vps),key=lambda k: k[1]['total'],reverse=True)]
#stsToday = [x for (x,y) in sorted(zip(tsToday,vps),key=lambda k: k[1]['total'],reverse=True)]
ii = 0
for svi in svps:
m3s = scoreSingle([x['total'] for x in svps],ii)
svi['through_three'] = svi['mtotal'] + m3s
svi['m1'] = svi['mscores'][0]
svi['m2'] = svi['mscores'][1]
svi['m3'] = m3s
ii = ii+1
ff = open(filename,'wb')
ff.write('<BR><BR>Stats<BR><tt>')
cats = makeScoringNames(10)
plist = ['team']
plist.extend(cats)
csts = CleanStats(sts)
printDictList(ff,csts,plist)
ff.flush()
ff.write('<BR><BR>Points<BR>')
plist.append('total')
printDictList(ff,svps,plist)
ff.write('<BR><BR>Season scores As of Today:<BR>')
ssvps = sorted(svps,key=lambda k: k['through_three'],reverse=True)
printDictList(ff,ssvps,['team_name','m1','m2','m3','through_three'])
ff.write('<BR><BR>')
#ff.write("<a href='all.csv'> all.csv </a><BR><BR>")
# provide links to all teams' batter and pitcher stat files
# ff.write("<a href='TheRube_pitchers.csv'> Rube pitchers </a><BR>")
ff.write('<BR><BR>')
ff.write('</tt>')
ff.write(str(datetime.now()))
ff.close()
#printFilesForTeams(ts,press,bress)
#ff = open(codehome + monthfolder + 'all.csv','wb')
#printDictListCSV(ff,ress,['team','game_id','runs_for','h','d','t','hr','tb','bb','sb','sac','sf','sacsf','runs_against','qs','so','saves','holds','batting_team','pitching_team'])
#printDictListCSV(ff,ress)
#ff.close()
def getFullTeamName(teamcode):
teamMapping = {'ana':'Angels', 'ari':'Diamondbacks', 'atl':'Braves', 'bal':'Orioles', 'bos':'Red Sox', 'cha':'White Sox',
'chn':'Cubs', 'cin':'Reds', 'cle':'Indians', 'col':'Rockies', 'det':'Tigers', 'hou':'Astros', 'kca':'Royals',
'lan':'Dodgers', 'mia':'Marlins', 'mil':'Brewers', 'min':'Twins', 'nya':'Yankees', 'nyn':'Mets', 'oak':'Athletics',
'phi':'Phillies', 'pit':'Pirates', 'sdn':'Padres', 'sea':'Mariners', 'sfn':'Giants', 'sln':'Cardinals', 'tba':'Rays',
'tex':'Rangers', 'tor':'Blue Jays', 'was':'Nationals'}
return teamMapping[teamcode]
def myint(x):
if x == 'x':
return 0
else:
return int(x)
def getRuns(x,teamloc):
try:
rval = myint(x[teamloc])
except:
rval = 0
return rval
def getInningRuns(teamloc, dd):
try:
retvec = [getRuns(x,teamloc) for x in dd['data']['boxscore']['linescore']['inning_line_score'] if int(x['inning']) < 10]
extras = [getRuns(x,teamloc) for x in dd['data']['boxscore']['linescore']['inning_line_score'] if int(x['inning']) > 9]
if len(retvec) < 9:
            retvec.extend([0]*(9-len(retvec)))  # parenthesized: [0]*9 - len(...) would raise a TypeError
if len(extras) > 0:
retvec.append(sum(extras))
else:
retvec.append(0)
except:
retvec = [0]*10
return retvec
def ExtractTeamRecords(dd):
"""Takes json dictionary of all boxscore, returns relevant stuff"""
tgh = {}
tga = {}
res = None
try:
tgh['team'] = dd['data']['boxscore']['home_team_code']
tga['team'] = dd['data']['boxscore']['away_team_code']
tgh['game_id'] = dd['data']['boxscore']['game_id']
tga['game_id'] = dd['data']['boxscore']['game_id']
tgh['inning_runs'] = getInningRuns('home',dd)
tgh['inning_runs_against'] = getInningRuns('away',dd)
tga['inning_runs'] = getInningRuns('away',dd)
tga['inning_runs_against'] = getInningRuns('home',dd)
res = [tga,tgh]
except:
traceback.print_exc()
print 'no data yet'
return res
def CompileDayGames(curdate):
team_games = []
gameids = DateGames(curdate)
for g in gameids:
print 'Doing game ' + g
sys.stdout.flush()
jsonbox = GetGameBoxScoreJson(g)
if not jsonbox is None:
trs = ExtractTeamRecords(jsonbox)
if not trs is None:
team_games.extend(trs)
return (team_games)
def CompileRangeGames(date1,date2):
res = []
pdate1 = datetime.strptime(date1,'%Y_%m_%d').date()
pdate2 = datetime.strptime(date2,'%Y_%m_%d').date()
if pdate2 < pdate1:
raise Exception('date2 must be at or after date1')
oneday = timedelta(1)
thedate = pdate1
while thedate <= pdate2:
print 'Doing games for date ' + str(thedate)
sys.stdout.flush()
(rs) = CompileDayGames(thedate.strftime('%Y_%m_%d'))
if len(rs) > 0:
res.extend(rs)
thedate = thedate+oneday
return (res)
def DoTheDay():
today = datetime.now()
today = today.date()
start_date = date(2016,6,7)
end_date = date(2016,7,10)
#end_date = date(2016,5,10)
end_date = min(end_date,today)
ts,ress = getFilledTeams(d2s(start_date),d2s(end_date))
OutputTablesToFile(codehome + monthfolder + 'stats_wb5m3.html',ts,ress)
| [
"[email protected]"
] | |
f20a396d57ccb70fae8a2111ec008e29306a02f6 | 8b6e22a9ffce2830f3a718441d5644de17fc04e4 | /Observe_And_Solve_782. Transform to Chessboard.py | d479a8cb5b3810f5e1a55a49848cae1a3c536c78 | [] | no_license | lonelyarcher/leetcode.python3 | 8fc0e47e63ce1b1466d8990a0b0d4c226cafed9d | f3ec3e6a82ad092bc5d83732af582dc987da6aac | refs/heads/master | 2020-03-08T06:42:15.172664 | 2020-02-26T21:16:23 | 2020-02-26T21:16:23 | 127,977,905 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,792 | py | """ An N x N board contains only 0s and 1s. In each move, you can swap any 2 rows with each other, or any 2 columns with each other.
What is the minimum number of moves to transform the board into a "chessboard" - a board where no 0s and no 1s are 4-directionally adjacent? If the task is impossible, return -1.
Examples:
Input: board = [[0,1,1,0],[0,1,1,0],[1,0,0,1],[1,0,0,1]]
Output: 2
Explanation:
One potential sequence of moves is shown below, from left to right:
0110 1010 1010
0110 --> 1010 --> 0101
1001 0101 1010
1001 0101 0101
The first move swaps the first and second column.
The second move swaps the second and third row.
Input: board = [[0, 1], [1, 0]]
Output: 0
Explanation:
Also note that the board with 0 in the top left corner,
01
10
is also a valid chessboard.
Input: board = [[1, 0], [1, 0]]
Output: -1
Explanation:
No matter what sequence of moves you make, you cannot end with a valid chessboard.
Note:
board will have the same number of rows and columns, a number in the range [2, 30].
board[i][j] will be only 0s or 1s. """
'''
first, carefully observe the sample process
the target is a chessboard, so only two row/column patterns can appear: [1, 0, 1, 0, ...] and [0, 1, 0, 1, ...]
row swaps and column swaps are independent: swapping two rows only reorders the entries inside each column (and vice versa); it never changes which values a row or column contains
so even before any swap the board must already contain exactly two distinct row patterns, the patterns must differ in every position (be complements), and they must appear N//2 and (N + 1)//2 times
the minimum number of swaps turns a pattern such as [0110] into [1010] or [0101]; when N is odd only one target is possible, e.g. [11100] -> [10101]
build the target alternating mask and compare it with the observed pattern: the count of mismatched positions divided by 2 is the minimum number of swaps for that dimension
the columns work exactly the same way, so rows and columns can be handled in one loop
'''
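# Worked trace of the first sample, following the reasoning above: the rows of
# [[0,1,1,0],[0,1,1,0],[1,0,0,1],[1,0,0,1]] collapse to two complementary patterns
# (0,1,1,0) and (1,0,0,1), each appearing N//2 = 2 times. Against the alternating mask
# [0,1,0,1], pattern (0,1,1,0) mismatches in 2 positions -> 2//2 = 1 row swap; the
# columns (0,0,1,1)/(1,1,0,0) likewise need 1 swap, giving the expected answer 2.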
from typing import List
import collections
class Solution:
def movesToChessboard(self, board: List[List[int]]) -> int:
N = len(board)
ans = 0
for count in ((rows := collections.Counter(map(tuple, board))), (cols := collections.Counter(zip(*board)))):
if len(count) != 2 or sorted(count.values()) != [N//2, (N + 1)//2]: return -1
t1, t2 = count
if not all(x^y for x, y in zip(t1, t2)): return -1
mask = [t1.count(1) * 2 > N]
for _ in range(1, N): mask.append(not mask[-1])
if N % 2 == 1:
ans += sum(x ^ y for x, y in zip(t1, mask)) // 2
else:
ans += min((diff := sum(x ^ y for x, y in zip(t1, mask)) // 2), N//2 - diff)
return ans
print(Solution().movesToChessboard([[0,1,1,0],[0,1,1,0],[1,0,0,1],[1,0,0,1]])) # 2
print(Solution().movesToChessboard([[0, 1], [1, 0]])) # 0
print(Solution().movesToChessboard([[1, 0], [1, 0]])) # -1 | [
"[email protected]"
] | |
a78f0dff4519e307c8328160d7fcb679f8b2d6a5 | 12458f134f349ae1443f17c297eae7a2725f1181 | /twoStep/keychain/management/commands/checkintegrity.py | b4163468e9b1ff377d3f41f6964ba48f52bad8a4 | [] | no_license | aeaa1998/cifrado-keychain-back-end | 48ce1f872cb842542976ad954b37d7d91f5a14ea | ce4e3e104dd9a1839de3a8f9e89234b688e78b0a | refs/heads/master | 2023-01-13T19:53:36.805400 | 2020-11-19T05:18:38 | 2020-11-19T05:18:38 | 303,234,056 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,546 | py | from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from keychain.models import KeyChain, App
from keychain.lib import *
from hashlib import sha256
from django.core.files.storage import FileSystemStorage
from pathlib import Path
import os
class Command(BaseCommand):
def handle(self, *args, **options):
mypath = Path().absolute()
# print('Absolute path : {}'.format(mypath))
users = User.objects.all()
for user in users:
userId = create_hmac_sha256_signature(generate_derivation(masterPass="55555",seed="65C6AEdf045CFbb9d3D818CC7a708d6c", num= sumOrd(user.username) // len(user.username)), user.id)
keychain = KeyChain.objects.get(owner=userId)
apps = App.objects.filter(keychain__pk=keychain.id)
for app in apps:
if not app.integrity_ckeck == sha256(app.password.encode()).hexdigest():
folder = os.path.join("keychain/dumps/" + str(keychain.id) + "/", str(app.id))
try:
print(folder)
# f=open(folder + "/dump.txt","r")
# contents =f.read()
print(app.password)
# app.password = contents
# print(app.password)
# app.save()
# f.close()
except:
print("does not have a available dump")
| [
"[email protected]"
] | |
6a48ec22acbff8efec462b02e62692b6e3657252 | 925fa0208e07ac2aeb64f9201249a91f48b900fa | /LeetCode/buySell.py | c2ca23a5f4c2ece5b722241fab4b2700d7c57e6d | [] | no_license | Sanchi02/Dojo | 984eb3cba26e43a8f6f0ef9c93f7aed24527b3ae | b25288c42a67d8639195f3fddef698f5cd179aac | refs/heads/master | 2022-03-18T02:58:27.506082 | 2022-03-13T12:11:18 | 2022-03-13T12:11:18 | 197,040,319 | 0 | 0 | null | 2019-07-15T17:14:20 | 2019-07-15T17:06:36 | null | UTF-8 | Python | false | false | 1,042 | py | # Say you have an array for which the ith element is the price of a given stock on day i.
# If you were only permitted to complete at most one transaction (i.e., buy one and sell one share of the stock), design an algorithm to find the maximum profit.
# Note that you cannot sell a stock before you buy one.
# Example 1:
# Input: [7,1,5,3,6,4]
# Output: 5
# Explanation: Buy on day 2 (price = 1) and sell on day 5 (price = 6), profit = 6-1 = 5.
# Not 7-1 = 6, as selling price needs to be larger than buying price.
# Example 2:
# Input: [7,6,4,3,1]
# Output: 0
# Explanation: In this case, no transaction is done, i.e. max profit = 0.
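# Worked trace of Example 1 for the single-pass approach below: with prices
# [7,1,5,3,6,4], tracking (min price so far, best profit) per day gives
# (7,0) -> (1,0) -> (1,4) -> (1,4) -> (1,5) -> (1,5), so the answer is 5.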
from typing import List
class Solution:
def maxProfit(self, prices: List[int]) -> int:
if(len(prices) <= 1):
return 0
minp = prices[0]
maxProfit = 0
for i in range(1,len(prices)):
if(prices[i] < minp):
minp = prices[i]
elif(prices[i] - minp > maxProfit):
maxProfit = prices[i] - minp
return maxProfit | [
"[email protected]"
] | |
f2a0f2554553caf14c358e31a7c6a467e481fa1d | e096f35070c78472661bf425176c9b39b7405eed | /python/log_analysis/log_velocity_to_file.py | 9ea5d302c3ed3c06997c238dd3f02dc5f3666195 | [] | no_license | ripl/lcm-python-scripts | 50c09bb6fbea6ee8f5277e07a046b7766dd4ac91 | aa5820cf4b9881d96a9f3ea88a2934f1693b45bf | refs/heads/master | 2021-09-24T14:33:21.042042 | 2018-10-10T15:52:21 | 2018-10-10T15:52:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,935 | py | from time import time
import lcm
from ripl.orc_debug_stat_msg_t import orc_debug_stat_msg_t
from ripl.velocity_msg_t import velocity_msg_t
import threading
from sys import argv
class log_lcm_listener(object):
def __init__(self, log_name):
self.lc = lcm.LCM()
self.base_lcm_msg = None
self.subscription_base_stat = self.lc.subscribe("BASE_DEBUG_STAT", self.lcm_base_handler)
self.subscription_robot_vel_cmd = self.lc.subscribe("ROBOT_VELOCITY_CMD", self.lcm_robot_vel_handler)
self._running = True
self.file_name = log_name#"vel.log"
def lcm_robot_vel_handler(self, channel, data):
msg = velocity_msg_t.decode(data)
print "=====Robot Vel Command : " , msg.tv, msg.rv
def lcm_base_handler(self, channel, data):
msg = orc_debug_stat_msg_t.decode(data)
print "Pos Status : " , msg.qei_position
print "Vel Status : " , msg.qei_velocity
print "Desired Vel (TV, RV) : " , msg.s_desired_vel[0], msg.s_desired_vel[1]
print "PWM (TV, RV) : " , msg.pwm_value[0], msg.pwm_value[1]
print "Actual Vel (TV, RV) : " , msg.s_actual[0], msg.s_actual[1]
print "Commanded Vel (TV, RV) : " , msg.command_velocity[0], msg.command_velocity[1]
f = open(self.file_name,'a')
vel_string = str(float(msg.utime)/1e6)+ "," + str(msg.s_actual[0]) + "," + str(msg.s_actual[1]) + "," + \
str(msg.s_desired_vel[0]) + "," + str(msg.s_desired_vel[1])+ "\n"
f.write(vel_string)
f.close()
def run(self):
print "Started LCM Listener"
try:
while self._running:
self.lc.handle()
except KeyboardInterrupt:
pass
if __name__ == '__main__':
log_name = argv[1]
background = log_lcm_listener(log_name)
bg_thread = threading.Thread( target=background.run )
bg_thread.start()
| [
"[email protected]"
] | |
9e36f8b04946d9147e7be4c63f951e7ed275219c | 033407a52993a19ac4299a459a1ef01df4ec3d26 | /scripts/cluster_data.py | de22bebcb58e3c94ad4ac1a33e1ebb51946d1d55 | [
"BSD-3-Clause"
] | permissive | oxpig/learning-from-the-ligand | 27070e883b6b7780801a7a31d310e68fd81d1bd8 | 4ab2d72994c403d72549bcbff5abd8620b91527c | refs/heads/master | 2023-01-07T23:26:13.518102 | 2020-11-17T15:50:28 | 2020-11-17T15:50:28 | 296,307,884 | 4 | 1 | BSD-3-Clause | 2020-11-17T15:50:30 | 2020-09-17T11:35:21 | Jupyter Notebook | UTF-8 | Python | false | false | 4,327 | py | """Clusters the PDBbind 2017 refined set by 90% similarity to proteins in the core set.
Usage:
cluster_data.py [-h] <pdbbind_2016_dir> <cluster_file> <training_set_file> <test_set_file>
Arguments:
pdbbind_2016_dir top-level directory of the PDBbind 2016 database
cluster_file file containing BLASTClust clusters of the PDBbind 2017 refined set in .json format
training_set_file file containing PDB codes of the training set
test_set_file file containing PDB codes of the test set
Options:
-h --help show this message and exit
This script is gnarly and inefficient, but documented and functional.
It is neither well-designed nor well-engineered. I'm sorry.
Embrace the chaos of throwaway research code. Climb the ladder.
It is assumed that the PDBbind 2016 indices live in the standard directory structure
i.e. <top-level-dir>/index/<filename>. If you just downloaded the core cluster file
and saved it somewhere else, go ahead and edit the script. You'll probably make it better.
Clusters of the refined set corresponding to the 56 clusters of the core set we use
are saved to ../data/single_target_clusters.json
A list of PDB codes belonging to the training set whose proteins are in the same
BLASTClust clusters as any test set protein is written to ../data/training_test_overlap.txt
"""
import json
import os
from docopt import docopt
# parse command line arguments
args = docopt(__doc__)
pdbbind_2016_dir = args['<pdbbind_2016_dir>']
cluster_file = args['<cluster_file>']
test_set_file = args['<test_set_file>']
training_set_file = args['<training_set_file>']
# parse protein names and clustering data from PDBbind indices
core_cluster_file = os.path.join(pdbbind_2016_dir, 'index', 'INDEX_core_cluster.2016')
with open(core_cluster_file, 'r') as f:
lines = [l.strip().split()[0] for l in f if not l.startswith('#')]
core_cluster_data = [lines[x:x+5] for x in range(0, len(lines),5)]
core_names_file = os.path.join(pdbbind_2016_dir, 'index', 'INDEX_core_name.2016')
with open(core_names_file, 'r') as f:
lines = [line.strip().split() for line in f if not line.startswith('#')]
core_protein_names = {line[0]: ' '.join(line[3:]) for line in lines}
# dict of protein name: pdb codes for each cluster of the core set
# note - there are actually two clusters for beta-lactamase
core_cluster_names = list(set([name for _, name in core_protein_names.items()]))
core_clusters = {name: [] for name in core_cluster_names}
for pdb in core_protein_names:
core_clusters[core_protein_names[pdb]].append(pdb)
# training and test set PDB codes
with open(training_set_file, 'r') as f:
training_set = [line.strip() for line in f]
with open(test_set_file, 'r') as f:
test_set = [line.strip() for line in f]
# clusters of the full data set
with open(cluster_file, 'r') as f:
clusters = json.load(f)
# using sets speeds up checking membership of each cluster
set_clusters = [set(c) for c in clusters]
# for each core set cluster, identify all proteins in the refined set which
# are >90% sequence identical to any protein from the core set cluster
refined_clusters = {}
for c in core_clusters:
# first, add the proteins from the core set that are in our test set
refined_clusters[c] = [i for i in core_clusters[c] if i in test_set]
# for each protein from the core set, add all proteins from each refined
# set cluster to which the core set protein belongs
for pdb in core_clusters[c]:
for cluster in set_clusters:
if pdb in cluster:
refined_clusters[c].extend([i for i in cluster if i in training_set])
# remove repeated entries
refined_clusters[c] = list(set(refined_clusters[c]))
# finally, list the PDB codes of all proteins in the training data that
# belong to the same cluster as any protein in the test set
similar_pdbs = [pdb for c in refined_clusters for pdb in refined_clusters[c]]
training_set_overlap = list(set(similar_pdbs).difference(set(test_set)))
# finally, dump all the clustering information and never look back
with open(os.path.join('..', 'data', 'training_set_overlap.txt'), 'w') as f:
print(*training_set_overlap, sep='\n', file=f)
with open(os.path.join('..', 'data', 'single_target_clusters.json'), 'w') as f:
json.dump(refined_clusters, f)
| [
"[email protected]"
] | |
38b53f3d8bc7344999eff20c47386782a5e8d5bc | 1bf9f6b0ef85b6ccad8cb029703f89039f74cedc | /src/connectedk8s/azext_connectedk8s/_utils.py | 2a988f348cfbba081397f4cb65ee1a28e28fadda | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | VSChina/azure-cli-extensions | a1f4bf2ea4dc1b507618617e299263ad45213add | 10b7bfef62cb080c74b1d59aadc4286bd9406841 | refs/heads/master | 2022-11-14T03:40:26.009692 | 2022-11-09T01:09:53 | 2022-11-09T01:09:53 | 199,810,654 | 4 | 2 | MIT | 2020-07-13T05:51:27 | 2019-07-31T08:10:50 | Python | UTF-8 | Python | false | false | 26,287 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import os
import shutil
import subprocess
from subprocess import Popen, PIPE
import time
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import json
from knack.util import CLIError
from knack.log import get_logger
from knack.prompting import NoTTYException, prompt_y_n
from azure.cli.core.commands.client_factory import get_subscription_id
from azure.cli.core.util import send_raw_request
from azure.cli.core import telemetry
from azure.core.exceptions import ResourceNotFoundError, HttpResponseError
from msrest.exceptions import AuthenticationError, HttpOperationError, TokenExpiredError
from msrest.exceptions import ValidationError as MSRestValidationError
from kubernetes.client.rest import ApiException
from azext_connectedk8s._client_factory import _resource_client_factory, _resource_providers_client
import azext_connectedk8s._constants as consts
from kubernetes import client as kube_client
from azure.cli.core import get_default_cli
from azure.cli.core.azclierror import CLIInternalError, ClientRequestError, ArgumentUsageError, ManualInterrupt, AzureResponseError, AzureInternalError, ValidationError
logger = get_logger(__name__)
# pylint: disable=line-too-long
# pylint: disable=bare-except
class TimeoutHTTPAdapter(HTTPAdapter):
def __init__(self, *args, **kwargs):
self.timeout = consts.DEFAULT_REQUEST_TIMEOUT
if "timeout" in kwargs:
self.timeout = kwargs["timeout"]
del kwargs["timeout"]
super().__init__(*args, **kwargs)
def send(self, request, **kwargs):
timeout = kwargs.get("timeout")
if timeout is None:
kwargs["timeout"] = self.timeout
return super().send(request, **kwargs)
def validate_location(cmd, location):
subscription_id = get_subscription_id(cmd.cli_ctx)
rp_locations = []
resourceClient = _resource_client_factory(cmd.cli_ctx, subscription_id=subscription_id)
try:
providerDetails = resourceClient.providers.get('Microsoft.Kubernetes')
except Exception as e: # pylint: disable=broad-except
arm_exception_handler(e, consts.Get_ResourceProvider_Fault_Type, 'Failed to fetch resource provider details')
for resourceTypes in providerDetails.resource_types:
if resourceTypes.resource_type == 'connectedClusters':
rp_locations = [location.replace(" ", "").lower() for location in resourceTypes.locations]
if location.lower() not in rp_locations:
telemetry.set_exception(exception='Location not supported', fault_type=consts.Invalid_Location_Fault_Type,
summary='Provided location is not supported for creating connected clusters')
raise ArgumentUsageError("Connected cluster resource creation is supported only in the following locations: " +
', '.join(map(str, rp_locations)), recommendation="Use the --location flag to specify one of these locations.")
break
def get_chart_path(registry_path, kube_config, kube_context, helm_client_location):
# Pulling helm chart from registry
os.environ['HELM_EXPERIMENTAL_OCI'] = '1'
pull_helm_chart(registry_path, kube_config, kube_context, helm_client_location)
# Exporting helm chart after cleanup
chart_export_path = os.path.join(os.path.expanduser('~'), '.azure', 'AzureArcCharts')
try:
if os.path.isdir(chart_export_path):
shutil.rmtree(chart_export_path)
except:
logger.warning("Unable to cleanup the azure-arc helm charts already present on the machine. In case of failure, please cleanup the directory '%s' and try again.", chart_export_path)
export_helm_chart(registry_path, chart_export_path, kube_config, kube_context, helm_client_location)
# Returning helm chart path
helm_chart_path = os.path.join(chart_export_path, 'azure-arc-k8sagents')
chart_path = os.getenv('HELMCHART') if os.getenv('HELMCHART') else helm_chart_path
return chart_path
def pull_helm_chart(registry_path, kube_config, kube_context, helm_client_location):
cmd_helm_chart_pull = [helm_client_location, "chart", "pull", registry_path]
if kube_config:
cmd_helm_chart_pull.extend(["--kubeconfig", kube_config])
if kube_context:
cmd_helm_chart_pull.extend(["--kube-context", kube_context])
response_helm_chart_pull = subprocess.Popen(cmd_helm_chart_pull, stdout=PIPE, stderr=PIPE)
_, error_helm_chart_pull = response_helm_chart_pull.communicate()
if response_helm_chart_pull.returncode != 0:
telemetry.set_exception(exception=error_helm_chart_pull.decode("ascii"), fault_type=consts.Pull_HelmChart_Fault_Type,
summary='Unable to pull helm chart from the registry')
raise CLIInternalError("Unable to pull helm chart from the registry '{}': ".format(registry_path) + error_helm_chart_pull.decode("ascii"))
def export_helm_chart(registry_path, chart_export_path, kube_config, kube_context, helm_client_location):
cmd_helm_chart_export = [helm_client_location, "chart", "export", registry_path, "--destination", chart_export_path]
if kube_config:
cmd_helm_chart_export.extend(["--kubeconfig", kube_config])
if kube_context:
cmd_helm_chart_export.extend(["--kube-context", kube_context])
response_helm_chart_export = subprocess.Popen(cmd_helm_chart_export, stdout=PIPE, stderr=PIPE)
_, error_helm_chart_export = response_helm_chart_export.communicate()
if response_helm_chart_export.returncode != 0:
telemetry.set_exception(exception=error_helm_chart_export.decode("ascii"), fault_type=consts.Export_HelmChart_Fault_Type,
summary='Unable to export helm chart from the registry')
raise CLIInternalError("Unable to export helm chart from the registry '{}': ".format(registry_path) + error_helm_chart_export.decode("ascii"))
def add_helm_repo(kube_config, kube_context, helm_client_location):
repo_name = os.getenv('HELMREPONAME')
repo_url = os.getenv('HELMREPOURL')
cmd_helm_repo = [helm_client_location, "repo", "add", repo_name, repo_url]
if kube_config:
cmd_helm_repo.extend(["--kubeconfig", kube_config])
if kube_context:
cmd_helm_repo.extend(["--kube-context", kube_context])
response_helm_repo = Popen(cmd_helm_repo, stdout=PIPE, stderr=PIPE)
_, error_helm_repo = response_helm_repo.communicate()
if response_helm_repo.returncode != 0:
telemetry.set_exception(exception=error_helm_repo.decode("ascii"), fault_type=consts.Add_HelmRepo_Fault_Type,
summary='Failed to add helm repository')
raise CLIInternalError("Unable to add repository {} to helm: ".format(repo_url) + error_helm_repo.decode("ascii"))
def get_helm_registry(cmd, config_dp_endpoint, dp_endpoint_dogfood=None, release_train_dogfood=None):
# Setting uri
get_chart_location_url = "{}/{}/GetLatestHelmPackagePath?api-version=2019-11-01-preview".format(config_dp_endpoint, 'azure-arc-k8sagents')
release_train = os.getenv('RELEASETRAIN') if os.getenv('RELEASETRAIN') else 'stable'
if dp_endpoint_dogfood:
get_chart_location_url = "{}/azure-arc-k8sagents/GetLatestHelmPackagePath?api-version=2019-11-01-preview".format(dp_endpoint_dogfood)
if release_train_dogfood:
release_train = release_train_dogfood
uri_parameters = ["releaseTrain={}".format(release_train)]
resource = cmd.cli_ctx.cloud.endpoints.active_directory_resource_id
# Sending request
try:
r = send_raw_request(cmd.cli_ctx, 'post', get_chart_location_url, uri_parameters=uri_parameters, resource=resource)
except Exception as e:
telemetry.set_exception(exception=e, fault_type=consts.Get_HelmRegistery_Path_Fault_Type,
summary='Error while fetching helm chart registry path')
raise CLIInternalError("Error while fetching helm chart registry path: " + str(e))
if r.content:
try:
return r.json().get('repositoryPath')
except Exception as e:
telemetry.set_exception(exception=e, fault_type=consts.Get_HelmRegistery_Path_Fault_Type,
summary='Error while fetching helm chart registry path')
raise CLIInternalError("Error while fetching helm chart registry path from JSON response: " + str(e))
else:
telemetry.set_exception(exception='No content in response', fault_type=consts.Get_HelmRegistery_Path_Fault_Type,
summary='No content in acr path response')
raise CLIInternalError("No content was found in helm registry path response.")
def arm_exception_handler(ex, fault_type, summary, return_if_not_found=False):
if isinstance(ex, AuthenticationError):
telemetry.set_exception(exception=ex, fault_type=fault_type, summary=summary)
raise AzureResponseError("Authentication error occured while making ARM request: " + str(ex) + "\nSummary: {}".format(summary))
if isinstance(ex, TokenExpiredError):
telemetry.set_exception(exception=ex, fault_type=fault_type, summary=summary)
raise AzureResponseError("Token expiration error occured while making ARM request: " + str(ex) + "\nSummary: {}".format(summary))
if isinstance(ex, HttpOperationError):
status_code = ex.response.status_code
if status_code == 404 and return_if_not_found:
return
if status_code // 100 == 4:
telemetry.set_user_fault()
telemetry.set_exception(exception=ex, fault_type=fault_type, summary=summary)
if status_code // 100 == 5:
raise AzureInternalError("Http operation error occured while making ARM request: " + str(ex) + "\nSummary: {}".format(summary))
raise AzureResponseError("Http operation error occured while making ARM request: " + str(ex) + "\nSummary: {}".format(summary))
if isinstance(ex, MSRestValidationError):
telemetry.set_exception(exception=ex, fault_type=fault_type, summary=summary)
raise AzureResponseError("Validation error occured while making ARM request: " + str(ex) + "\nSummary: {}".format(summary))
if isinstance(ex, HttpResponseError):
status_code = ex.status_code
if status_code == 404 and return_if_not_found:
return
if status_code // 100 == 4:
telemetry.set_user_fault()
telemetry.set_exception(exception=ex, fault_type=fault_type, summary=summary)
if status_code // 100 == 5:
raise AzureInternalError("Http response error occured while making ARM request: " + str(ex) + "\nSummary: {}".format(summary))
raise AzureResponseError("Http response error occured while making ARM request: " + str(ex) + "\nSummary: {}".format(summary))
if isinstance(ex, ResourceNotFoundError) and return_if_not_found:
return
telemetry.set_exception(exception=ex, fault_type=fault_type, summary=summary)
raise ClientRequestError("Error occured while making ARM request: " + str(ex) + "\nSummary: {}".format(summary))
def kubernetes_exception_handler(ex, fault_type, summary, error_message='Error occured while connecting to the kubernetes cluster: ',
message_for_unauthorized_request='The user does not have required privileges on the kubernetes cluster to deploy Azure Arc enabled Kubernetes agents. Please ensure you have cluster admin privileges on the cluster to onboard.',
message_for_not_found='The requested kubernetes resource was not found.', raise_error=True):
telemetry.set_user_fault()
if isinstance(ex, ApiException):
status_code = ex.status
if status_code == 403:
logger.warning(message_for_unauthorized_request)
elif status_code == 404:
logger.warning(message_for_not_found)
else:
logger.debug("Kubernetes Exception: " + str(ex))
if raise_error:
telemetry.set_exception(exception=ex, fault_type=fault_type, summary=summary)
raise ValidationError(error_message + "\nError Response: " + str(ex.body))
else:
if raise_error:
telemetry.set_exception(exception=ex, fault_type=fault_type, summary=summary)
raise ValidationError(error_message + "\nError: " + str(ex))
else:
logger.debug("Kubernetes Exception: " + str(ex))
def validate_infrastructure_type(infra):
for s in consts.Infrastructure_Enum_Values[1:]: # First value is "auto"
if s.lower() == infra.lower():
return s
return None
def get_values_file():
values_file_provided = False
values_file = os.getenv('HELMVALUESPATH')
if (values_file is not None) and (os.path.isfile(values_file)):
values_file_provided = True
logger.warning("Values files detected. Reading additional helm parameters from same.")
# trimming required for windows os
if (values_file.startswith("'") or values_file.startswith('"')):
values_file = values_file[1:]
if (values_file.endswith("'") or values_file.endswith('"')):
values_file = values_file[:-1]
return values_file_provided, values_file
def ensure_namespace_cleanup():
api_instance = kube_client.CoreV1Api()
timeout = time.time() + 180
while True:
if time.time() > timeout:
telemetry.set_user_fault()
logger.warning("Namespace 'azure-arc' still in terminating state. Please ensure that you delete the 'azure-arc' namespace before onboarding the cluster again.")
return
try:
api_response = api_instance.list_namespace(field_selector='metadata.name=azure-arc')
if not api_response.items:
return
time.sleep(5)
except Exception as e: # pylint: disable=broad-except
logger.warning("Error while retrieving namespace information: " + str(e))
kubernetes_exception_handler(e, consts.Get_Kubernetes_Namespace_Fault_Type, 'Unable to fetch kubernetes namespace',
raise_error=False)
def delete_arc_agents(release_namespace, kube_config, kube_context, helm_client_location, no_hooks=False):
if(no_hooks):
cmd_helm_delete = [helm_client_location, "delete", "azure-arc", "--namespace", release_namespace, "--no-hooks"]
else:
cmd_helm_delete = [helm_client_location, "delete", "azure-arc", "--namespace", release_namespace]
if kube_config:
cmd_helm_delete.extend(["--kubeconfig", kube_config])
if kube_context:
cmd_helm_delete.extend(["--kube-context", kube_context])
response_helm_delete = Popen(cmd_helm_delete, stdout=PIPE, stderr=PIPE)
_, error_helm_delete = response_helm_delete.communicate()
if response_helm_delete.returncode != 0:
if 'forbidden' in error_helm_delete.decode("ascii") or 'Error: warning: Hook pre-delete' in error_helm_delete.decode("ascii") or 'Error: timed out waiting for the condition' in error_helm_delete.decode("ascii"):
telemetry.set_user_fault()
telemetry.set_exception(exception=error_helm_delete.decode("ascii"), fault_type=consts.Delete_HelmRelease_Fault_Type,
summary='Unable to delete helm release')
raise CLIInternalError("Error occured while cleaning up arc agents. " +
"Helm release deletion failed: " + error_helm_delete.decode("ascii") +
" Please run 'helm delete azure-arc' to ensure that the release is deleted.")
ensure_namespace_cleanup()
def helm_install_release(chart_path, subscription_id, kubernetes_distro, kubernetes_infra, resource_group_name, cluster_name,
location, onboarding_tenant_id, http_proxy, https_proxy, no_proxy, proxy_cert, private_key_pem,
kube_config, kube_context, no_wait, values_file_provided, values_file, cloud_name, disable_auto_upgrade,
enable_custom_locations, custom_locations_oid, helm_client_location, enable_private_link, onboarding_timeout="600",
container_log_path=None):
cmd_helm_install = [helm_client_location, "upgrade", "--install", "azure-arc", chart_path,
"--set", "global.subscriptionId={}".format(subscription_id),
"--set", "global.kubernetesDistro={}".format(kubernetes_distro),
"--set", "global.kubernetesInfra={}".format(kubernetes_infra),
"--set", "global.resourceGroupName={}".format(resource_group_name),
"--set", "global.resourceName={}".format(cluster_name),
"--set", "global.location={}".format(location),
"--set", "global.tenantId={}".format(onboarding_tenant_id),
"--set", "global.onboardingPrivateKey={}".format(private_key_pem),
"--set", "systemDefaultValues.spnOnboarding=false",
"--set", "global.azureEnvironment={}".format(cloud_name),
"--set", "systemDefaultValues.clusterconnect-agent.enabled=true",
"--output", "json"]
# Add custom-locations related params
if enable_custom_locations and not enable_private_link:
cmd_helm_install.extend(["--set", "systemDefaultValues.customLocations.enabled=true"])
cmd_helm_install.extend(["--set", "systemDefaultValues.customLocations.oid={}".format(custom_locations_oid)])
# Disable cluster connect if private link is enabled
if enable_private_link is True:
cmd_helm_install.extend(["--set", "systemDefaultValues.clusterconnect-agent.enabled=false"])
# To set some other helm parameters through file
if values_file_provided:
cmd_helm_install.extend(["-f", values_file])
if disable_auto_upgrade:
cmd_helm_install.extend(["--set", "systemDefaultValues.azureArcAgents.autoUpdate={}".format("false")])
if https_proxy:
cmd_helm_install.extend(["--set", "global.httpsProxy={}".format(https_proxy)])
if http_proxy:
cmd_helm_install.extend(["--set", "global.httpProxy={}".format(http_proxy)])
if no_proxy:
cmd_helm_install.extend(["--set", "global.noProxy={}".format(no_proxy)])
if proxy_cert:
cmd_helm_install.extend(["--set-file", "global.proxyCert={}".format(proxy_cert)])
cmd_helm_install.extend(["--set", "global.isCustomCert={}".format(True)])
if https_proxy or http_proxy or no_proxy:
cmd_helm_install.extend(["--set", "global.isProxyEnabled={}".format(True)])
if container_log_path is not None:
cmd_helm_install.extend(["--set", "systemDefaultValues.fluent-bit.containerLogPath={}".format(container_log_path)])
if kube_config:
cmd_helm_install.extend(["--kubeconfig", kube_config])
if kube_context:
cmd_helm_install.extend(["--kube-context", kube_context])
if not no_wait:
# Change --timeout format for helm client to understand
onboarding_timeout = onboarding_timeout + "s"
cmd_helm_install.extend(["--wait", "--timeout", "{}".format(onboarding_timeout)])
response_helm_install = Popen(cmd_helm_install, stdout=PIPE, stderr=PIPE)
_, error_helm_install = response_helm_install.communicate()
if response_helm_install.returncode != 0:
if ('forbidden' in error_helm_install.decode("ascii") or 'timed out waiting for the condition' in error_helm_install.decode("ascii")):
telemetry.set_user_fault()
telemetry.set_exception(exception=error_helm_install.decode("ascii"), fault_type=consts.Install_HelmRelease_Fault_Type,
summary='Unable to install helm release')
logger.warning("Please check if the azure-arc namespace was deployed and run 'kubectl get pods -n azure-arc' to check if all the pods are in running state. A possible cause for pods stuck in pending state could be insufficient resources on the kubernetes cluster to onboard to arc.")
raise CLIInternalError("Unable to install helm release: " + error_helm_install.decode("ascii"))
def flatten(dd, separator='.', prefix=''):
try:
if isinstance(dd, dict):
return {prefix + separator + k if prefix else k: v for kk, vv in dd.items() for k, v in flatten(vv, separator, kk).items()}
else:
return {prefix: dd}
except Exception as e:
telemetry.set_exception(exception=e, fault_type=consts.Error_Flattening_User_Supplied_Value_Dict,
summary='Error while flattening the user supplied helm values dict')
raise CLIInternalError("Error while flattening the user supplied helm values dict")
def check_features_to_update(features_to_update):
update_cluster_connect, update_azure_rbac, update_cl = False, False, False
for feature in features_to_update:
if feature == "cluster-connect":
update_cluster_connect = True
elif feature == "azure-rbac":
update_azure_rbac = True
elif feature == "custom-locations":
update_cl = True
return update_cluster_connect, update_azure_rbac, update_cl
def user_confirmation(message, yes=False):
if yes:
return
try:
if not prompt_y_n(message):
raise ManualInterrupt('Operation cancelled.')
except NoTTYException:
raise CLIInternalError('Unable to prompt for confirmation as no tty available. Use --yes.')
def is_guid(guid):
import uuid
try:
uuid.UUID(guid)
return True
except ValueError:
return False
def try_list_node_fix():
try:
from kubernetes.client.models.v1_container_image import V1ContainerImage
def names(self, names):
self._names = names
V1ContainerImage.names = V1ContainerImage.names.setter(names)
except Exception as ex:
logger.debug("Error while trying to monkey patch the fix for list_node(): {}".format(str(ex)))
def check_provider_registrations(cli_ctx):
try:
rp_client = _resource_providers_client(cli_ctx)
cc_registration_state = rp_client.get(consts.Connected_Cluster_Provider_Namespace).registration_state
if cc_registration_state != "Registered":
telemetry.set_exception(exception="{} provider is not registered".format(consts.Connected_Cluster_Provider_Namespace), fault_type=consts.CC_Provider_Namespace_Not_Registered_Fault_Type,
summary="{} provider is not registered".format(consts.Connected_Cluster_Provider_Namespace))
raise ValidationError("{} provider is not registered. Please register it using 'az provider register -n 'Microsoft.Kubernetes' before running the connect command.".format(consts.Connected_Cluster_Provider_Namespace))
kc_registration_state = rp_client.get(consts.Kubernetes_Configuration_Provider_Namespace).registration_state
if kc_registration_state != "Registered":
telemetry.set_user_fault()
logger.warning("{} provider is not registered".format(consts.Kubernetes_Configuration_Provider_Namespace))
except ValidationError as e:
raise e
except Exception as ex:
logger.warning("Couldn't check the required provider's registration status. Error: {}".format(str(ex)))
def can_create_clusterrolebindings():
try:
api_instance = kube_client.AuthorizationV1Api()
access_review = kube_client.V1SelfSubjectAccessReview(spec={
"resourceAttributes": {
"verb": "create",
"resource": "clusterrolebindings",
"group": "rbac.authorization.k8s.io"
}
})
response = api_instance.create_self_subject_access_review(access_review)
return response.status.allowed
except Exception as ex:
logger.warning("Couldn't check for the permission to create clusterrolebindings on this k8s cluster. Error: {}".format(str(ex)))
return "Unknown"
def validate_node_api_response(api_instance, node_api_response):
if node_api_response is None:
try:
node_api_response = api_instance.list_node()
return node_api_response
except Exception as ex:
logger.debug("Error occcured while listing nodes on this kubernetes cluster: {}".format(str(ex)))
return None
else:
return node_api_response
def az_cli(args_str):
args = args_str.split()
cli = get_default_cli()
cli.invoke(args, out_file=open(os.devnull, 'w'))
if cli.result.result:
return cli.result.result
elif cli.result.error:
raise Exception(cli.result.error)
return True
# def is_cli_using_msal_auth():
# response_cli_version = az_cli("version --output json")
# try:
# cli_version = response_cli_version['azure-cli']
# except Exception as ex:
# raise CLIInternalError("Unable to decode the az cli version installed: {}".format(str(ex)))
# if version.parse(cli_version) >= version.parse(consts.AZ_CLI_ADAL_TO_MSAL_MIGRATE_VERSION):
# return True
# else:
# return False
def is_cli_using_msal_auth():
response_cli_version = az_cli("version --output json")
try:
cli_version = response_cli_version['azure-cli']
except Exception as ex:
raise CLIInternalError("Unable to decode the az cli version installed: {}".format(str(ex)))
v1 = cli_version
v2 = consts.AZ_CLI_ADAL_TO_MSAL_MIGRATE_VERSION
for i, j in zip(map(int, v1.split(".")), map(int, v2.split("."))):
if i == j:
continue
return i > j
return len(v1.split(".")) == len(v2.split("."))
| [
"[email protected]"
] | |
2972890ca8b9bc86b37c737a11cd49d5efe0d684 | 48e37a05093c797b96133455b771354ef60d231c | /perceptronTest.py | d958682d3f019c146ee0253e8282ed643218f352 | [] | no_license | StewartFong/deepLearning | f3757250b33040d7ddfe46338c5de6e45852c63e | d4c7f7d77a2b8daf94bb2640f43ec60f89c23bb7 | refs/heads/master | 2021-01-18T16:59:25.680092 | 2017-04-20T08:09:21 | 2017-04-20T08:09:21 | 86,779,830 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,312 | py | import numpy as np
import pandas as pd
import perceptron as pt
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
df = pd.read_csv('./iris.data',header=None)
y = df.iloc[0:100,4].values
y = np.where(y=='Iris-setosa',-1,1)
X = df.iloc[0:100,[0,2]].values
ppn = pt.Perceptron(eta=0.1,n_iter=10)
ppn.fit(X,y)
def plot_decision_regions(X,y,ppn,resolution=0.02):
#setup marker generator and color map
markers = ('s','x','o','^','v')
colors = ('red','blue','lightgreen','gray','cyan')
cmap = ListedColormap(colors[:len(np.unique(y))])
#plot the decision surface
x1_min,x1_max = X[:,0].min()-1,X[:,0].max()+1
x2_min,x2_max = X[:,1].min()-1,X[:,1].max()+1
xx1,xx2 = np.meshgrid(np.arange(x1_min,x1_max,resolution),np.arange(x2_min,x2_max,resolution))
Z = ppn.predict(np.array([xx1.ravel(),xx2.ravel()]).T)
Z = Z.reshape(xx1.shape)
plt.contourf(xx1,xx2,Z,alpha=0.4,cmap=cmap)
plt.xlim(xx1.min(),xx1.max())
plt.ylim(xx2.min(),xx2.max())
#plot class samples
for idx,cl in enumerate(np.unique(y)):
plt.scatter(x=X[y==cl,0],y=X[y==cl,1],alpha=0.8,c=cmap(idx),marker=markers[idx],label=cl)
plot_decision_regions(X,y,ppn=ppn)
plt.xlabel('sepal length [cm]')
plt.ylabel('petal length [cm]')
plt.legend(loc='upper left')
plt.show()
| [
"[email protected]"
] | |
0dbf59418bfb45e7f93191e3e49627ee0ad3dfee | f3b233e5053e28fa95c549017bd75a30456eb50c | /jnk1_input/26/26-24_MD_NVT_rerun/set_7.py | 315e7341f06312978ff865a1d5b6bd3680458db7 | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 741 | py | import os
dir = '/mnt/scratch/songlin3/run/jnkl/L626/MD_NVT_rerun/ti_one-step/26_24/'
filesdir = dir + 'files/'
temp_prodin = filesdir + 'temp_prod_7.in'
temp_pbs = filesdir + 'temp_7.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
os.chdir("%6.5f" %(j))
workdir = dir + "%6.5f" %(j) + '/'
#prodin
prodin = workdir + "%6.5f_prod_7.in" %(j)
os.system("cp %s %s" %(temp_prodin, prodin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))
#PBS
pbs = workdir + "%6.5f_7.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#submit pbs
#os.system("qsub %s" %(pbs))
os.chdir(dir)
| [
"[email protected]"
] | |
43da45409a62395c63dc06135e37a58a4f087665 | 41c605bf3a002a757cb2344cff526d7a7ae56ea9 | /plotly/graph_objs/funnelarea/__init__.py | 1d50afd8acbf77872facfc0cdfb7eda983e73ff3 | [
"MIT"
] | permissive | Jonathan-MW/plotly.py | 9674b90b5de11fd9089e6afefd04b57bc4587829 | 7528c00772f44dee24c0df7e15d70a4852f171a8 | refs/heads/master | 2020-05-30T06:04:13.621478 | 2019-05-31T10:34:15 | 2019-05-31T10:34:15 | 189,571,988 | 2 | 0 | MIT | 2019-05-31T09:59:53 | 2019-05-31T09:59:53 | null | UTF-8 | Python | false | false | 62,191 | py |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Title(_BaseTraceHierarchyType):
# font
# ----
@property
def font(self):
"""
Sets the font used for `title`. Note that the title's font used
to be set by the now deprecated `titlefont` attribute.
The 'font' property is an instance of Font
that may be specified as:
- An instance of plotly.graph_objs.funnelarea.title.Font
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
colorsrc
Sets the source reference on plot.ly for color
.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for
family .
size
sizesrc
Sets the source reference on plot.ly for size
.
Returns
-------
plotly.graph_objs.funnelarea.title.Font
"""
return self['font']
@font.setter
def font(self, val):
self['font'] = val
# position
# --------
@property
def position(self):
"""
Specifies the location of the `title`. Note that the title's
position used to be set by the now deprecated `titleposition`
attribute.
The 'position' property is an enumeration that may be specified as:
- One of the following enumeration values:
['top left', 'top center', 'top right']
Returns
-------
Any
"""
return self['position']
@position.setter
def position(self, val):
self['position'] = val
# text
# ----
@property
def text(self):
"""
Sets the title of the chart. If it is empty, no title is
displayed. Note that before the existence of `title.text`, the
title's contents used to be defined as the `title` attribute
itself. This behavior has been deprecated.
The 'text' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self['text']
@text.setter
def text(self, val):
self['text'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'funnelarea'
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
font
Sets the font used for `title`. Note that the title's
font used to be set by the now deprecated `titlefont`
attribute.
position
Specifies the location of the `title`. Note that the
title's position used to be set by the now deprecated
`titleposition` attribute.
text
Sets the title of the chart. If it is empty, no title
is displayed. Note that before the existence of
`title.text`, the title's contents used to be defined
as the `title` attribute itself. This behavior has been
deprecated.
"""
def __init__(
self, arg=None, font=None, position=None, text=None, **kwargs
):
"""
Construct a new Title object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.funnelarea.Title
font
Sets the font used for `title`. Note that the title's
font used to be set by the now deprecated `titlefont`
attribute.
position
Specifies the location of the `title`. Note that the
title's position used to be set by the now deprecated
`titleposition` attribute.
text
Sets the title of the chart. If it is empty, no title
is displayed. Note that before the existence of
`title.text`, the title's contents used to be defined
as the `title` attribute itself. This behavior has been
deprecated.
Returns
-------
Title
"""
super(Title, self).__init__('title')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.funnelarea.Title
constructor must be a dict or
an instance of plotly.graph_objs.funnelarea.Title"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop('skip_invalid', False)
# Import validators
# -----------------
from plotly.validators.funnelarea import (title as v_title)
# Initialize validators
# ---------------------
self._validators['font'] = v_title.FontValidator()
self._validators['position'] = v_title.PositionValidator()
self._validators['text'] = v_title.TextValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('font', None)
self['font'] = font if font is not None else _v
_v = arg.pop('position', None)
self['position'] = position if position is not None else _v
_v = arg.pop('text', None)
self['text'] = text if text is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Textfont(_BaseTraceHierarchyType):
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, saddlebrown, salmon, sandybrown,
seagreen, seashell, sienna, silver, skyblue,
slateblue, slategray, slategrey, snow, springgreen,
steelblue, tan, teal, thistle, tomato, turquoise,
violet, wheat, white, whitesmoke, yellow,
yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self['color']
@color.setter
def color(self, val):
self['color'] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on plot.ly for color .
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['colorsrc']
@colorsrc.setter
def colorsrc(self, val):
self['colorsrc'] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The plotly service (at https://plot.ly or on-
premise) generates images on a server, where only a select
number of fonts are installed and supported. These include
"Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open
Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New
Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self['family']
@family.setter
def family(self, val):
self['family'] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on plot.ly for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['familysrc']
@familysrc.setter
def familysrc(self, val):
self['familysrc'] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self['size']
@size.setter
def size(self, val):
self['size'] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on plot.ly for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['sizesrc']
@sizesrc.setter
def sizesrc(self, val):
self['sizesrc'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'funnelarea'
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Textfont object
Sets the font used for `textinfo`.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.funnelarea.Textfont
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
Returns
-------
Textfont
"""
super(Textfont, self).__init__('textfont')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.funnelarea.Textfont
constructor must be a dict or
an instance of plotly.graph_objs.funnelarea.Textfont"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop('skip_invalid', False)
# Import validators
# -----------------
from plotly.validators.funnelarea import (textfont as v_textfont)
# Initialize validators
# ---------------------
self._validators['color'] = v_textfont.ColorValidator()
self._validators['colorsrc'] = v_textfont.ColorsrcValidator()
self._validators['family'] = v_textfont.FamilyValidator()
self._validators['familysrc'] = v_textfont.FamilysrcValidator()
self._validators['size'] = v_textfont.SizeValidator()
self._validators['sizesrc'] = v_textfont.SizesrcValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('color', None)
self['color'] = color if color is not None else _v
_v = arg.pop('colorsrc', None)
self['colorsrc'] = colorsrc if colorsrc is not None else _v
_v = arg.pop('family', None)
self['family'] = family if family is not None else _v
_v = arg.pop('familysrc', None)
self['familysrc'] = familysrc if familysrc is not None else _v
_v = arg.pop('size', None)
self['size'] = size if size is not None else _v
_v = arg.pop('sizesrc', None)
self['sizesrc'] = sizesrc if sizesrc is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Stream(_BaseTraceHierarchyType):
# maxpoints
# ---------
@property
def maxpoints(self):
"""
Sets the maximum number of points to keep on the plots from an
incoming stream. If `maxpoints` is set to 50, only the newest
50 points will be displayed on the plot.
The 'maxpoints' property is a number and may be specified as:
- An int or float in the interval [0, 10000]
Returns
-------
int|float
"""
return self['maxpoints']
@maxpoints.setter
def maxpoints(self, val):
self['maxpoints'] = val
# token
# -----
@property
def token(self):
"""
The stream id number links a data trace on a plot with a
stream. See https://plot.ly/settings for more details.
The 'token' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self['token']
@token.setter
def token(self, val):
self['token'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'funnelarea'
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
maxpoints
Sets the maximum number of points to keep on the plots
from an incoming stream. If `maxpoints` is set to 50,
only the newest 50 points will be displayed on the
plot.
token
The stream id number links a data trace on a plot with
a stream. See https://plot.ly/settings for more
details.
"""
def __init__(self, arg=None, maxpoints=None, token=None, **kwargs):
"""
Construct a new Stream object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.funnelarea.Stream
maxpoints
Sets the maximum number of points to keep on the plots
from an incoming stream. If `maxpoints` is set to 50,
only the newest 50 points will be displayed on the
plot.
token
The stream id number links a data trace on a plot with
a stream. See https://plot.ly/settings for more
details.
Returns
-------
Stream
"""
super(Stream, self).__init__('stream')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.funnelarea.Stream
constructor must be a dict or
an instance of plotly.graph_objs.funnelarea.Stream"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop('skip_invalid', False)
# Import validators
# -----------------
from plotly.validators.funnelarea import (stream as v_stream)
# Initialize validators
# ---------------------
self._validators['maxpoints'] = v_stream.MaxpointsValidator()
self._validators['token'] = v_stream.TokenValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('maxpoints', None)
self['maxpoints'] = maxpoints if maxpoints is not None else _v
_v = arg.pop('token', None)
self['token'] = token if token is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Marker(_BaseTraceHierarchyType):
# colors
# ------
@property
def colors(self):
"""
Sets the color of each sector. If not specified, the default
trace color set is used to pick the sector colors.
The 'colors' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self['colors']
@colors.setter
def colors(self, val):
self['colors'] = val
# colorssrc
# ---------
@property
def colorssrc(self):
"""
Sets the source reference on plot.ly for colors .
The 'colorssrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['colorssrc']
@colorssrc.setter
def colorssrc(self, val):
self['colorssrc'] = val
# line
# ----
@property
def line(self):
"""
The 'line' property is an instance of Line
that may be specified as:
- An instance of plotly.graph_objs.funnelarea.marker.Line
- A dict of string/value properties that will be passed
to the Line constructor
Supported dict properties:
color
Sets the color of the line enclosing each
sector. Defaults to the `paper_bgcolor` value.
colorsrc
Sets the source reference on plot.ly for color
.
width
Sets the width (in px) of the line enclosing
each sector.
widthsrc
Sets the source reference on plot.ly for width
.
Returns
-------
plotly.graph_objs.funnelarea.marker.Line
"""
return self['line']
@line.setter
def line(self, val):
self['line'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'funnelarea'
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
colors
Sets the color of each sector. If not specified, the
default trace color set is used to pick the sector
colors.
colorssrc
Sets the source reference on plot.ly for colors .
line
plotly.graph_objs.funnelarea.marker.Line instance or
dict with compatible properties
"""
def __init__(
self, arg=None, colors=None, colorssrc=None, line=None, **kwargs
):
"""
Construct a new Marker object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.funnelarea.Marker
colors
Sets the color of each sector. If not specified, the
default trace color set is used to pick the sector
colors.
colorssrc
Sets the source reference on plot.ly for colors .
line
plotly.graph_objs.funnelarea.marker.Line instance or
dict with compatible properties
Returns
-------
Marker
"""
super(Marker, self).__init__('marker')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.funnelarea.Marker
constructor must be a dict or
an instance of plotly.graph_objs.funnelarea.Marker"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop('skip_invalid', False)
# Import validators
# -----------------
from plotly.validators.funnelarea import (marker as v_marker)
# Initialize validators
# ---------------------
self._validators['colors'] = v_marker.ColorsValidator()
self._validators['colorssrc'] = v_marker.ColorssrcValidator()
self._validators['line'] = v_marker.LineValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('colors', None)
self['colors'] = colors if colors is not None else _v
_v = arg.pop('colorssrc', None)
self['colorssrc'] = colorssrc if colorssrc is not None else _v
_v = arg.pop('line', None)
self['line'] = line if line is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Insidetextfont(_BaseTraceHierarchyType):
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, saddlebrown, salmon, sandybrown,
seagreen, seashell, sienna, silver, skyblue,
slateblue, slategray, slategrey, snow, springgreen,
steelblue, tan, teal, thistle, tomato, turquoise,
violet, wheat, white, whitesmoke, yellow,
yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self['color']
@color.setter
def color(self, val):
self['color'] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on plot.ly for color .
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['colorsrc']
@colorsrc.setter
def colorsrc(self, val):
self['colorsrc'] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The plotly service (at https://plot.ly or on-
premise) generates images on a server, where only a select
number of fonts are installed and supported. These include
"Arial", "Balto", "Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old Standard TT", "Open
Sans", "Overpass", "PT Sans Narrow", "Raleway", "Times New
Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self['family']
@family.setter
def family(self, val):
self['family'] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on plot.ly for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['familysrc']
@familysrc.setter
def familysrc(self, val):
self['familysrc'] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self['size']
@size.setter
def size(self, val):
self['size'] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on plot.ly for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['sizesrc']
@sizesrc.setter
def sizesrc(self, val):
self['sizesrc'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'funnelarea'
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Insidetextfont object
Sets the font used for `textinfo` lying inside the sector.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
plotly.graph_objs.funnelarea.Insidetextfont
color
colorsrc
Sets the source reference on plot.ly for color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The
plotly service (at https://plot.ly or on-premise)
generates images on a server, where only a select
number of fonts are installed and supported. These
include "Arial", "Balto", "Courier New", "Droid Sans",,
"Droid Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for family .
size
sizesrc
Sets the source reference on plot.ly for size .
Returns
-------
Insidetextfont
"""
super(Insidetextfont, self).__init__('insidetextfont')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.funnelarea.Insidetextfont
constructor must be a dict or
an instance of plotly.graph_objs.funnelarea.Insidetextfont"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop('skip_invalid', False)
# Import validators
# -----------------
from plotly.validators.funnelarea import (
insidetextfont as v_insidetextfont
)
# Initialize validators
# ---------------------
self._validators['color'] = v_insidetextfont.ColorValidator()
self._validators['colorsrc'] = v_insidetextfont.ColorsrcValidator()
self._validators['family'] = v_insidetextfont.FamilyValidator()
self._validators['familysrc'] = v_insidetextfont.FamilysrcValidator()
self._validators['size'] = v_insidetextfont.SizeValidator()
self._validators['sizesrc'] = v_insidetextfont.SizesrcValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('color', None)
self['color'] = color if color is not None else _v
_v = arg.pop('colorsrc', None)
self['colorsrc'] = colorsrc if colorsrc is not None else _v
_v = arg.pop('family', None)
self['family'] = family if family is not None else _v
_v = arg.pop('familysrc', None)
self['familysrc'] = familysrc if familysrc is not None else _v
_v = arg.pop('size', None)
self['size'] = size if size is not None else _v
_v = arg.pop('sizesrc', None)
self['sizesrc'] = sizesrc if sizesrc is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Hoverlabel(_BaseTraceHierarchyType):
# align
# -----
@property
def align(self):
"""
Sets the horizontal alignment of the text content within hover
label box. Has an effect only if the hover label text spans
more two or more lines
The 'align' property is an enumeration that may be specified as:
- One of the following enumeration values:
['left', 'right', 'auto']
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
Any|numpy.ndarray
"""
return self['align']
@align.setter
def align(self, val):
self['align'] = val
# alignsrc
# --------
@property
def alignsrc(self):
"""
Sets the source reference on plot.ly for align .
The 'alignsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['alignsrc']
@alignsrc.setter
def alignsrc(self, val):
self['alignsrc'] = val
# bgcolor
# -------
@property
def bgcolor(self):
"""
Sets the background color of the hover labels for this trace
The 'bgcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, saddlebrown, salmon, sandybrown,
seagreen, seashell, sienna, silver, skyblue,
slateblue, slategray, slategrey, snow, springgreen,
steelblue, tan, teal, thistle, tomato, turquoise,
violet, wheat, white, whitesmoke, yellow,
yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self['bgcolor']
@bgcolor.setter
def bgcolor(self, val):
self['bgcolor'] = val
# bgcolorsrc
# ----------
@property
def bgcolorsrc(self):
"""
Sets the source reference on plot.ly for bgcolor .
The 'bgcolorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['bgcolorsrc']
@bgcolorsrc.setter
def bgcolorsrc(self, val):
self['bgcolorsrc'] = val
# bordercolor
# -----------
@property
def bordercolor(self):
"""
Sets the border color of the hover labels for this trace.
The 'bordercolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, saddlebrown, salmon, sandybrown,
seagreen, seashell, sienna, silver, skyblue,
slateblue, slategray, slategrey, snow, springgreen,
steelblue, tan, teal, thistle, tomato, turquoise,
violet, wheat, white, whitesmoke, yellow,
yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self['bordercolor']
@bordercolor.setter
def bordercolor(self, val):
self['bordercolor'] = val
# bordercolorsrc
# --------------
@property
def bordercolorsrc(self):
"""
Sets the source reference on plot.ly for bordercolor .
The 'bordercolorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['bordercolorsrc']
@bordercolorsrc.setter
def bordercolorsrc(self, val):
self['bordercolorsrc'] = val
# font
# ----
@property
def font(self):
"""
Sets the font used in hover labels.
The 'font' property is an instance of Font
that may be specified as:
- An instance of plotly.graph_objs.funnelarea.hoverlabel.Font
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
colorsrc
Sets the source reference on plot.ly for color
.
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on plot.ly for
family .
size
sizesrc
Sets the source reference on plot.ly for size
.
Returns
-------
plotly.graph_objs.funnelarea.hoverlabel.Font
"""
return self['font']
@font.setter
def font(self, val):
self['font'] = val
# namelength
# ----------
@property
def namelength(self):
"""
Sets the default length (in number of characters) of the trace
name in the hover labels for all traces. -1 shows the whole
name regardless of length. 0-3 shows the first 0-3 characters,
and an integer >3 will show the whole name if it is less than
that many characters, but if it is longer, will truncate to
`namelength - 3` characters and add an ellipsis.
        The 'namelength' property is an integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [-1, 9223372036854775807]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|numpy.ndarray
"""
return self['namelength']
@namelength.setter
def namelength(self, val):
self['namelength'] = val
# namelengthsrc
# -------------
@property
def namelengthsrc(self):
"""
Sets the source reference on plot.ly for namelength .
The 'namelengthsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['namelengthsrc']
@namelengthsrc.setter
def namelengthsrc(self, val):
self['namelengthsrc'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'funnelarea'
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
align
Sets the horizontal alignment of the text content
within hover label box. Has an effect only if the hover
            label text spans two or more lines
alignsrc
Sets the source reference on plot.ly for align .
bgcolor
Sets the background color of the hover labels for this
trace
bgcolorsrc
Sets the source reference on plot.ly for bgcolor .
bordercolor
Sets the border color of the hover labels for this
trace.
bordercolorsrc
Sets the source reference on plot.ly for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of characters) of
the trace name in the hover labels for all traces. -1
shows the whole name regardless of length. 0-3 shows
the first 0-3 characters, and an integer >3 will show
the whole name if it is less than that many characters,
but if it is longer, will truncate to `namelength - 3`
characters and add an ellipsis.
namelengthsrc
Sets the source reference on plot.ly for namelength .
"""
def __init__(
self,
arg=None,
align=None,
alignsrc=None,
bgcolor=None,
bgcolorsrc=None,
bordercolor=None,
bordercolorsrc=None,
font=None,
namelength=None,
namelengthsrc=None,
**kwargs
):
"""
Construct a new Hoverlabel object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.funnelarea.Hoverlabel
align
Sets the horizontal alignment of the text content
within hover label box. Has an effect only if the hover
            label text spans two or more lines
alignsrc
Sets the source reference on plot.ly for align .
bgcolor
Sets the background color of the hover labels for this
trace
bgcolorsrc
Sets the source reference on plot.ly for bgcolor .
bordercolor
Sets the border color of the hover labels for this
trace.
bordercolorsrc
Sets the source reference on plot.ly for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of characters) of
the trace name in the hover labels for all traces. -1
shows the whole name regardless of length. 0-3 shows
the first 0-3 characters, and an integer >3 will show
the whole name if it is less than that many characters,
but if it is longer, will truncate to `namelength - 3`
characters and add an ellipsis.
namelengthsrc
Sets the source reference on plot.ly for namelength .
Returns
-------
Hoverlabel
"""
super(Hoverlabel, self).__init__('hoverlabel')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.funnelarea.Hoverlabel
constructor must be a dict or
an instance of plotly.graph_objs.funnelarea.Hoverlabel"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop('skip_invalid', False)
# Import validators
# -----------------
from plotly.validators.funnelarea import (hoverlabel as v_hoverlabel)
# Initialize validators
# ---------------------
self._validators['align'] = v_hoverlabel.AlignValidator()
self._validators['alignsrc'] = v_hoverlabel.AlignsrcValidator()
self._validators['bgcolor'] = v_hoverlabel.BgcolorValidator()
self._validators['bgcolorsrc'] = v_hoverlabel.BgcolorsrcValidator()
self._validators['bordercolor'] = v_hoverlabel.BordercolorValidator()
self._validators['bordercolorsrc'
] = v_hoverlabel.BordercolorsrcValidator()
self._validators['font'] = v_hoverlabel.FontValidator()
self._validators['namelength'] = v_hoverlabel.NamelengthValidator()
self._validators['namelengthsrc'
] = v_hoverlabel.NamelengthsrcValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('align', None)
self['align'] = align if align is not None else _v
_v = arg.pop('alignsrc', None)
self['alignsrc'] = alignsrc if alignsrc is not None else _v
_v = arg.pop('bgcolor', None)
self['bgcolor'] = bgcolor if bgcolor is not None else _v
_v = arg.pop('bgcolorsrc', None)
self['bgcolorsrc'] = bgcolorsrc if bgcolorsrc is not None else _v
_v = arg.pop('bordercolor', None)
self['bordercolor'] = bordercolor if bordercolor is not None else _v
_v = arg.pop('bordercolorsrc', None)
self['bordercolorsrc'
] = bordercolorsrc if bordercolorsrc is not None else _v
_v = arg.pop('font', None)
self['font'] = font if font is not None else _v
_v = arg.pop('namelength', None)
self['namelength'] = namelength if namelength is not None else _v
_v = arg.pop('namelengthsrc', None)
self['namelengthsrc'
] = namelengthsrc if namelengthsrc is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
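# Illustrative usage sketch (not part of the generated module above, values are
# hypothetical): these hover-label settings are normally supplied through a
# trace constructor and validated against the properties documented above,
# e.g. something like
#
#     import plotly.graph_objs as go
#     trace = go.Funnelarea(
#         values=[5, 4, 3],
#         hoverlabel=go.funnelarea.Hoverlabel(
#             bgcolor='white', bordercolor='gray', namelength=-1))
#
# where namelength=-1 shows the full trace name in the hover label.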
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Domain(_BaseTraceHierarchyType):
# column
# ------
@property
def column(self):
"""
If there is a layout grid, use the domain for this column in
the grid for this funnelarea trace .
        The 'column' property is an integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [0, 9223372036854775807]
Returns
-------
int
"""
return self['column']
@column.setter
def column(self, val):
self['column'] = val
# row
# ---
@property
def row(self):
"""
If there is a layout grid, use the domain for this row in the
grid for this funnelarea trace .
        The 'row' property is an integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [0, 9223372036854775807]
Returns
-------
int
"""
return self['row']
@row.setter
def row(self, val):
self['row'] = val
# x
# -
@property
def x(self):
"""
Sets the horizontal domain of this funnelarea trace (in plot
fraction).
The 'x' property is an info array that may be specified as:
* a list or tuple of 2 elements where:
(0) The 'x[0]' property is a number and may be specified as:
- An int or float in the interval [0, 1]
(1) The 'x[1]' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
list
"""
return self['x']
@x.setter
def x(self, val):
self['x'] = val
# y
# -
@property
def y(self):
"""
Sets the vertical domain of this funnelarea trace (in plot
fraction).
The 'y' property is an info array that may be specified as:
* a list or tuple of 2 elements where:
(0) The 'y[0]' property is a number and may be specified as:
- An int or float in the interval [0, 1]
(1) The 'y[1]' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
list
"""
return self['y']
@y.setter
def y(self, val):
self['y'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'funnelarea'
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
column
If there is a layout grid, use the domain for this
column in the grid for this funnelarea trace .
row
If there is a layout grid, use the domain for this row
in the grid for this funnelarea trace .
x
Sets the horizontal domain of this funnelarea trace (in
plot fraction).
y
Sets the vertical domain of this funnelarea trace (in
plot fraction).
"""
def __init__(
self, arg=None, column=None, row=None, x=None, y=None, **kwargs
):
"""
Construct a new Domain object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.funnelarea.Domain
column
If there is a layout grid, use the domain for this
column in the grid for this funnelarea trace .
row
If there is a layout grid, use the domain for this row
in the grid for this funnelarea trace .
x
Sets the horizontal domain of this funnelarea trace (in
plot fraction).
y
Sets the vertical domain of this funnelarea trace (in
plot fraction).
Returns
-------
Domain
"""
super(Domain, self).__init__('domain')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.funnelarea.Domain
constructor must be a dict or
an instance of plotly.graph_objs.funnelarea.Domain"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop('skip_invalid', False)
# Import validators
# -----------------
from plotly.validators.funnelarea import (domain as v_domain)
# Initialize validators
# ---------------------
self._validators['column'] = v_domain.ColumnValidator()
self._validators['row'] = v_domain.RowValidator()
self._validators['x'] = v_domain.XValidator()
self._validators['y'] = v_domain.YValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('column', None)
self['column'] = column if column is not None else _v
_v = arg.pop('row', None)
self['row'] = row if row is not None else _v
_v = arg.pop('x', None)
self['x'] = x if x is not None else _v
_v = arg.pop('y', None)
self['y'] = y if y is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
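# Illustrative usage sketch (hypothetical values, not part of the generated
# module above): a domain confines the trace to a region of the figure, e.g.
# the left half of the plotting area:
#
#     go.Funnelarea(values=[3, 2, 1],
#                   domain=go.funnelarea.Domain(x=[0, 0.5], y=[0, 1]))
#
# Alternatively, `row` and `column` place the trace in a cell of a layout grid.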
from plotly.graph_objs.funnelarea import title
from plotly.graph_objs.funnelarea import marker
from plotly.graph_objs.funnelarea import hoverlabel
| [
"[email protected]"
] | |
9a350a1e02190f8e9335b851789a6fb8e807266f | ad8ded1e86419d13a692b974a4fe83786a4dbdb1 | /six_sept/programiz/one.py | b8beeb9195487f66e3c9d5e7ea608c9081490292 | [] | no_license | avengerryan/daily_practice_codes | 9934d7d6c1be2c8e89567e327ccd4af5a8eb89c2 | 34b1d8b772694de9414a15269fdc59284d74c11b | refs/heads/master | 2022-12-23T13:59:54.968089 | 2020-09-14T09:28:27 | 2020-09-14T09:28:27 | 295,375,091 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py |
# Illustrate different set operations
# define 2 set variables and let us perform set operations: union, intersection, difference and symmetric difference
# Python offers a datatype called set, whose elements must be unique.
# define two sets
E = {0, 2, 4, 6, 8}
N = {1, 2, 3, 4, 5}
# set union
print('Union of E and N is', E | N)
# set intersection
print('Intersection of E and N is', E & N)
# set difference
print('Difference of E and N is', E - N)
# set symmetric difference
print('Symmetric difference of E and N is', E ^ N)
| [
"[email protected]"
] | |
858ead6f49b3990656b4f1e64e109108e372eb19 | 3c46fc1dc783d852068e7cea4c2d889441f44e5c | /Yejun/a117_traversing_turtles.py | 132ca784ae84713a0710975993d48798c0c0924b | [] | no_license | YejunShin/ok | af0547da69c388aae57f0117a7522ae2e6a58734 | 3410b4bbba38fb94d223d471ebf976d6d55e6072 | refs/heads/main | 2023-08-20T23:28:33.031358 | 2021-10-27T19:57:18 | 2021-10-27T19:57:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 910 | py | import turtle as trtl
# create an empty list of turtles
my_turtles = []
# use interesting shapes and colors
turtle_shapes = ["arrow", "turtle", "circle", "square", "triangle", "classic", "arrow", "turtle", "circle", "square", "triangle", "classic"]
turtle_colors = ["red", "blue", "green", "orange", "purple", "gold", "red", "blue", "green", "orange", "purple", "gold"]
for s in turtle_shapes:
    t = trtl.Turtle(shape=s)
    my_turtles.append(t)
t_forward=50
t_right=0
t_thick=0
startx = t.xcor()
starty = t.ycor()
t.speed(0)
for t in my_turtles:
    t.pensize(t_thick)
    t.penup()
    new_color = turtle_colors.pop()
    t.fillcolor(new_color)
    t.pencolor(new_color)
    t.left(45)
    t.goto(startx, starty)
    t.pendown()
    t.right(t_right)
    t.forward(t_forward)
    t.penup()
    startx = t.xcor()
    starty = t.ycor()
    t_right+=45
    t_forward+=10
    t_thick+=2
wn = trtl.Screen()
wn.mainloop() | [
"[email protected]"
] | |
c258dad73d7f55d76bb09326f7f9827f92373fbe | 1ee26dbdab85038e6411712d9128aaa6500d1f51 | /rnn_model/model_tr.py | 1a725ebe4c6cdc9c838269d920034380d191ec2d | [] | no_license | YuxingZhang/prescription | a9ffa8f7d06f495010162f80319d9c532f4be685 | f5738846624b477f6fe80a3b953142479a3b6924 | refs/heads/master | 2021-05-04T10:45:26.556519 | 2017-01-31T10:17:23 | 2017-01-31T10:17:23 | 52,454,935 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,245 | py | # use linear 1-hidden layer neural network to combine lhs embedding with RNN embedding, then output the lhs embedding
import numpy as np
import theano
import theano.tensor as T
import lasagne
from collections import OrderedDict
from settings import CHAR_DIM, C2W_HDIM, WDIM, SCALE, N_BATCH, GRAD_CLIP, REGULARIZATION, LEARNING_RATE, MOMENTUM, GAMMA
NL1 = lasagne.nonlinearities.sigmoid
NL2 = lasagne.nonlinearities.tanh
NL3 = lasagne.nonlinearities.tanh
LR = lasagne.regularization.l2
WDIM = WDIM / 2
# margin cost defined in TransE
def margincost(pos_loss, neg_loss, margin):
out = margin + pos_loss - neg_loss
return T.sum(out * (out > 0))
# L2 distance between two Theano tensors, compute L2 distance for every row
def L2dist(left, right):
return T.sqrt(T.sum(T.sqr(left - right), axis=1))
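# Worked example of the margin cost above (plain arithmetic, for intuition):
# with margin GAMMA = 1.0, a positive-triple distance of 0.4 and a negative-
# triple distance of 1.7 give out = 1.0 + 0.4 - 1.7 = -0.3, which the
# (out > 0) mask clips to 0, i.e. no penalty; if the negative distance were
# only 0.9 the pair would contribute 1.0 + 0.4 - 0.9 = 0.5 to the summed loss.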
class charLM(object):
def __init__(self, n_char, n_lhs, n_rel, n_rhs, emb_dim=WDIM, pretrained=None): # is WDIM the RNN embedding dimension? yes
# params
if pretrained==None:
self.params = OrderedDict()
self.params = init_params(self.params, n_char, n_lhs, n_rel, n_rhs, emb_dim) # define n_rhs, emb_dim
else:
self.params = load_params_shared(pretrained)
self.n_rhs = n_rhs
# model
in_lhs, in_lhsn, emb_lhs, emb_lhsn = embedding_lhs(self.params, n_lhs, emb_dim)
in_rhs, in_rhsn, emb_rhs, emb_rhsn = embedding_rhs(self.params, n_rhs, emb_dim)
in_rel, emb_rel = embedding_rel(self.params, n_rel, emb_dim)
# N_BATCH for input size? or just None, because later we need to do validation and testing, can uses any size
# up to this point, we have emb_lhs, emb_lhsn, emb_rhs, emb_rhsn, emb_rel
# define loss
pred_rhs = emb_lhs + emb_rel # true lhs + rel
pred_lhs = emb_lhsn + emb_rel # negative lhs + rel
pred_rel = emb_rhs - emb_lhs # predicted relation, rhs - lhs, for visualization
# TODO remove the dist(lhs, rhs - rel) terms in the loss function
pos_loss_r = L2dist(pred_rhs, emb_rhs) # positive triple distance
pos_loss_l = L2dist(emb_lhs, emb_rhs - emb_rel) # TODO remove
neg_loss_r = L2dist(pred_rhs, emb_rhsn) # negative triple distance with corrupted rhs
#neg_loss_l = L2dist(pred_lhs, emb_rhs) # negative triple distance with corrupted lhs TODO uncomment
neg_loss_l = L2dist(emb_lhsn, emb_rhs - emb_rel) # negative triple distance with corrupted lhs
loss_rn = margincost(pos_loss_r, neg_loss_r, GAMMA) # GAMMA is the margin, GAMMA = 1.0 in TransE
loss_ln = margincost(pos_loss_l, neg_loss_l, GAMMA) # TODO replace pos_loss_l with pos_loss_r
loss = loss_rn + loss_ln
# do we need loss_ln? Yes, and how do we sample random lhs embedding? build a dict too
self.cost = T.mean(loss)
# can we only add regularization to the RNN parameters? yes, only pass RNN parameters
cost_only = T.mean(loss)
'''get_output can specify input, so don't need to define another embedding layer'''
# updates
self.lr = LEARNING_RATE
self.mu = MOMENTUM
updates = lasagne.updates.nesterov_momentum(self.cost, self.params.values(), self.lr, momentum=self.mu)
# try different lr, momentum
# theano functions
self.inps = [in_lhs, in_lhsn, in_rel, in_rhs, in_rhsn] # inputs for the function
self.cost_fn = theano.function(self.inps,cost_only)
self.encode_fn = theano.function([in_lhs], emb_lhs) # compute RNN embeddings given word (drug name)
self.train_fn = theano.function(self.inps,self.cost,updates=updates)
self.pred_right_fn = theano.function([in_lhs, in_rel], pred_rhs) # compute lhs + rel as predicted rhs
self.emb_right_fn = theano.function([in_rhs], emb_rhs) # compute only rhs embedding
self.pred_rel_fn = theano.function([in_lhs, in_rhs], pred_rel)
def pred_rel(self, in_lhs, in_rhs):
return self.pred_rel_fn(in_lhs, in_rhs)
def train(self, in_lhs, in_lhsn, in_rel, in_rhs, in_rhsn):
return self.train_fn(in_lhs, in_lhsn, in_rel, in_rhs, in_rhsn)
def validate(self, in_lhs, in_lhsn, in_rel, in_rhs, in_rhsn):
return self.cost_fn(in_lhs, in_lhsn, in_rel, in_rhs, in_rhsn)
def compute_emb_right_all(self): # compute a (n_rhs * emb_dim) numpy matrix, each row is an embedding for a right hand side entity
in_rhs_all = np.arange(self.n_rhs).astype('int32') # input pretend to compute the embedding for all right hand side entities
self.emb_right_all = self.emb_right_fn(in_rhs_all)
def encode(self, in_lhs):
return self.encode_fn(in_lhs)
def rank_right(self, in_lhs, in_rel, in_rhs): # return a len(in_lhs) size list, each element is the rank of the true rhs among all the rhs
pred_rhs_batch = self.pred_right_fn(in_lhs, in_rel)
right_ranks = []
for i in range(pred_rhs_batch.shape[0]):
true_idx = in_rhs[i]
distances = np.zeros(self.emb_right_all.shape[0])
for j in range(self.emb_right_all.shape[0]):
distances[j] = np.linalg.norm(pred_rhs_batch[i, :] - self.emb_right_all[j, :], 2)
rank = np.argsort(np.argsort(distances))
right_ranks += [rank[true_idx]]
return right_ranks
def update_learningrate(self):
self.lr = max(1e-5,self.lr / 2)
updates = lasagne.updates.nesterov_momentum(self.cost, self.params.values(), self.lr, momentum=self.mu)
self.train_fn = theano.function(self.inps,self.cost,updates=updates)
def save_model(self,save_path):
saveparams = OrderedDict()
for kk,vv in self.params.iteritems():
saveparams[kk] = vv.get_value()
np.savez(save_path,**saveparams)
def print_params(self):
for kk,vv in self.params.iteritems():
print("Param {} Max {} Min {}".format(kk, np.max(vv.get_value()), np.min(vv.get_value())))
def init_params(params, n_char, n_lhs, n_rel, n_rhs, emb_dim):
np.random.seed(0)
# lookup table # TODO when using float 32, there will be an error in theano
# "An update must have the same type as the original shared variable", why is that
# Initialize parameters for lhs entity embedding
params['W_emb_lhs'] = theano.shared(np.random.normal(loc=0., scale=SCALE, size=(n_lhs, emb_dim)).astype('float64'), name='W_emb_lhs')
# Initialize parameters for rhs entity embedding
params['W_emb_rhs'] = theano.shared(np.random.normal(loc=0., scale=SCALE, size=(n_rhs, emb_dim)).astype('float64'), name='W_emb_rhs')
# Initialize parameters for relation embedding
params['W_emb_rel'] = theano.shared(np.random.normal(loc=0., scale=SCALE, size=(n_rel, emb_dim)).astype('float64'), name='W_emb_rel')
# Initialize parameters for dense layer
return params
# by Yuxing Zhang
def embedding_rhs(params, n_rhs, emb_dim):
'''
Embedding part for right hand side entity embedding and right hand side negative entity embedding
:param params: dict to store parameters
'''
# input variables that is right hand side entity
emb_in_rhs = T.ivector() # B * 1 vector, where each row is a number between 0 and (n_rhs - 1) as the index
emb_in_rhsn = T.ivector() # B * 1 vector, where each row is a number between 0 and (n_rhs - 1) as the index
# Input layer over entity
l_in_rhs = lasagne.layers.InputLayer(shape=(N_BATCH, ), name = 'rhs_input') # removing input_var to reuse it for negative rhs
    # Embedding layer for rhs entity; emb_dim should equal the embedding dimension from the RNN model.
l_emb_rhs = lasagne.layers.EmbeddingLayer(l_in_rhs, input_size=n_rhs, output_size=emb_dim, W=params['W_emb_rhs'])
return emb_in_rhs, emb_in_rhsn, lasagne.layers.get_output(l_emb_rhs, emb_in_rhs), lasagne.layers.get_output(l_emb_rhs, emb_in_rhsn)
# by Yuxing Zhang
def embedding_rel(params, n_rel, emb_dim):
'''
Embedding part for right hand side entity embedding
:param params: dict to store parameters
'''
# input variables that is the relation index
emb_in_rel = T.ivector() # B * 1 vector, where each row is a number between 0 and (n_rel - 1) as the index
# Input layer over relation
l_in_rel = lasagne.layers.InputLayer(shape=(N_BATCH, ), input_var=emb_in_rel, name = 'rel_input')
    # Embedding layer for relation; emb_dim should equal the embedding dimension from the RNN model.
l_emb_rel = lasagne.layers.EmbeddingLayer(l_in_rel, input_size=n_rel, output_size=emb_dim, W=params['W_emb_rel'])
return emb_in_rel, lasagne.layers.get_output(l_emb_rel)
# by Yuxing Zhang
def embedding_lhs(params, n_lhs, emb_dim):
'''
Embedding part for left hand side entity embedding and left hand side negative entity embedding
:param params: dict to store parameters
'''
    # input variables that are left hand side entity indices
    emb_in_lhs = T.ivector() # B * 1 vector, where each row is a number between 0 and (n_lhs - 1) as the index
    emb_in_lhsn = T.ivector() # B * 1 vector, where each row is a number between 0 and (n_lhs - 1) as the index
# Input layer over entity
    l_in_lhs = lasagne.layers.InputLayer(shape=(N_BATCH, ), name = 'lhs_input') # removing input_var to reuse it for negative lhs
    # Embedding layer for lhs entity; emb_dim should equal the embedding dimension from the RNN model.
l_emb_lhs = lasagne.layers.EmbeddingLayer(l_in_lhs, input_size=n_lhs, output_size=emb_dim, W=params['W_emb_lhs'])
# extra input for unseen entities 0
return emb_in_lhs, emb_in_lhsn, lasagne.layers.get_output(l_emb_lhs, emb_in_lhs), lasagne.layers.get_output(l_emb_lhs, emb_in_lhsn)
def load_params(path):
"""
Load previously saved model
"""
params = OrderedDict()
with open(path,'r') as f:
npzfile = np.load(f)
for kk, vv in npzfile.iteritems():
params[kk] = vv
return params
def load_params_shared(path):
"""
Load previously saved model
"""
params = OrderedDict()
with open(path,'r') as f:
npzfile = np.load(f)
for kk, vv in npzfile.iteritems():
params[kk] = theano.shared(vv, name=kk)
return params
| [
"[email protected]"
] | |
69876f7acf5e525e713fa8bb7a03cbeed0882f58 | 0a2702822c02f05a4e58342409adec9e2895a1ea | /svm.py | 50d962d6eddebdbcc7c75ff84269b2e2e08bb075 | [] | no_license | jerrybonnell/philograph | 3a8f4583955f8fb80ea79f62b8c62a42ad4d2088 | e855549f185f55ef09094ae40dc97d7e6e8695c6 | refs/heads/master | 2020-03-25T13:28:48.026375 | 2018-08-07T06:32:35 | 2018-08-07T06:32:35 | 143,827,760 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,022 | py | '''
name: svm.py
last modified: 14 mar 18
scikit support vector machine
'''
from sklearn.datasets import load_files
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn import svm
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline
from sklearn.model_selection import GridSearchCV
from sklearn.metrics.pairwise import cosine_similarity
import numpy as np
from utility import write_file
from tqdm import tqdm
import random
import operator
import time
doc_to_num_incorrect = {}
doc_to_themes_incorrect = {}
documents = set([])
def run_svm(all_files, num_runs, out_file, verbose_level):
"""
interfaces with the first model
"""
out = ""
if out_file is not None:
write_file(out_file + "_svm.txt", time.ctime() + "\n===\n")
for i in range(0, len(all_files.target)):
doc_name = all_files.filenames[i]
doc_name = doc_name[doc_name.rfind("/") + 1:doc_name.rfind(".")]
doc_name_whole = doc_name[:doc_name.rfind("_part")]
# each document name serves as a key. the number of times
# it is incorrect when it serves in the testing set
# will be its value
documents.add(doc_name_whole)
doc_to_num_incorrect[doc_name] = 0
doc_to_themes_incorrect[doc_name] = []
# run the SVM classifier for a user-specified number of times
(avg_accuracy_rate, out) = avg_run(num_runs, all_files, out, verbose_level)
sorted_dict = sorted(doc_to_num_incorrect.items(),
key=operator.itemgetter(1), reverse=True)
if verbose_level > 0:
if out_file is None:
print(out)
else:
write_file(out_file + "_svm.txt", out)
format_output(all_files, out_file)
return avg_accuracy_rate
def format_output(all_files, out_file):
# a dictionary of dictionaries
theme_to_doc = {}
out = ""
# go through the entire corpus
for i in range(0, len(all_files.filenames)):
# get document name with path removed, e.g. true_saints_part_3
doc_name = all_files.filenames[i]
doc_name = doc_name[doc_name.rfind("/") + 1:doc_name.rfind(".")]
# "whole" document without any subdivison, e.g. true_saints
doc_name_whole = doc_name[:doc_name.rfind("_part")]
# get this document's target index
theme_index = all_files.target[i]
if theme_index in theme_to_doc:
# we have visited a document from this theme already. check to see if
# this document is part of a sermon that exists in its sub-directory
doc_to_array = theme_to_doc[theme_index]
if doc_name_whole in doc_to_array:
# if it is, then increment this sermon's number of parts
doc_to_array[doc_name_whole][0] += 1
# and keep a tally of the total # of times this sermon was wrong
# when any of its subdivisions served in the testing set
doc_to_array[doc_name_whole][1] += doc_to_num_incorrect[doc_name]
else:
# if it hasn't, initialize a new array
doc_to_array[doc_name_whole] = [1,doc_to_num_incorrect[doc_name]]
# if the target index is not in the dictionary, this means we have not yet
# visited any sermons from this theme. initialize a new dictionary and add
# a new key (this document's name) whose value is an array of size 2
# where
# [# parts, total # times wrong when its parts served in testing set]
else:
doc_to_array = {}
doc_to_array[doc_name_whole] = [1,doc_to_num_incorrect[doc_name]]
theme_to_doc[theme_index] = doc_to_array
# now print out the data
for theme_index, documents in theme_to_doc.items():
# sort from best to worst; note that we're sorting according
# to the second element in the array
sorted_dict = sorted(documents.items(),
key=lambda doc: doc[1][1], reverse=False)
# print theme name, e.g. sinners
out += all_files.target_names[theme_index] + "\n"
for document in sorted_dict:
# print the full sermon name
out += "{:>3s} {}".format(">",document[0]) + "\n"
for i in range(0,document[1][0]):
# finally, print out the subdivision information
doc_part = document[0] + "_part_" + str(i)
out += "{:>10s}part {:<6d} {:<2d}/{:>2d}{:>3s}".format("", i,
doc_to_num_incorrect[doc_part],
len(doc_to_themes_incorrect[doc_part]), "")
if doc_to_num_incorrect[doc_part] > 0:
out += str(doc_to_themes_incorrect[doc_part]) + "\n"
else:
out += "\n"
# print out total number of times this sermon was incorrect; note this
# is the value that is sorted by
out += '{:>3s} total {:<3d}\n'.format("*", document[1][1]) + "\n"
if out_file is None:
print(out)
else:
write_file(out_file + "_svm.txt", out)
def avg_run(avg_num, all_files, out, verbose_level):
total_accuracy = 0
# run the process for a user-specified number of times
for i in tqdm(range(0, avg_num)):
# run the classifier and obtain accuracy measures
(train_accuracy,
test_accuracy, out) = sermons_predict_param_search(all_files, out, verbose_level)
out += "\n run {}: train {} test {}\n".format(i + 1, train_accuracy, test_accuracy)
out += "------------------------\n"
total_accuracy += test_accuracy
print()
avg_accuracy_rate = total_accuracy / float(avg_num)
out += "\n"
out += "avg accuracy on testing set => {}\n".format(avg_accuracy_rate)
return (avg_accuracy_rate, out)
def sermons_predict_param_search(all_files, out, verbose_level):
# list of indices matching documents to serve in the testing set
#test_set_index = random.sample(range(0, len(all_files.target)), 40)
test_set_index = random.sample(range(0, len(all_files.target)),
int(0.4 * len(all_files.target)))
# list of indices matching documents to serve in the training set
# by taking the set difference of all numbers between 0 and the
# total number of documents with the testing set indices, we obtain
# the indices for the training set
training_set_index = list(
set(range(0, len(all_files.target))).difference(set(test_set_index)))
# list containing the content for each document in the test set
test_set_data = [all_files.data[i] for i in test_set_index]
# list containing the (target) label for each document in the test set
test_set_target = [all_files.target[i] for i in test_set_index]
# likewise for the training set
training_set_data = [all_files.data[i] for i in training_set_index]
training_set_target = [all_files.target[i] for i in training_set_index]
# calculate how many parts of a sermon serve in the training set
# (only for verbose mode)
doc_to_parts_in_training = {}
for i in range(0, len(all_files.target)):
doc_name = all_files.filenames[i]
doc_name = doc_name[doc_name.rfind("/") + 1:doc_name.rfind("_part")]
doc_to_parts_in_training[doc_name] = 0
for i in range(0, len(training_set_index)):
doc_name = all_files.filenames[training_set_index[i]]
doc_name = doc_name[doc_name.rfind("/") + 1:doc_name.rfind("_part")]
doc_to_parts_in_training[doc_name] += 1
# train SVM
# scikit-learn provides a pipeline to put everything together; performs
# all the steps of sermons_predict() at once
text_clf = Pipeline([('vect', CountVectorizer()),
('tfidf', TfidfTransformer()),
('clf', SVC(kernel='linear'))])
# select parameters for testing in SVM classifier
parameters = {
'tfidf__use_idf': (True, False),
'clf__C': (1, .5, .1),
'clf__gamma': (10, 1, .1)}
# for parameter tuning, use Grid Search to find optimal combination;
# the operation can be computationally prohibitive
gs_clf = GridSearchCV(text_clf, parameters, n_jobs=-1)
# fit the classifier according to training set
gs_clf = gs_clf.fit(training_set_data, training_set_target)
# predict testing set
predicted = gs_clf.predict(test_set_data)
# calculate accuracy on the testing set
total_wrong = 0
for i in range(0, len(predicted)):
# get name of documents that were mis-classified
doc_name = all_files.filenames[test_set_index[i]]
# document name includes path and extension; remove both
doc_name = doc_name[doc_name.rfind("/") + 1:doc_name.rfind(".")]
# add this misclassified theme to list of all misclassified for
# this document
doc_to_themes_incorrect[doc_name].append(
all_files.target_names[predicted[i]])
if predicted[i] != test_set_target[i]:
total_wrong += 1
# increment the ``incorrectness'' value for this document
doc_to_num_incorrect[doc_name] += 1
if verbose_level > 1:
out += "\"{}\" ({}) ==> {}\n".format(doc_name,
all_files.target_names[test_set_target[i]],
all_files.target_names[predicted[i]])
test_accuracy = ((len(predicted) - total_wrong) * 100 / len(predicted))
predicted = gs_clf.predict(training_set_data)
# calculate accuracy on the training set
total_wrong = 0
for i in range(0, len(predicted)):
if predicted[i] != training_set_target[i]:
total_wrong += 1
train_accuracy = ((len(predicted) - total_wrong) * 100 / len(predicted))
return (train_accuracy, test_accuracy, out) | [
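# Hypothetical driver sketch: the functions above expect `all_files` to be a
# scikit-learn bunch such as the one returned by load_files (imported at the
# top of this file), with .data, .target, .target_names and .filenames, where
# the file names end in "_part_<n>". The corpus path below is an assumption
# used only for illustration.
if __name__ == "__main__":
    corpus = load_files("sermons_corpus/", encoding="utf-8")
    average_accuracy = run_svm(corpus, num_runs=5, out_file=None, verbose_level=1)
    print("average test accuracy: {}".format(average_accuracy))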
"[email protected]"
] | |
4841102767e2a141ed6c0b60f40bb9fd4e63fccc | c406ea0f453d74842f38b50fc19e73a7be5eb552 | /plotWindow.py | f9f8ff9929738bf5d187c2788bbbfb0f532c6b1b | [] | no_license | sanatanSharma/Packet-Sniffer | a31f24ea6cad05c61b3ffb6319a820ad59ead445 | b4f0204a595276754585509e6fab37fbe61c9a08 | refs/heads/master | 2021-01-10T03:57:30.102301 | 2016-03-27T20:07:29 | 2016-03-27T20:07:29 | 53,147,365 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,799 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'plotWindow.ui'
#
# Created: Wed Mar 9 22:13:04 2016
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import netifaces
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_plotWindow(object):
def setupUi(self, plotWindow):
plotWindow.setObjectName(_fromUtf8("plotWindow"))
plotWindow.resize(441, 286)
self.centralwidget = QtGui.QWidget(plotWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.dateEdit = QtGui.QDateEdit(self.centralwidget)
self.dateEdit.setGeometry(QtCore.QRect(200, 90, 121, 31))
self.dateEdit.setObjectName(_fromUtf8("dateEdit"))
self.label = QtGui.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(120, 100, 81, 17))
self.label.setObjectName(_fromUtf8("label"))
self.pushButton = QtGui.QPushButton(self.centralwidget)
self.pushButton.setGeometry(QtCore.QRect(170, 150, 121, 27))
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.label_2 = QtGui.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(70, 50, 151, 17))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.comboBox = QtGui.QComboBox(self.centralwidget)
self.comboBox.setGeometry(QtCore.QRect(230, 50, 161, 27))
self.comboBox.setObjectName(_fromUtf8("comboBox"))
plotWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtGui.QStatusBar(plotWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
plotWindow.setStatusBar(self.statusbar)
self.retranslateUi(plotWindow)
QtCore.QMetaObject.connectSlotsByName(plotWindow)
def retranslateUi(self, plotWindow):
plotWindow.setWindowTitle(_translate("plotWindow", "MainWindow", None))
self.label.setText(_translate("plotWindow", "Enter Date:", None))
self.pushButton.setText(_translate("plotWindow", "Show Plot", None))
self.label_2.setText(_translate("plotWindow", "Choose Your Interface:", None))
def setOptions(self):
l = netifaces.interfaces()
for i in range(0,len(l)):
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.setItemText(i, _translate("Sniffer", l[i], None))
| [
"[email protected]"
] | |
f6012aea9501255f4a81796810ce43a16f9d87ce | 305c91b5cf6c8df96c6f0f0190f7258f784e97fe | /build/lib/intersect/featureExtractors/video/eyesTracking.py | 66b9518deae55afaf4614e2d478eeae74b0f2aed | [] | no_license | sarat-ravi/old-intersect | aed55731da35d3d83f2d52779486ea1291262cec | f5529146831ba860c6e462dc4f5d1c9cadb900b7 | refs/heads/master | 2021-05-27T11:15:25.963102 | 2012-10-16T16:26:38 | 2012-10-16T16:26:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,225 | py | import cv
haarEyes = cv.Load('../../util/opencv/data/haarcascades/haarcascade_eye.xml')
#haarNose = cv.Load('../../util/opencv/data/haarcascades/haarcascade_mcs_nose.xml')
#create window
cv.NamedWindow("w1", cv.CV_WINDOW_AUTOSIZE)
#get camera
camera = cv.CaptureFromCAM(-1)
storage = cv.CreateMemStorage()
#the index of every frame
frame_index = 0
# one in every `downsamplingFactor` frames (here, every 2nd frame) is sent for haar detection
downsamplingFactor = 2
frame = cv.QueryFrame(camera)
detectedEyes = cv.HaarDetectObjects(frame, haarEyes, storage)
def processFrame():
    global detectedEyes
    global frame
    frame = cv.QueryFrame(camera)
    if frame_index % downsamplingFactor == 0:
        temp = cv.HaarDetectObjects(frame, haarEyes, storage)
        if temp:
            detectedEyes = temp
    if True:
        for face in detectedEyes:
            cv.Rectangle(frame,(face[0][0],face[0][1]),
                         (face[0][0]+face[0][2],face[0][1]+face[0][3]),
                         cv.RGB(155, 55, 200),2)
    cv.ShowImage("w1", frame)
    c = cv.WaitKey(1)
    c = cv.WaitKey(1)
    c = cv.WaitKey(1)
    c = cv.WaitKey(1)
    c = cv.WaitKey(1)
    c = cv.WaitKey(1)
while True:
    frame_index = frame_index + 1
    processFrame()
| [
"[email protected]"
] | |
cf9672714199bd408118c63132d3fa48bad96896 | 9ce8a834e0c3bc10d5bf68102dc1d4ca68969221 | /naukri_com/naukri_com/pipelines.py | d3d841758aea57710c738d5f6c05e5a0bf7730e3 | [] | no_license | goyal-vi/Scrapy | 9d7795a21a085386cb7f19e560da69091c01630d | 10f533a5b4826ceaea6dae810d23be9bcb0b59a1 | refs/heads/master | 2020-03-29T06:43:05.606244 | 2018-10-19T16:34:11 | 2018-10-19T16:34:11 | 149,637,401 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,004 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import datetime
class NullValueHandlerPipeline(object):
def process_item(self, item, spider):
attributes = ["job_title", "experience_required", "location", "company_name", "job_description", "key_skills", "job_description_url", "salary", "posted_on", "posted_by"]
for attr in attributes:
try:
item[attr]
except KeyError:
item[attr] = ""
return item
#class CalculatorPostedOn(object):
# def process_item(self, item, spider):
# today_date = datetime.datetime.today()
# if item['posted_on']:
# value = item['posted_on'][0]
# if 'day' in value:
# num = int(value.split()[0])
# item['posted_on'][0] = today_date - datetime.timedelta(days = num)
| [
"[email protected]"
] | |
5a8dd459ebdd018f8c66fc542395695651c5d86e | 6951c0757c8e1e96ac0506739995de084a8f3a9b | /overview/p22_functions/test_task2.py | 8a5250b3e45900f9b33daff2e968b36912cc10ef | [] | no_license | ElazarNeeman/python_zth | 3b6df77e76ed60ae39027c7c4d551f7dd0b5959d | cf50b5b8fb7ce27ebdd4bf7b0fb8642fff5a675a | refs/heads/master | 2022-01-10T18:45:19.216481 | 2019-07-15T05:58:41 | 2019-07-15T05:58:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 220 | py |
from unittest import TestCase
from utils import check_output
class TestTask(TestCase):
def test_task(self):
check_output("""[34, 15]
sample is 12, avg is 11.0
True
sample is 10, avg is 11.0
False
""")
| [
"[email protected]"
] | |
c5cef68b6cec709ede680fcc8ca5b24855609bb7 | f5c0e62aa391ab22136a3cb1549c124ed64fde3c | /python/020/problem_020a.py | 794b8fb8400f46cf2f747ce9d96a0d3aa57fffcc | [] | no_license | vivekpabani/codeabbey | cc2381d5f99c396a4f182b8ee9f2b1a8f1997451 | 410acb4c30edcdeadb0013cac614c5b44a8f7e75 | refs/heads/master | 2021-01-15T22:29:13.974958 | 2015-06-13T22:27:54 | 2015-06-13T22:27:54 | 29,312,571 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | #!/usr/bin/env python
"""
Problem Definition :
"""
__author__ = 'vivek'
import time
startTime = time.clock()
def count(s):
return str(len(filter(lambda x : x in 'aeiouy', s)))
filename = 'input.txt'
data = open(filename, 'rt').readlines()
words = ' '.join(data[1:]).split()  # the words may be spread over one or more lines
print ' '.join(count(word) for word in words[:int(data[0])])
print "Run time...{} secs \n".format(round(time.clock() - startTime, 4))
| [
"[email protected]"
] | |
37fefa600fcdc4c403857557f6a6eb94e25b1234 | d3daa3f262ff7735fbb831b72c7c21f7b9f34496 | /Visualization of COVID19 data/world.py | 23cfed7ce5a87428320bfd00eecc430fa199eb60 | [] | no_license | chandbud5/ML-geek | b5f83cb750057635f7d7bfc249c3177e59a4ed6d | 2d3ccc70ea5ff56a33a64b7ea542e9746636269b | refs/heads/master | 2022-08-26T10:32:11.496920 | 2020-05-28T09:30:59 | 2020-05-28T09:30:59 | 264,594,076 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,013 | py | import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
df = pd.read_csv("D:\Datasets\Corona\owid-covid-data.csv")
dfw = df.loc[df['location']=='World']
dt = dfw['date'].values
y = dfw['total_cases'].values
plt.plot(dt,y,'r--')
plt.xticks(np.arange(0, 138, 10), rotation=-90)
plt.title("Total cases in World")
plt.savefig("Total-cases-in-World.png",dpi=200)
plt.show()
plt.plot(dfw['date'], dfw['new_cases'], '--')
plt.xticks(np.arange(0, 138, 10), rotation=-90)
plt.title("New cases in World")
plt.savefig("New-cases-in-World.png",dpi=200)
plt.show()
dfi = df.loc[df['location']=="India"]
plt.plot(dfi['date'], dfi['total_cases'],'r--')
plt.xticks(np.arange(0, 138, 10), rotation=-90)
plt.title("Total cases in India")
plt.savefig("Total-cases-in-India.png",dpi=200)
plt.show()
plt.plot(dfi['date'], dfi['new_cases'], '--')
plt.xticks(np.arange(0, 138, 10), rotation=-90)
plt.title("New cases in India")
plt.savefig("New-cases-in-India.png",dpi=200)
plt.show() | [
"[email protected]"
] | |
25dd3a7f073792d6cc69bc98506aae38efb5cb1d | e52d0feabfa7a4799bc2e8b5fd3b084986372b62 | /wyw/wyw/wsgi.py | 42ccf60fee3c34f101be8dee89fcc798633a5397 | [] | no_license | AManMustHaveACode/test | c54250bb974f0dabd1454deb6a4341f9fdd04cf9 | f018bea6b8e5bdd22923c2fddf2c9dda5df3ac12 | refs/heads/master | 2021-05-13T16:59:19.425465 | 2018-01-10T14:00:09 | 2018-01-10T14:00:09 | 109,942,467 | 0 | 0 | null | 2017-11-08T07:39:04 | 2017-11-08T07:29:28 | null | UTF-8 | Python | false | false | 383 | py | """
WSGI config for wyw project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wyw.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
77e03ec4b537fd119009cff84d08e25fe63682a2 | 95b1c0959b66d87a5adb63019394757ad44e0a0f | /Blog/migrations/0007_alter_content_subject.py | ac2a4cc896d0e2a6273f1cd4ca1f013866a07f7e | [] | no_license | RashedEmon/E-Learning | f365ecb829f0232f9c5d04ed569271d4105c3b6a | 1a27f44fdc53eba5538f1e282dbb0bf006f2aedf | refs/heads/master | 2023-08-10T22:01:48.786845 | 2021-09-13T06:25:28 | 2021-09-13T06:25:28 | 405,856,364 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | # Generated by Django 3.2.4 on 2021-06-29 17:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('Blog', '0006_content_subject'),
]
operations = [
migrations.AlterField(
model_name='content',
name='subject',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Blog.subject'),
),
]
| [
"[email protected]"
] | |
45f21066e6b40766a385b6d44a52ee81c90e9038 | f337bc5f179b25969ba73e7680ffb0a0616e3b97 | /python/Programmers/압축.py | d885803689d55ac754078635c0323baf65573504 | [] | no_license | raiders032/PS | 31771c5496a70f4730402698f743bbdc501e49a3 | 08e1384655975b868e80521167ec876b96fa01c8 | refs/heads/master | 2023-06-08T10:21:00.230154 | 2023-06-04T01:38:08 | 2023-06-04T01:38:08 | 349,925,005 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 639 | py | """
https://school.programmers.co.kr/learn/courses/30/lessons/17684
[Round 3] Compression
Solution 1: 100 points
"""
def solution(msg):
answer = []
dictionary = dict()
for i in range(1, 27):
dictionary[chr(i - 1 + ord('A'))] = i
last_sequence = 27
i = 0
while i < len(msg):
w = msg[i]
while i + 1 < len(msg) and w + msg[i + 1] in dictionary:
w += msg[i + 1]
i += 1
answer.append(dictionary[w])
if i + 1 < len(msg):
dictionary[w + msg[i + 1]] = last_sequence
last_sequence += 1
i += 1
return answer
print(solution('KAKAO')) | [
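# Expected output for the sample call above: [11, 1, 27, 15]
# ('K' -> 11, 'A' -> 1, then the previously added "KA" entry (27) is reused,
# and the final 'O' -> 15.)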
"[email protected]"
] | |
480fd5595117d08f8f1e74d0ddbda121f7dadff2 | 4c4785ed8f5fed43891872f69d01fa3bc1192136 | /16_Machine Learning with Tree-Based Models in Python/09_Gradient_Boosting.py | 891da5c5aea3f935a733b8efe99ef932f0776d21 | [] | no_license | CodeInDna/Data_Scientist_With_Python | 6ed04534948fe692169b56c72b4021b32eb35751 | 9820f4ae232317cec4d13a3eb26e3a59ba98c31b | refs/heads/master | 2020-09-23T14:19:37.426778 | 2019-12-23T15:39:15 | 2019-12-23T15:39:15 | 225,519,493 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,723 | py | # Define the GB regressor
# You'll now revisit the Bike Sharing Demand dataset that was introduced in the previous chapter. Recall that your task is to predict the bike rental demand using historical weather data from the Capital Bikeshare program in Washington, D.C.. For this purpose, you'll be using a gradient boosting regressor.
# As a first step, you'll start by instantiating a gradient boosting regressor which you will train in the next exercise.
# Import GradientBoostingRegressor
from sklearn.ensemble import GradientBoostingRegressor
# Instantiate gb
gb = GradientBoostingRegressor(n_estimators=200,
max_depth=4,
random_state=2)
# Train the GB regressor
# You'll now train the gradient boosting regressor gb that you instantiated in the previous exercise and predict test set labels.
# The dataset is split into 80% train and 20% test. Feature matrices X_train and X_test, as well as the arrays y_train and y_test are available in your workspace. In addition, we have also loaded the model instance gb that you defined in the previous exercise.
# Fit gb to the training set
gb.fit(X_train, y_train)
# Predict test set labels
y_pred = gb.predict(X_test)
# Evaluate the GB regressor
# Now that the test set predictions are available, you can use them to evaluate the test set Root Mean Squared Error (RMSE) of gb.
# y_test and predictions y_pred are available in your workspace.
# Import mean_squared_error as MSE
from sklearn.metrics import mean_squared_error as MSE
# Compute MSE
mse_test = MSE(y_test, y_pred)
# Compute RMSE
rmse_test = mse_test ** (0.5)
# Print RMSE
print('Test set RMSE of gb: {:.3f}'.format(rmse_test))
# <script.py> output:
# Test set RMSE of gb: 52.065 | [
"[email protected]"
] | |
44d1c356795a3f33a7b06a0a997d7a64361c2e5a | a043b5c950596cbfe65720840b4122d76fbb9a33 | /baselines/clinc_intent/sngp.py | 503a5bc840cd22ecdacd70b659a09f6160b0b5be | [
"Apache-2.0"
] | permissive | lilujunai/uncertainty-baselines | e5a73b4ec4c8b9b6f3b54b02064598406298253d | 7f529d426d67f27185b1526e74a5cfcfe0e937f7 | refs/heads/master | 2023-01-19T07:03:54.055881 | 2020-11-19T23:51:34 | 2020-11-19T23:52:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,832 | py | # coding=utf-8
# Copyright 2020 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bidirectional encoder representations from transformers (BERT) with SNGP.
Spectral-normalized neural Gaussian process (SNGP) [1] is a simple method to
improve a deterministic neural network's uncertainty. It simply applies spectral
normalization to the hidden layers, and then replace the dense output layer
with a Gaussian process layer.
## References:
[1]: Jeremiah Liu et al. Simple and Principled Uncertainty Estimation with
Deterministic Deep Learning via Distance Awareness.
_arXiv preprint arXiv:2006.10108_, 2020.
https://arxiv.org/abs/2006.10108
[2]: Zhiyun Lu, Eugene Ie, Fei Sha. Uncertainty Estimation with Infinitesimal
Jackknife. _arXiv preprint arXiv:2006.07584_, 2020.
https://arxiv.org/abs/2006.07584
"""
import os
import time
from absl import app
from absl import flags
from absl import logging
import edward2 as ed
import tensorflow as tf
import uncertainty_baselines as ub
import bert_utils # local file import
import uncertainty_metrics as um
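# A minimal sketch of the SNGP recipe described in the module docstring: a
# spectral-normalized hidden layer followed by a random-feature Gaussian
# process output layer. It is illustrative only and is not the model actually
# built by `ub.models.SngpBertBuilder` below; the edward2 layer names mirror
# the kwargs this script passes to the builder, and the hyper-parameter values
# are placeholders taken from the default flags.
def _sngp_output_head_sketch(num_classes, hidden_dim=768):
  """Returns a toy [SpectralNormalization(Dense) -> GP] output head."""
  spec_norm_dense = ed.layers.SpectralNormalization(
      tf.keras.layers.Dense(hidden_dim, activation='relu'),
      iteration=1,
      norm_multiplier=0.95)
  gp_output_layer = ed.layers.RandomFeatureGaussianProcess(
      units=num_classes,
      num_inducing=2048,
      gp_kernel_scale=2.,
      normalize_input=True,
      gp_cov_momentum=0.999,
      gp_cov_ridge_penalty=1e-3)
  def head(features):
    # The GP layer returns (posterior-mean logits, posterior covariance).
    return gp_output_layer(spec_norm_dense(features))
  return head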
# Data flags
flags.DEFINE_string(
'data_dir', None,
'Directory containing the TFRecord datasets and the tokenizer for Clinc '
'Intent Detection Data.')
# Checkpoint flags
flags.DEFINE_string(
'bert_dir', None,
'Directory to BERT pre-trained checkpoints and config files.')
flags.DEFINE_string(
'bert_ckpt_dir', None, 'Directory to BERT pre-trained checkpoints. '
    'If None then default to {bert_dir}/bert_model.ckpt.')
flags.DEFINE_string(
'bert_config_dir', None, 'Directory to BERT config files. '
    'If None then default to {bert_dir}/bert_config.json.')
# Normalization flags.
flags.DEFINE_bool(
'use_layer_norm_att', True,
'Whether to apply layer normalization to the self-attention layers.')
flags.DEFINE_bool(
'use_layer_norm_ffn', True,
'Whether to apply layer normalization to the feedforward layers.')
flags.DEFINE_bool(
'use_spec_norm_att', False,
'Whether to apply spectral normalization to the self-attention layers.')
flags.DEFINE_bool(
'use_spec_norm_ffn', False,
'Whether to apply spectral normalization to the feedforward layers.')
flags.DEFINE_bool(
'use_spec_norm_plr', True,
'Whether to apply spectral normalization to the final CLS pooler layer.')
flags.DEFINE_integer(
'spec_norm_iteration', 1,
'Number of power iterations to perform for estimating '
'the spectral norm of weight matrices.')
flags.DEFINE_float('spec_norm_bound', .95,
'Upper bound to spectral norm of weight matrices.')
# Gaussian process flags.
flags.DEFINE_bool('use_gp_layer', True,
'Whether to use Gaussian process as the output layer.')
flags.DEFINE_float('gp_bias', 0., 'The bias term for GP layer.')
flags.DEFINE_float(
'gp_scale', 2.,
'The length-scale parameter for the RBF kernel of the GP layer.')
flags.DEFINE_integer(
'gp_hidden_dim', 2048,
'The hidden dimension of the GP layer, which corresponds to the number of '
'random features used for the approximation.')
flags.DEFINE_bool(
'gp_input_normalization', True,
'Whether to normalize the input using LayerNorm for GP layer.'
'This is similar to automatic relevance determination (ARD) in the classic '
'GP learning.')
flags.DEFINE_float('gp_cov_ridge_penalty', 1e-3,
'Ridge penalty parameter for GP posterior covariance.')
flags.DEFINE_float(
'gp_cov_discount_factor', 0.999,
'The discount factor to compute the moving average of precision matrix.')
flags.DEFINE_float(
'gp_mean_field_factor', 1e-4,
'The tunable multiplicative factor used in the mean-field approximation '
'for the posterior mean of softmax Gaussian process. If -1 then use '
'posterior mode instead of posterior mean. See [2] for detail.')
# Optimization and evaluation flags
flags.DEFINE_integer('seed', 42, 'Random seed.')
flags.DEFINE_integer('per_core_batch_size', 64, 'Batch size per TPU core/GPU.')
flags.DEFINE_integer('eval_batch_size', 512, 'Batch size for CPU evaluation.')
flags.DEFINE_float(
'base_learning_rate', 5e-5,
'Base learning rate when total batch size is 128. It is '
'scaled by the ratio of the total batch size to 128.')
flags.DEFINE_integer(
'checkpoint_interval', 40,
'Number of epochs between saving checkpoints. Use -1 to '
'never save checkpoints.')
flags.DEFINE_integer('evaluation_interval', 2,
'Number of epochs between evaluation.')
flags.DEFINE_integer('num_bins', 15, 'Number of bins for ECE.')
flags.DEFINE_string('output_dir', '/tmp/clinc_intent', 'Output directory.')
flags.DEFINE_integer('train_epochs', 40, 'Number of training epochs.')
flags.DEFINE_float(
'warmup_proportion', 0.1,
'Proportion of training to perform linear learning rate warmup for. '
'E.g., 0.1 = 10% of training.')
flags.DEFINE_integer(
'num_mc_samples', 1,
'Number of Monte Carlo forward passes to collect for ensemble prediction.'
'Currently can only be 1 since the model is deterministic.')
# Accelerator flags.
flags.DEFINE_bool('use_gpu', False, 'Whether to run on GPU or otherwise TPU.')
flags.DEFINE_bool('use_bfloat16', False, 'Whether to use mixed precision.')
flags.DEFINE_integer('num_cores', 8, 'Number of TPU cores or number of GPUs.')
flags.DEFINE_string('tpu', None,
'Name of the TPU. Only used if use_gpu is False.')
FLAGS = flags.FLAGS
# TODO(jereliu): Add support for Monte Carlo Dropout.
def resolve_bert_ckpt_and_config_dir(bert_dir, bert_config_dir, bert_ckpt_dir):
"""Resolves BERT checkpoint and config file directories."""
missing_ckpt_or_config_dir = not (bert_ckpt_dir and bert_config_dir)
if missing_ckpt_or_config_dir:
if not bert_dir:
raise ValueError('bert_dir cannot be empty.')
if not bert_config_dir:
bert_config_dir = os.path.join(bert_dir, 'bert_config.json')
if not bert_ckpt_dir:
bert_ckpt_dir = os.path.join(bert_dir, 'bert_model.ckpt')
return bert_config_dir, bert_ckpt_dir
def main(argv):
del argv # unused arg
tf.io.gfile.makedirs(FLAGS.output_dir)
logging.info('Saving checkpoints at %s', FLAGS.output_dir)
tf.random.set_seed(FLAGS.seed)
if FLAGS.use_gpu:
logging.info('Use GPU')
strategy = tf.distribute.MirroredStrategy()
else:
logging.info('Use TPU at %s',
FLAGS.tpu if FLAGS.tpu is not None else 'local')
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=FLAGS.tpu)
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
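  # Total (global) train batch size across all TPU cores / GPUs.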
batch_size = FLAGS.per_core_batch_size * FLAGS.num_cores
train_dataset_builder = ub.datasets.ClincIntentDetectionDataset(
batch_size=FLAGS.per_core_batch_size,
eval_batch_size=FLAGS.per_core_batch_size,
data_dir=FLAGS.data_dir,
data_mode='ind')
ind_dataset_builder = ub.datasets.ClincIntentDetectionDataset(
batch_size=batch_size,
eval_batch_size=FLAGS.eval_batch_size,
data_dir=FLAGS.data_dir,
data_mode='ind')
ood_dataset_builder = ub.datasets.ClincIntentDetectionDataset(
batch_size=batch_size,
eval_batch_size=FLAGS.eval_batch_size,
data_dir=FLAGS.data_dir,
data_mode='ood')
all_dataset_builder = ub.datasets.ClincIntentDetectionDataset(
batch_size=batch_size,
eval_batch_size=FLAGS.eval_batch_size,
data_dir=FLAGS.data_dir,
data_mode='all')
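  # Evaluation covers three splits: 'clean' = in-domain test set, 'ood' = out-of-scope only,
  # and 'all' = in-domain and out-of-scope combined.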
dataset_builders = {
'clean': ind_dataset_builder,
'ood': ood_dataset_builder,
'all': all_dataset_builder
}
train_dataset = train_dataset_builder.build(
split=ub.datasets.base.Split.TRAIN)
ds_info = train_dataset_builder.info
feature_size = ds_info['feature_size']
  # num_classes is the number of in-scope intents plus the single out-of-scope intent
num_classes = ds_info['num_classes'] + 1
steps_per_epoch = ds_info['num_train_examples'] // batch_size
test_datasets = {}
steps_per_eval = {}
for dataset_name, dataset_builder in dataset_builders.items():
test_datasets[dataset_name] = dataset_builder.build(
split=ub.datasets.base.Split.TEST)
steps_per_eval[dataset_name] = (
dataset_builder.info['num_test_examples'] // FLAGS.eval_batch_size)
if FLAGS.use_bfloat16:
policy = tf.keras.mixed_precision.experimental.Policy('mixed_bfloat16')
tf.keras.mixed_precision.experimental.set_policy(policy)
summary_writer = tf.summary.create_file_writer(
os.path.join(FLAGS.output_dir, 'summaries'))
with strategy.scope():
logging.info('Building BERT model')
logging.info('use_gp_layer=%s', FLAGS.use_gp_layer)
logging.info('use_spec_norm_att=%s', FLAGS.use_spec_norm_att)
logging.info('use_spec_norm_ffn=%s', FLAGS.use_spec_norm_ffn)
logging.info('use_layer_norm_att=%s', FLAGS.use_layer_norm_att)
logging.info('use_layer_norm_ffn=%s', FLAGS.use_layer_norm_ffn)
bert_config_dir, bert_ckpt_dir = resolve_bert_ckpt_and_config_dir(
FLAGS.bert_dir, FLAGS.bert_config_dir, FLAGS.bert_ckpt_dir)
bert_config = bert_utils.create_config(bert_config_dir)
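    # Hyperparameters of the random-feature Gaussian process output layer (see the gp_* flags).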
gp_layer_kwargs = dict(
num_inducing=FLAGS.gp_hidden_dim,
gp_kernel_scale=FLAGS.gp_scale,
gp_output_bias=FLAGS.gp_bias,
normalize_input=FLAGS.gp_input_normalization,
gp_cov_momentum=FLAGS.gp_cov_discount_factor,
gp_cov_ridge_penalty=FLAGS.gp_cov_ridge_penalty)
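    # Hyperparameters for spectral normalization of the Transformer weight matrices.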
spec_norm_kwargs = dict(
iteration=FLAGS.spec_norm_iteration,
norm_multiplier=FLAGS.spec_norm_bound)
model, bert_encoder = ub.models.SngpBertBuilder(
num_classes=num_classes,
bert_config=bert_config,
gp_layer_kwargs=gp_layer_kwargs,
spec_norm_kwargs=spec_norm_kwargs,
use_gp_layer=FLAGS.use_gp_layer,
use_spec_norm_att=FLAGS.use_spec_norm_att,
use_spec_norm_ffn=FLAGS.use_spec_norm_ffn,
use_layer_norm_att=FLAGS.use_layer_norm_att,
use_layer_norm_ffn=FLAGS.use_layer_norm_ffn,
use_spec_norm_plr=FLAGS.use_spec_norm_plr)
optimizer = bert_utils.create_optimizer(
FLAGS.base_learning_rate,
steps_per_epoch=steps_per_epoch,
epochs=FLAGS.train_epochs,
warmup_proportion=FLAGS.warmup_proportion)
logging.info('Model input shape: %s', model.input_shape)
logging.info('Model output shape: %s', model.output_shape)
logging.info('Model number of weights: %s', model.count_params())
metrics = {
'train/negative_log_likelihood': tf.keras.metrics.Mean(),
'train/accuracy': tf.keras.metrics.SparseCategoricalAccuracy(),
'train/loss': tf.keras.metrics.Mean(),
'train/ece': um.ExpectedCalibrationError(num_bins=FLAGS.num_bins),
}
checkpoint = tf.train.Checkpoint(model=model, optimizer=optimizer)
latest_checkpoint = tf.train.latest_checkpoint(FLAGS.output_dir)
initial_epoch = 0
if latest_checkpoint:
# checkpoint.restore must be within a strategy.scope() so that optimizer
# slot variables are mirrored.
checkpoint.restore(latest_checkpoint)
logging.info('Loaded checkpoint %s', latest_checkpoint)
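      # optimizer.iterations counts completed training steps, so integer division by
      # steps_per_epoch recovers the epoch to resume from.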
initial_epoch = optimizer.iterations.numpy() // steps_per_epoch
else:
# load BERT from initial checkpoint
bert_encoder, _, _ = bert_utils.load_bert_weight_from_ckpt(
bert_model=bert_encoder,
bert_ckpt_dir=bert_ckpt_dir,
repl_patterns=ub.models.bert_sngp.CHECKPOINT_REPL_PATTERNS)
logging.info('Loaded BERT checkpoint %s', bert_ckpt_dir)
# Finally, define test metrics outside the accelerator scope for CPU eval.
metrics.update({
'test/negative_log_likelihood': tf.keras.metrics.Mean(),
'test/accuracy': tf.keras.metrics.SparseCategoricalAccuracy(),
'test/ece': um.ExpectedCalibrationError(num_bins=FLAGS.num_bins),
'test/stddev': tf.keras.metrics.Mean(),
})
for dataset_name, test_dataset in test_datasets.items():
if dataset_name != 'clean':
metrics.update({
'test/nll_{}'.format(dataset_name):
tf.keras.metrics.Mean(),
'test/accuracy_{}'.format(dataset_name):
tf.keras.metrics.SparseCategoricalAccuracy(),
'test/ece_{}'.format(dataset_name):
um.ExpectedCalibrationError(num_bins=FLAGS.num_bins),
'test/stddev_{}'.format(dataset_name):
tf.keras.metrics.Mean(),
})
metrics.update({
'test/auroc_all': tf.keras.metrics.AUC(curve='ROC'),
'test/auprc_all': tf.keras.metrics.AUC(curve='PR')
})
@tf.function
def train_step(iterator):
"""Training StepFn."""
def step_fn(inputs):
"""Per-Replica StepFn."""
features, labels = bert_utils.create_feature_and_label(
inputs, feature_size)
with tf.GradientTape() as tape:
# Set learning phase to enable dropout etc during training.
logits = model(features, training=True)
if isinstance(logits, tuple):
# If model returns a tuple of (logits, covmat), extract logits
logits, _ = logits
if FLAGS.use_bfloat16:
logits = tf.cast(logits, tf.float32)
negative_log_likelihood = tf.reduce_mean(
tf.keras.losses.sparse_categorical_crossentropy(
labels, logits, from_logits=True))
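        # model.losses holds the regularization penalties (e.g. L2) registered by the Keras layers.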
l2_loss = sum(model.losses)
loss = negative_log_likelihood + l2_loss
# Scale the loss given the TPUStrategy will reduce sum all gradients.
scaled_loss = loss / strategy.num_replicas_in_sync
grads = tape.gradient(scaled_loss, model.trainable_variables)
optimizer.apply_gradients(zip(grads, model.trainable_variables))
probs = tf.nn.softmax(logits)
metrics['train/ece'].update_state(labels, probs)
metrics['train/loss'].update_state(loss)
metrics['train/negative_log_likelihood'].update_state(
negative_log_likelihood)
metrics['train/accuracy'].update_state(labels, logits)
strategy.run(step_fn, args=(next(iterator),))
@tf.function
def test_step(iterator, dataset_name):
"""Evaluation StepFn."""
def step_fn(inputs):
"""Per-Replica StepFn."""
features, labels = bert_utils.create_feature_and_label(
inputs, feature_size)
# Compute ensemble prediction over Monte Carlo forward-pass samples.
logits_list = []
stddev_list = []
for _ in range(FLAGS.num_mc_samples):
logits = model(features, training=False)
if isinstance(logits, tuple):
# If model returns a tuple of (logits, covmat), extract both.
logits, covmat = logits
else:
covmat = tf.eye(FLAGS.eval_batch_size)
if FLAGS.use_bfloat16:
logits = tf.cast(logits, tf.float32)
covmat = tf.cast(covmat, tf.float32)
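        # Mean-field adjustment: scale the logits with the GP predictive variance so the softmax
        # probabilities better approximate the posterior predictive (see --gp_mean_field_factor).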
logits = ed.layers.utils.mean_field_logits(
logits, covmat, mean_field_factor=FLAGS.gp_mean_field_factor)
stddev = tf.sqrt(tf.linalg.diag_part(covmat))
logits_list.append(logits)
stddev_list.append(stddev)
# Logits dimension is (num_samples, batch_size, num_classes).
logits_list = tf.stack(logits_list, axis=0)
stddev_list = tf.stack(stddev_list, axis=0)
stddev = tf.reduce_mean(stddev_list, axis=0)
probs_list = tf.nn.softmax(logits_list)
probs = tf.reduce_mean(probs_list, axis=0)
labels_broadcasted = tf.broadcast_to(
labels, [FLAGS.num_mc_samples, labels.shape[0]])
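      # Ensemble NLL: -log(average over MC samples of p(y)), computed with logsumexp for stability.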
log_likelihoods = -tf.keras.losses.sparse_categorical_crossentropy(
labels_broadcasted, logits_list, from_logits=True)
negative_log_likelihood = tf.reduce_mean(
-tf.reduce_logsumexp(log_likelihoods, axis=[0]) +
tf.math.log(float(FLAGS.num_mc_samples)))
if dataset_name == 'clean':
metrics['test/negative_log_likelihood'].update_state(
negative_log_likelihood)
metrics['test/accuracy'].update_state(labels, probs)
metrics['test/ece'].update_state(labels, probs)
metrics['test/stddev'].update_state(stddev)
else:
metrics['test/nll_{}'.format(dataset_name)].update_state(
negative_log_likelihood)
metrics['test/accuracy_{}'.format(dataset_name)].update_state(
labels, probs)
metrics['test/ece_{}'.format(dataset_name)].update_state(labels, probs)
metrics['test/stddev_{}'.format(dataset_name)].update_state(stddev)
if dataset_name == 'all':
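        # In the 'all' split, label 150 marks the out-of-scope class (in-scope intents use 0-149);
        # the OOD score is one minus the maximum softmax probability.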
ood_labels = tf.cast(labels == 150, labels.dtype)
ood_probs = 1. - tf.reduce_max(probs, axis=-1)
metrics['test/auroc_{}'.format(dataset_name)].update_state(
ood_labels, ood_probs)
metrics['test/auprc_{}'.format(dataset_name)].update_state(
ood_labels, ood_probs)
step_fn(next(iterator))
train_iterator = iter(train_dataset)
start_time = time.time()
for epoch in range(initial_epoch, FLAGS.train_epochs):
logging.info('Starting to run epoch: %s', epoch)
for step in range(steps_per_epoch):
train_step(train_iterator)
current_step = epoch * steps_per_epoch + (step + 1)
max_steps = steps_per_epoch * FLAGS.train_epochs
time_elapsed = time.time() - start_time
steps_per_sec = float(current_step) / time_elapsed
eta_seconds = (max_steps - current_step) / steps_per_sec
message = ('{:.1%} completion: epoch {:d}/{:d}. {:.1f} steps/s. '
'ETA: {:.0f} min. Time elapsed: {:.0f} min'.format(
current_step / max_steps, epoch + 1, FLAGS.train_epochs,
steps_per_sec, eta_seconds / 60, time_elapsed / 60))
if step % 20 == 0:
logging.info(message)
if epoch % FLAGS.evaluation_interval == 0:
for dataset_name, test_dataset in test_datasets.items():
test_iterator = iter(test_dataset)
logging.info('Testing on dataset %s', dataset_name)
for step in range(steps_per_eval[dataset_name]):
if step % 20 == 0:
logging.info('Starting to run eval step %s of epoch: %s', step,
epoch)
test_step(test_iterator, dataset_name)
logging.info('Done with testing on %s', dataset_name)
logging.info('Train Loss: %.4f, Accuracy: %.2f%%',
metrics['train/loss'].result(),
metrics['train/accuracy'].result() * 100)
logging.info('Test NLL: %.4f, Accuracy: %.2f%%',
metrics['test/negative_log_likelihood'].result(),
metrics['test/accuracy'].result() * 100)
total_results = {
name: metric.result() for name, metric in metrics.items()
}
with summary_writer.as_default():
for name, result in total_results.items():
tf.summary.scalar(name, result, step=epoch + 1)
for metric in metrics.values():
metric.reset_states()
if (FLAGS.checkpoint_interval > 0 and
(epoch + 1) % FLAGS.checkpoint_interval == 0):
checkpoint_name = checkpoint.save(
os.path.join(FLAGS.output_dir, 'checkpoint'))
logging.info('Saved checkpoint to %s', checkpoint_name)
if __name__ == '__main__':
app.run(main)
| [
"[email protected]"
] | |
0d221ea9321948901060631c15e09af6c5d79f3e | 8040e42ae636f971fb8018f5a2b17b28e864b6c8 | /Lesson 3/closedsummation.py | 7f4aaab0febc822db9ec89282958d15b8fd0e3a3 | [] | no_license | mariesstl/SWDV-610-3W-20SP2 | 9d5aee54ea63975301b53a7c6f73ffbb052f6d0d | 4f75296e43caf58123123ee6cbf28779d975714a | refs/heads/master | 2021-05-18T07:08:44.655391 | 2020-05-03T06:25:57 | 2020-05-03T06:25:57 | 251,172,572 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | def sumofNumber(number):
return(number*(number+1))/2
for i in range(5):
print("Sum is %d required:%10.7f seconds:"%sumofNumber(100000))
| [
"[email protected]"
] | |
a946b4dc8f65dae073a32eb56d2cd8c524cc983d | a8c59aa8bbc7fd6435e1c97c21f7a5ce4fa9c219 | /TreeDS/InOrder.py | 495e19f576201085578bac236f918ee3d68cf3f9 | [] | no_license | vsikarwar/Algorithms | 7529843245f7c813cb85ad8d32c5ec77bdc1f681 | c8331de29c92ce0cdb6f28b31d3913b126dc7d53 | refs/heads/master | 2021-05-31T11:25:50.579943 | 2016-06-20T07:43:48 | 2016-06-20T07:43:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | '''
http://www.geeksforgeeks.org/inorder-tree-traversal-without-recursion-and-without-stack/
Inorder Tree Traversal without recursion and without stack!
'''
#TODO | [
"[email protected]"
] | |
9a9082d5a71fccbd64436f9906a2f481e3e6bae2 | 17fb6b997c2ab707651fe719f1456dfae3aae052 | /uvaonlinejudge/10731 - Test.py | 6a37032df15370809e370189129a179521a58861 | [] | no_license | dmitry-pechersky/algorithms | 40b6184400bcf818c2590b9413ce1bf510b3b355 | 74f13f6e256c2e0c1503ec4bafa6b3e0f227fe2c | refs/heads/master | 2023-05-31T14:20:51.882551 | 2023-05-11T06:59:53 | 2023-05-11T06:59:53 | 122,305,451 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,665 | py | def read_test_case():
m = int(input())
if m == 0:
return None
adj_list = []
dic, words = {}, []
for i in range(m):
letters = input().split()
for letter in letters:
if letter not in dic:
words.append(letter)
dic[letter] = len(words) - 1
adj_list.append([])
adj_list[dic[letters[-1]]].extend(dic[letter] for letter in letters[:-1])
return len(words), adj_list, words
def strong_connect(adj_list, v, depth, low, d, stack, on_stack, components):
depth[v], low[v] = d + 1, d + 1
stack.append(v)
on_stack[v] = True
for u in adj_list[v]:
if depth[u] == 0:
strong_connect(adj_list, u, depth, low, d + 1, stack, on_stack, components)
if on_stack[u]:
low[v] = min(low[v], low[u])
if depth[v] == low[v]:
component = []
while True:
u = stack.pop()
on_stack[u] = False
component.append(u)
if v == u:
break
components.append(component)
def tarjan_scc(n, adj_list):
low, depth = [0] * n, [0] * n
stack, on_stack = [], [False] * n
components = []
for v in range(n):
if depth[v] == 0:
strong_connect(adj_list, v, depth, low, 0, stack, on_stack, components)
return components
if __name__ == '__main__':
for i, (n, adj_list, words) in enumerate(iter(read_test_case, None)):
if i > 0:
print()
components = tarjan_scc(n, adj_list)
for row in sorted(sorted(words[i] for i in component) for component in components):
print(*row)
| [
"[email protected]"
] | |
10f26951642d1479230b1d7838ed6428fc350b43 | a2de196944d01850b7796d86e76daa48fa0e6e02 | /faceCount/cameraShow.py | 4ee69041758aeba473e222ba84fab77f6629b992 | [] | no_license | kimasenbeck/Stanchion | 98a829f60878a312d81b1034c5e17786c38a08dc | 27599914368c395239676ee3d0ccf8017e659dd2 | refs/heads/master | 2020-05-30T14:26:32.709007 | 2015-09-20T08:54:16 | 2015-09-20T08:54:16 | 42,785,659 | 0 | 0 | null | 2015-09-20T14:13:44 | 2015-09-19T18:55:03 | Python | UTF-8 | Python | false | false | 260 | py | import cv2
cap = cv2.VideoCapture(1)
while (True):
ret, image = cap.read()
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
cv2.imshow("camera", image)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
cap.release()
cv2.destroyAllWindows()
| [
"[email protected]"
] | |
d5253492be0fb197bfd0496a00963b2b762e1b3d | 08822413c041552656e322e5fdbfbdc69f39c0da | /build_db.py | dfd50d50855ea3b9d7dee96e481f5e1a2540ff92 | [] | no_license | hwa0jang/CareU | 2844c8d171fc681f22bd985dcd033d198d1a2fae | dc91766b276780b6c500d02ee6d0940ebb6fc048 | refs/heads/main | 2023-02-01T10:41:59.567277 | 2020-12-15T21:18:34 | 2020-12-15T21:18:34 | 320,557,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 767 | py | import sqlite3
import argparse
parser = argparse.ArgumentParser(description="arguments for building a test database.")
parser.add_argument('--db', default='hospital.db', help='sqlite3 database')
parser.add_argument('--schema', default='schema.sql', help='schema definition')
parser.add_argument('--data', default='hospital.sql', help='data')
args = parser.parse_args()
def init_db(dump_bulkdata=False):
db = sqlite3.connect(
args.db,
detect_types=sqlite3.PARSE_DECLTYPES
)
with open(args.schema, 'rt') as f_schema:
db.executescript(f_schema.read())
if (dump_bulkdata == True):
with open(args.data) as f_data:
db.executescript(f_data.read())
if __name__ == "__main__":
init_db(dump_bulkdata=True) | [
"[email protected]"
] | |
c7b46febd9a80ad6259fb48260fe8a7b196963ef | 15cedab2ca5660fa7282036fee61974d47c1862f | /property/migrations/0001_initial.py | 790661255d8cccecc4df1887f95e7b915d0ecaaf | [] | no_license | Gaurav-bh/Property_management | 586066b90f4b3a16e8471c807223320cfe855cda | 16f3fd4dfa0eb387bd8b7e7738e697834225ea73 | refs/heads/master | 2022-11-25T21:00:17.328613 | 2020-07-30T04:41:53 | 2020-07-30T04:41:53 | 281,425,726 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 926 | py | # Generated by Django 3.0 on 2020-03-25 17:11
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Property',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('Property_type', models.CharField(choices=[('S', 'sale'), ('R', 'rent')], max_length=10)),
('price', models.PositiveIntegerField()),
('area', models.DecimalField(decimal_places=2, max_digits=5)),
('beds_number', models.PositiveIntegerField()),
('baths_number', models.PositiveIntegerField()),
('garages_number', models.PositiveIntegerField()),
],
),
]
| [
"[email protected]"
] | |
c5baea97447b402d4a27d5d287f85562afbc6f20 | 139bcabc7f724e5ff163eb065bbca5f78034450d | /tests/test_meaning.py | 907337c0356afa4b5febc8a9b1949ed63376663b | [
"Apache-2.0"
] | permissive | cdpierse/breame | b612a6e6a960030dafe643b3e7ed25d271c6cb27 | 1bd0758ff625b155e5bc3687e6c614a8755b0e6d | refs/heads/main | 2023-08-17T07:46:28.213314 | 2021-10-04T10:21:06 | 2021-10-04T10:21:06 | 408,470,171 | 8 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | from breame.data.meaning_constants import DIFFERENT_MEANINGS_US_UK_COMMON
from breame.meanings import different_meanings_exist, get_meaning_definitions
def test_different_meanings_exist():
for meaning in DIFFERENT_MEANINGS_US_UK_COMMON:
assert different_meanings_exist(meaning)
def test_different_meanings_exist_empty():
assert not different_meanings_exist("house")
def test_get_meaning_definitions():
for meaning in DIFFERENT_MEANINGS_US_UK_COMMON:
assert get_meaning_definitions(meaning) != {}
def test_get_meaning_definitions_empty():
assert get_meaning_definitions("test") == {}
| [
"[email protected]"
] | |
4854ad6f74c351ff18efff9437bc795e9de8c5e7 | 8da0ad6aed9c7f25ad6c2938f5106b174c943cd8 | /trainer.py | ed3aff1e263c0d49a75cd8d3dd755aed2dab5189 | [] | no_license | CreaterLL/Pretraining-CLIR | 4d01f7af6cd2431aca18f3d4efe159b3460ff099 | 32423a7af33beb0412e08064c0883f9740029e6f | refs/heads/master | 2023-06-24T11:15:14.873534 | 2021-07-20T17:46:29 | 2021-07-20T17:46:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,488 | py | import os
import math
import time
import copy
import numpy as np
from logging import getLogger
from collections import OrderedDict
import apex
import torch
from torch import nn
from torch.nn import functional as F
from torch.nn.utils import clip_grad_norm_
from torch.utils.data import DataLoader
from torch.utils.data.distributed import DistributedSampler
from src.optim import get_optimizer
from src.utils import to_cuda, dict_to_cuda
from src.dataset.wiki_dataset import wiki_rr_trainset, wiki_qlm_trainset, DataCollatorForRelevanceRanking, DataCollatorForMaskedQueryPrediction
logger = getLogger()
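# Minimal usage sketch (hypothetical driver loop; the real entry point lives elsewhere in this
# repo, and the loss weights `lambda_qlm` / `lambda_rr` below are placeholders, not defined here):
#     trainer = Trainer(model, tokenizer, params)
#     for _ in range(params.epoch_size):
#         for lang_pair in params.qlm_steps:
#             trainer.qlm_step(lang_pair, lambda_qlm)
#         for lang_pair in params.rr_steps:
#             trainer.rr_step(lang_pair, lambda_rr)
#         trainer.iter()
#     trainer.end_epoch()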
class Trainer(object):
def __init__(self, model, tokenizer, params):
"""
Initialize trainer.
"""
self.model = model
self.params = params
# epoch / iteration size
self.epoch_size = params.epoch_size
# tokenizer
self.tokenizer = tokenizer
# data iterators
self.iterators = {}
# data collators
self.rr_collator = DataCollatorForRelevanceRanking(self.tokenizer, "long" in params.model_type)
self.qlm_collator = DataCollatorForMaskedQueryPrediction(self.tokenizer, params.mlm_probability, "long" in params.model_type, params.qlm_mask_mode)
# set parameters
self.set_parameters()
# float16 / distributed (no AMP)
assert params.amp >= 1 or not params.fp16
assert params.amp >= 0 or params.accumulate_gradients == 1
if params.multi_gpu and params.amp == -1:
logger.info("Using nn.parallel.DistributedDataParallel ...")
self.model = nn.parallel.DistributedDataParallel(
self.model,
device_ids=[params.local_rank],
output_device=params.local_rank,
broadcast_buffers=True,
find_unused_parameters=True
)
# set optimizers
self.set_optimizers()
# float16 / distributed (AMP)
if params.amp >= 0:
self.init_amp()
if params.multi_gpu:
logger.info("Using apex.parallel.DistributedDataParallel ...")
self.model = apex.parallel.DistributedDataParallel(self.model, delay_allreduce=True)
# training statistics
self.epoch = 0
self.n_iter = 0
self.n_total_iter = 0
self.n_pairs = 0
stat_keys = [('processed_p', 0)]
if params.qlm_steps is not None:
stat_keys += [('QLM-%s' % lang_pair, []) for lang_pair in params.qlm_steps]
if params.rr_steps is not None:
stat_keys += [('RR-%s' % lang_pair, []) for lang_pair in params.rr_steps]
self.stats = OrderedDict(stat_keys)
stat_keys.pop(0)
self.epoch_scores = OrderedDict(copy.deepcopy(stat_keys))
self.last_time = time.time()
def set_parameters(self):
"""
Set parameters.
"""
params = self.params
self.parameters = {}
named_params = [(k, p) for k, p in self.model.named_parameters() if p.requires_grad]
# model (excluding memory values)
self.parameters['model'] = [p for k, p in named_params]
# log
for k, v in self.parameters.items():
logger.info("Found %i parameters in %s." % (len(v), k))
assert len(v) >= 1
def set_optimizers(self):
"""
Set optimizers.
"""
params = self.params
self.optimizers = {}
# model optimizer (excluding memory values)
self.optimizers['model'] = get_optimizer(self.parameters['model'], params.optimizer)
# log
logger.info("Optimizers: %s" % ", ".join(self.optimizers.keys()))
def init_amp(self):
"""
Initialize AMP optimizer.
"""
params = self.params
assert params.amp == 0 and params.fp16 is False or params.amp in [1, 2, 3] and params.fp16 is True
opt_names = self.optimizers.keys()
self.model, optimizers = apex.amp.initialize(
self.model,
[self.optimizers[k] for k in opt_names],
opt_level=('O%i' % params.amp)
)
self.optimizers = {
opt_name: optimizer
for opt_name, optimizer in zip(opt_names, optimizers)
}
def optimize(self, loss):
"""
Optimize.
"""
# check NaN
if (loss != loss).data.any():
logger.warning("NaN detected")
# exit()
params = self.params
# optimizers
names = self.optimizers.keys()
optimizers = [self.optimizers[k] for k in names]
# regular optimization
if params.amp == -1:
for optimizer in optimizers:
optimizer.zero_grad()
loss.backward()
if params.clip_grad_norm > 0:
for name in names:
clip_grad_norm_(self.parameters[name], params.clip_grad_norm)
for optimizer in optimizers:
optimizer.step()
# AMP optimization
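        # with gradient accumulation, the optimizers step and zero_grad only every
        # params.accumulate_gradients iterations; in-between iterations just backprop the
        # scaled loss (delay_unscale=True) so gradients keep accumulating.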
else:
if self.n_iter % params.accumulate_gradients == 0:
with apex.amp.scale_loss(loss, optimizers) as scaled_loss:
scaled_loss.backward()
if params.clip_grad_norm > 0:
for name in names:
clip_grad_norm_(apex.amp.master_params(self.optimizers[name]), params.clip_grad_norm)
for optimizer in optimizers:
optimizer.step()
optimizer.zero_grad()
else:
with apex.amp.scale_loss(loss, optimizers, delay_unscale=True) as scaled_loss:
scaled_loss.backward()
def iter(self):
"""
End of iteration.
"""
self.n_iter += 1
self.n_total_iter += 1
self.print_stats()
def print_stats(self):
"""
Print statistics about the training.
"""
if self.n_total_iter % 5 != 0:
return
s_iter = "%7i - " % self.n_total_iter
s_stat = ' || '.join([
'{}: {:7.3f}'.format(k, np.mean(v)) for k, v in self.stats.items()
if type(v) is list and len(v) > 0
])
for k in self.stats.keys():
if type(self.stats[k]) is list:
del self.stats[k][:]
# learning rates
s_lr = " - "
for k, v in self.optimizers.items():
s_lr = s_lr + (" - %s LR: " % k) + " / ".join("{:.3e}".format(group['lr']) for group in v.param_groups)
# processing speed
new_time = time.time()
diff = new_time - self.last_time
p_speed = "{:7.2f} qd pair/s - ".format(
self.stats['processed_p'] * 1.0 / diff
)
self.stats['processed_p'] = 0
self.last_time = new_time
# log speed + stats + learning rate
logger.info(s_iter + p_speed + s_stat + s_lr)
def save_checkpoint(self):
"""
Save the model / checkpoints.
"""
if not self.params.is_master:
return
# huggingface saves (more useful in our case for finetuning)
logger.info(f"Saving epoch {self.epoch} ...")
path = os.path.join(self.params.dump_path, f"huggingface-{self.epoch}")
if not os.path.exists(path): os.makedirs(path)
model_to_save = self.model.module if hasattr(self.model, 'module') else self.model
model_to_save.save_pretrained(path)
self.tokenizer.save_pretrained(path)
def end_epoch(self):
"""
End the epoch.
"""
# print epoch loss
self.epoch_stat = ' || '.join([
'{}: {:7.3f}'.format(k, np.mean(v)) for k, v in self.epoch_scores.items()
if type(v) is list and len(v) > 0
])
for k in self.epoch_scores.keys():
if type(self.epoch_scores[k]) is list:
del self.epoch_scores[k][:]
logger.info("EPOCH LOSS: " + self.epoch_stat)
self.save_checkpoint()
self.epoch += 1
self.n_iter = 0
def get_iterator(self, obj_name, lang_pair):
params = self.params
if obj_name == "rr":
dataset = wiki_rr_trainset(
lang_pair = lang_pair,
num_neg = params.num_neg,
neg_val = params.neg_val,
params=params
)
elif obj_name == "qlm":
dataset = wiki_qlm_trainset(
lang_pair = lang_pair,
neg_val = params.neg_val,
params=params
)
sampler = DistributedSampler(dataset, shuffle=True)
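        # the sampler shards the data across distributed workers and handles shuffling,
        # hence shuffle=False on the DataLoader below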
dataloader = DataLoader(
dataset,
batch_size = params.batch_size,
shuffle = False,
num_workers = 0,
collate_fn = self.rr_collator if obj_name == "rr" else self.qlm_collator,
sampler = sampler
)
iterator = iter(dataloader)
self.iterators[(obj_name, lang_pair)] = iterator
logger.info("Created new training data iterator (%s) ..." % ','.join([str(x) for x in [obj_name, lang_pair]]))
return iterator
def get_batch(self, obj_name, lang_pair):
iterator = self.iterators.get(
(obj_name, lang_pair),
None
)
if iterator is None:
iterator = self.get_iterator(obj_name, lang_pair) # if there is no such iterator, create one
try:
x = next(iterator)
except StopIteration:
iterator = self.get_iterator(obj_name, lang_pair)
x = next(iterator)
return x
def qlm_step(self, lang_pair, lambda_coeff):
assert lambda_coeff >= 0
if lambda_coeff == 0:
return
params = self.params
self.model.train()
inputs = self.get_batch("qlm", lang_pair)
# if 'long' in params.model_type:
# if self.check_for_long_queries(inputs['attention_mask']):
# ## fail the test: long queries detected
# logger.info("QLM step skipping long queries")
# return
if 'long' in params.model_type:
inputs['attention_mask'] = self.global_attention_safety_check(inputs['attention_mask'])
inputs = dict_to_cuda(inputs)
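        # "mlm" selects the masked (query) language modeling objective in the model's forward pass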
inputs["mode"] = "mlm"
outputs = self.model(inputs)
loss = outputs[0]
self.stats[('QLM-%s' % lang_pair)].append(loss.item())
self.epoch_scores[('QLM-%s' % lang_pair)].append(loss.item())
loss = lambda_coeff * loss
# optimize
self.optimize(loss)
# number of processed sentences / words
self.stats['processed_p'] += inputs["attention_mask"].size(0)
self.n_pairs += inputs["attention_mask"].size(0)
def rr_step(self, lang_pair, lambda_coeff):
assert lambda_coeff >= 0
if lambda_coeff == 0:
return
params = self.params
self.model.train()
inputs = self.get_batch("rr", lang_pair)
# if 'long' in params.model_type:
# if self.check_for_long_queries(inputs['attention_mask']):
# ## fail the test: long queries detected
# logger.info("RR step skipping long queries")
# return
if 'long' in params.model_type:
inputs['attention_mask'] = self.global_attention_safety_check(inputs['attention_mask'])
inputs = dict_to_cuda(inputs)
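        # "seqcls" selects the sequence classification (relevance ranking) head in the model's forward pass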
inputs["mode"] = "seqcls"
outputs = self.model(inputs)
loss = outputs[0]
self.stats[('RR-%s' % lang_pair)].append(loss.item())
self.epoch_scores[('RR-%s' % lang_pair)].append(loss.item())
loss = lambda_coeff * loss
# optimize
self.optimize(loss)
# number of processed sentences / words
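        # each query comes with 1 positive and params.num_neg negative documents,
        # so positive pairs = batch rows / (1 + num_neg)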
qd_pairs = inputs["attention_mask"].size(0)
pos_qd_pairs = int(qd_pairs / (1 + params.num_neg))
self.stats['processed_p'] += inputs["attention_mask"].size(0)
self.n_pairs += pos_qd_pairs
def check_for_long_queries(self, tensor, length=128):
## for models that use longformer attention mechanism
        ## when a query is abnormally long, it may cause unusually high GPU memory usage
## and therefore program failure
## thus, we check for those long queries and skip them!
## 07/24/2020: deprecating this method because skipping batches can cause waiting with DDP
if 'long' not in self.params.model_type:
assert False, "only check for long queries with mBERT-long!"
return any((tensor==2).sum(dim=1) >= length)
def global_attention_safety_check(self, tensor):
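        ## cap pathological inputs: any row that marks >= 256 tokens for global attention
        ## (mask value 2) is replaced by a plain mask attending locally to the first 512 tokens,
        ## which avoids the excessive GPU memory usage described above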
if 'long' not in self.params.model_type:
return tensor
else:
idxs = ((tensor==2).sum(dim=1) >= 256).nonzero().squeeze()
if len(idxs.shape) != 0:
if idxs.shape[0] == 0:
return tensor
else:
# just one row to replace
idxs = idxs.unsqueeze(dim=0)
replacement_attention_mask = torch.LongTensor([1]*512 + [0]*512)
for idx in idxs:
tensor[idx] = replacement_attention_mask
return tensor | [
"[email protected]"
] | |
c37871887b09d2659db01a5f99c19f028d015c89 | 2180a6e2ab8245a9ed980cdf4b95279043b44ee4 | /optimization/parse_table_datacmds.py | 91df74b76d09b3fc80ae721f8c6ec26d6d2f7f71 | [] | no_license | jessicasunya/zice-2014 | 5d9f09fc74a0e7752cb6c5ce6d912ae22c83c20e | 4aa450c465044600a604d40290fa927096e45a41 | refs/heads/master | 2020-06-11T01:47:34.072195 | 2014-02-05T19:29:52 | 2014-02-05T19:29:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42,256 | py |
# /Users/clarissasweet/Dropbox/zice-2014/optimization/parse_table_datacmds.py
# This file is automatically generated. Do not edit.
_tabversion = '3.2'
_lr_method = 'LALR'
_lr_signature = '\xa29\x9a\x8d\xac\x17er\x03\xef\xcd\x7f \t8\xce'
_lr_action_items = {'QUOTEDSTRING':([5,6,9,11,12,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,107,108,130,132,133,135,138,153,159,167,172,175,176,177,179,191,196,222,],[22,22,31,55,22,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,76,-113,-123,31,31,31,31,-99,-104,31,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,31,144,76,76,-93,-92,-89,-88,76,31,144,144,-91,-90,-87,-86,31,144,144,31,]),'LBRACKET':([9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,64,66,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,113,121,130,132,133,135,138,153,159,167,172,175,176,177,179,191,196,222,],[32,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,77,-113,-123,32,32,32,32,118,118,-99,-104,32,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,32,77,77,118,118,-93,-92,-89,-88,77,32,118,118,-91,-90,-87,-86,32,118,118,32,]),'COLON':([9,10,17,20,21,22,24,25,26,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,51,53,58,59,60,61,63,65,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,97,100,107,108,120,130,132,133,135,138,140,141,142,143,144,146,149,153,172,175,176,177,179,183,185,186,199,209,211,212,219,222,227,228,231,],[38,-70,59,64,66,-132,-133,-131,68,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,84,-113,-123,-64,-69,38,38,38,38,113,121,-99,-104,38,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,38,-56,-68,84,84,-134,-93,-92,-89,-88,84,-75,-71,-72,-74,-73,180,-55,38,-91,-90,-87,-86,38,-59,-54,-63,-59,-58,-61,-62,-58,38,-57,-60,-57,]),'COLONEQ':([10,17,18,19,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,53,76,77,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,97,100,108,130,132,133,135,140,141,142,143,144,145,147,172,175,176,177,180,181,186,204,205,211,212,221,228,237,240,242,],[-70,58,60,61,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,78,-113,-123,95,-64,-69,-99,-104,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,-50,-68,153,-93,-92,-89,-88,-75,-71,-72,-74,-73,179,-49,-91,-90,-87,-86,-67,-48,-63,-66,222,-61,-62,-65,-60,-53,-52,-51,]),'DATA':([0,1,4,14,16,29,67,70,71,99,101,102,103,104,106,110,112,127,151,152,154,155,171,178,188,190,213,],[2,2,-4,-3,-22,-23,-16,-17,2,-19,-20,-21,-18,2,-8,-12,-14,2,2,-7,-11,-13,-6,-15,-5,-10,-9,]),'STRING':([5,6,9,12,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,107,108,130,132,133,135,138,153,159,167,172,175,176,177,179,191,196,222,],[25,25,42,25,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,88,-113,-123,42,42,42,42,-99,-104,42,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,42,142,88,88,-93,-92,-89,-88,88,42,142,142,-91,-90,-87,-86,42,142,142,42,]),'SET':([0,1,4,14,16,29,67,70,71,96,99,101,102,103,104,106,110,112,127,151,152,154,155,159,167,171,178,188,190,191,196,213,],[3,3,-4,-3,-22,-23,-16,-17,3,140,-19,-20,-21,-18,3,-8,-12,-14,3,3,-7,-11,-13,140,140,-6,-15,-5,-10,140,140,-9,]),'WORD':([3,5,6,8,9,10,11,12,15,20,21,22,24,25,26,27,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,53,58,59,60,61,64,66,69,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,98,100,107,108,113,114,115,116,118,119,120,121,122,123,129,130,132,13
3,135,138,140,141,142,143,144,148,153,156,157,159,160,164,165,167,169,172,175,176,177,179,180,182,191,193,195,196,198,204,214,216,217,222,225,235,239,],[17,20,26,30,35,50,54,20,57,62,62,-132,-133,-131,-130,62,-117,73,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,75,-126,-120,81,-113,-123,62,35,35,35,35,114,122,125,-99,73,35,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,75,-108,-102,-95,-105,35,141,150,-68,81,81,156,-78,-79,160,162,160,-134,164,-78,160,173,-93,-92,-89,-88,81,-75,-71,-72,-74,-73,184,35,-78,160,141,-78,-78,160,141,200,-91,-90,-87,-86,35,204,207,141,160,-81,141,160,204,160,-80,160,35,234,238,241,]),'SEMICOLON':([2,7,20,21,22,23,24,25,26,27,28,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,48,49,52,53,54,55,56,58,60,61,63,65,69,76,77,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,100,105,107,109,111,114,115,117,119,120,122,124,125,126,130,132,133,135,137,138,139,140,141,142,143,144,149,153,156,158,160,161,163,164,166,168,170,172,175,176,177,185,186,189,192,197,199,203,211,212,215,218,219,228,229,230,231,232,],[16,29,-38,-30,-132,67,-133,-131,-130,-70,70,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,-113,-123,99,-69,101,102,103,106,110,112,-37,-29,-28,-99,-104,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,-68,152,-24,154,155,-78,-79,-44,-77,-134,-78,-36,-27,-56,-93,-92,-89,-88,178,-25,-47,-75,-71,-72,-74,-73,-55,190,-78,-43,-78,-42,-76,-78,-35,-34,-26,-91,-90,-87,-86,-54,-63,213,-41,-33,-59,-45,-61,-62,-40,-32,-58,-60,-39,-31,-57,-46,]),'TR':([9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,130,132,133,135,138,153,172,175,176,177,179,222,],[37,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,83,-113,-123,37,37,37,37,-99,-104,37,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,37,83,83,-93,-92,-89,-88,83,37,-91,-90,-87,-86,37,37,]),'FILENAME':([5,6,12,64,68,],[24,24,24,120,120,]),'COMMA':([9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,130,132,133,135,138,150,153,162,172,173,174,175,176,177,179,184,200,207,222,],[40,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,86,-113,-123,40,40,40,40,129,129,129,129,-99,-104,40,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,40,86,86,-93,-92,-89,-88,86,129,40,129,-91,129,129,-90,-87,-86,40,129,129,129,40,]),'WORDWITHSQUOTEDINDEX':([3,9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,130,132,133,135,138,153,172,175,176,177,179,222,],[19,39,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,85,-113,-123,39,39,39,39,-99,-104,39,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,39,85,85,-93,-92,-89,-88,85,39,-91,-90,-87,-86,39,39,]),'IMPORT':([0,1,4,14,16,29,67,70,71,99,101,102,103,104,106,110,112,127,151,152,154,155,171,178,188,190,213,],[5,5,-4,-3,-22,-23,-16,-17,5,-19,-20,-21,-18,5,-8,-12,-14,5,5,-7,-11,-13,-6,-15,-5,-10,-9,]),'$end':([0,1,4,13,14,16,29,67,70,99,101,102,103,106,110,112,152,154,155,171,178,188,190,213,],[-2,-1,-4,0,-3,-22,-23,-16,-17,-19,-20,-21,-18,-8,-12,-14,-7,-11,-13,-6,-15,-5,-10,-9,]),'LOAD':([0,1,4,14,16,29,67,70,71,99,101,102,103,104,106,110,112,127,151,152,154,155,171,178,188,190,213,],[6,6,-4,-3,
-22,-23,-16,-17,6,-19,-20,-21,-18,6,-8,-12,-14,6,6,-7,-11,-13,-6,-15,-5,-10,-9,]),'NONWORD':([9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,130,132,133,135,138,153,172,175,176,177,179,222,],[41,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,87,-113,-123,41,41,41,41,-99,-104,41,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,41,87,87,-93,-92,-89,-88,87,41,-91,-90,-87,-86,41,41,]),'END':([0,1,4,14,16,29,67,70,71,99,101,102,103,104,106,110,112,127,151,152,154,155,171,178,188,190,213,],[7,7,-4,-3,-22,-23,-16,-17,7,-19,-20,-21,-18,7,-8,-12,-14,7,7,-7,-11,-13,-6,-15,-5,-10,-9,]),'RBRACE':([4,9,14,16,29,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,67,70,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,99,101,102,103,106,107,108,110,112,127,130,132,133,135,138,150,151,152,153,154,155,171,172,173,174,175,176,177,178,179,187,188,190,201,202,213,222,234,238,241,],[-4,34,-3,-22,-23,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,80,-113,-123,34,34,34,34,-16,-17,-99,-104,34,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,34,-19,-20,-21,-18,-8,80,80,-12,-14,171,-93,-92,-89,-88,80,186,188,-7,34,-11,-13,-6,-91,-84,-85,-90,-87,-86,-15,34,212,-5,-10,-82,-83,-9,34,237,240,242,]),'ASTERISK':([9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,129,130,132,133,135,138,153,172,175,176,177,179,222,],[43,-117,72,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,74,-126,-120,89,-113,-123,43,43,43,43,-99,72,43,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,74,-108,-102,-95,-105,43,89,89,174,-93,-92,-89,-88,89,43,-91,-90,-87,-86,43,43,]),'NAMESPACE':([0,1,4,14,16,29,67,70,71,99,101,102,103,104,106,110,112,127,151,152,154,155,171,178,188,190,213,],[8,15,-4,-3,-22,-23,-16,-17,8,-19,-20,-21,-18,8,-8,-12,-14,15,15,-7,-11,-13,-6,-15,-5,-10,-9,]),'PARAM':([0,1,4,14,16,29,67,70,71,96,99,101,102,103,104,106,110,112,127,151,152,154,155,159,167,171,178,188,190,191,196,213,],[9,9,-4,-3,-22,-23,-16,-17,9,143,-19,-20,-21,-18,9,-8,-12,-14,9,9,-7,-11,-13,143,143,-6,-15,-5,-10,143,143,-9,]),'LPAREN':([9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,130,132,133,135,138,153,172,175,176,177,179,222,],[44,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,90,-113,-123,44,44,44,44,-99,-104,44,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,44,90,90,-93,-92,-89,-88,90,44,-91,-90,-87,-86,44,44,]),'TABLE':([0,1,4,14,16,29,67,70,71,99,101,102,103,104,106,110,112,127,151,152,154,155,171,178,188,190,213,],[10,10,-4,-3,-22,-23,-16,-17,10,-19,-20,-21,-18,10,-8,-12,-14,10,10,-7,-11,-13,-6,-15,-5,-10,-9,]),'RPAREN':([9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,130,132,133,134,135,136,138,148,153,169,172,173,174,175,176,177,179,182,184,200,201,202,207,210,220,222,224,],[45,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,91,-113,-123,45,45,45,45,133,135,-99,-104,45,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,45,91,91,-93,-92,-89,176,-88,177,91,183,45,199,-91,-84,-85,-90,-87,-86,45,206,209,219,-82,-83,223,227
,231,45,233,]),'INCLUDE':([0,1,4,14,16,29,67,70,71,99,101,102,103,104,106,110,112,127,151,152,154,155,171,178,188,190,213,],[11,11,-4,-3,-22,-23,-16,-17,11,-19,-20,-21,-18,11,-8,-12,-14,11,11,-7,-11,-13,-6,-15,-5,-10,-9,]),'EQ':([50,62,114,122,156,160,164,183,206,209,223,227,233,],[96,96,159,167,191,96,196,208,208,226,226,236,236,]),'STORE':([0,1,4,14,16,29,67,70,71,99,101,102,103,104,106,110,112,127,151,152,154,155,171,178,188,190,213,],[12,12,-4,-3,-22,-23,-16,-17,12,-19,-20,-21,-18,12,-8,-12,-14,12,12,-7,-11,-13,-6,-15,-5,-10,-9,]),'LBRACE':([9,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,57,58,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,130,132,133,135,138,153,172,175,176,177,179,208,222,226,236,],[46,71,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,92,-113,-123,104,46,46,46,46,-99,-104,46,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,46,92,92,-93,-92,-89,-88,92,46,-91,-90,-87,-86,46,225,46,235,239,]),'WORDWITHLPAREN':([10,22,24,25,26,27,51,53,69,97,100,120,126,140,141,142,143,144,147,149,183,186,199,209,211,212,219,227,228,231,237,240,242,],[-70,-132,-133,-131,-130,-70,-64,-69,-64,148,-68,-134,169,-75,-71,-72,-74,-73,182,169,-59,-63,-59,-58,-61,-62,-58,-57,-60,-57,-53,-52,-51,]),'WORDWITHINDEX':([3,9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,130,132,133,135,138,153,172,175,176,177,179,222,],[18,48,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,93,-113,-123,48,48,48,48,-99,-104,48,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,48,93,93,-93,-92,-89,-88,93,48,-91,-90,-87,-86,48,48,]),'WORDWITHEQBRACE':([10,22,24,25,26,27,51,53,69,100,120,140,141,142,143,144,186,212,],[-70,-132,-133,-131,-130,-70,98,-69,98,-68,-134,-75,-71,-72,-74,-73,98,98,]),'RBRACKET':([9,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,58,59,60,61,72,73,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,107,108,128,130,131,132,133,135,138,153,162,172,173,174,175,176,177,179,194,201,202,222,],[49,-117,-122,-128,-121,-112,-129,-124,-119,-114,-118,-115,-116,-127,-125,-126,-120,94,-113,-123,49,49,49,49,130,132,-99,-104,49,-110,-103,-94,-111,-106,-101,-96,-100,-97,-98,-109,-107,-108,-102,-95,-105,49,94,94,172,-93,175,-92,-89,-88,94,49,195,-91,-84,-85,-90,-87,-86,49,216,-82,-83,49,]),}
_lr_action = { }
for _k, _v in _lr_action_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_action: _lr_action[_x] = { }
_lr_action[_x][_k] = _y
del _lr_action_items
_lr_goto_items = {'setdecl':([58,60,61,153,],[105,109,111,189,]),'statements':([0,71,104,],[1,127,151,]),'table_indices':([51,69,186,212,],[97,126,211,228,]),'labeled_table_values':([97,126,149,],[146,170,185,]),'tabledecl':([10,],[52,]),'paramdecl':([78,95,179,222,],[137,139,203,232,]),'set_template':([9,47,58,59,60,61,78,95,107,108,138,153,179,222,],[33,79,33,33,33,33,33,33,79,79,79,33,33,33,]),'table_labels':([180,204,],[205,221,]),'labeled_table_value':([97,126,149,],[149,149,149,]),'param_template':([9,47,58,59,60,61,78,95,107,108,138,153,179,222,],[36,82,36,36,36,36,36,36,82,82,82,36,36,36,]),'filename':([5,6,12,],[21,27,21,]),'unlabeled_table_value':([97,147,],[147,147,]),'statement':([0,1,71,104,127,151,],[4,14,4,4,14,14,]),'option':([10,20,21,27,53,64,66,113,116,119,121,123,157,165,193,198,214,217,],[53,53,53,53,53,115,115,115,115,115,115,115,115,115,115,115,115,115,]),'unlabeled_table_values':([97,147,],[145,181,]),'index_list':([72,73,74,75,150,162,173,174,184,200,207,],[128,131,134,136,187,194,201,202,210,220,224,]),'variable':([64,66,113,116,119,121,123,157,165,193,198,214,217,],[119,119,119,119,119,119,119,119,119,119,119,119,119,]),'importdecl':([5,12,],[23,56,]),'import_options':([10,20,21,27,53,],[51,63,65,69,100,]),'expr':([0,],[13,]),'bracket_indices':([64,66,113,121,159,167,191,196,],[116,123,157,165,193,198,214,217,]),'variable_options':([64,66,113,116,119,121,123,157,165,193,198,214,217,],[117,124,158,161,163,166,168,192,197,215,218,229,230,]),'items':([9,58,59,60,61,78,95,153,179,222,],[47,107,108,107,107,138,138,107,138,138,]),'loaddecl':([6,],[28,]),}
_lr_goto = { }
for _k, _v in _lr_goto_items.items():
for _x,_y in zip(_v[0],_v[1]):
if not _x in _lr_goto: _lr_goto[_x] = { }
_lr_goto[_x][_k] = _y
del _lr_goto_items
_lr_productions = [
("S' -> expr","S'",1,None,None,None),
('expr -> statements','expr',1,'p_expr','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',175),
('expr -> <empty>','expr',0,'p_expr','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',176),
('statements -> statements statement','statements',2,'p_statements','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',190),
('statements -> statement','statements',1,'p_statements','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',191),
('statements -> statements NAMESPACE WORD LBRACE statements RBRACE','statements',6,'p_statements','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',192),
('statements -> NAMESPACE WORD LBRACE statements RBRACE','statements',5,'p_statements','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',193),
('statement -> SET WORD COLONEQ setdecl SEMICOLON','statement',5,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',215),
('statement -> SET WORD COLONEQ SEMICOLON','statement',4,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',216),
('statement -> SET WORD COLON items COLONEQ setdecl SEMICOLON','statement',7,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',217),
('statement -> SET WORD COLON items COLONEQ SEMICOLON','statement',6,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',218),
('statement -> SET WORDWITHINDEX COLONEQ setdecl SEMICOLON','statement',5,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',219),
('statement -> SET WORDWITHINDEX COLONEQ SEMICOLON','statement',4,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',220),
('statement -> SET WORDWITHSQUOTEDINDEX COLONEQ setdecl SEMICOLON','statement',5,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',221),
('statement -> SET WORDWITHSQUOTEDINDEX COLONEQ SEMICOLON','statement',4,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',222),
('statement -> PARAM items COLONEQ paramdecl SEMICOLON','statement',5,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',223),
('statement -> IMPORT importdecl SEMICOLON','statement',3,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',224),
('statement -> LOAD loaddecl SEMICOLON','statement',3,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',225),
('statement -> STORE importdecl SEMICOLON','statement',3,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',226),
('statement -> TABLE tabledecl SEMICOLON','statement',3,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',227),
('statement -> INCLUDE WORD SEMICOLON','statement',3,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',228),
('statement -> INCLUDE QUOTEDSTRING SEMICOLON','statement',3,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',229),
('statement -> DATA SEMICOLON','statement',2,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',230),
('statement -> END SEMICOLON','statement',2,'p_statement','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',231),
('setdecl -> items','setdecl',1,'p_setdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',250),
('paramdecl -> items','paramdecl',1,'p_paramdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',254),
('loaddecl -> filename import_options table_indices labeled_table_values','loaddecl',4,'p_loaddecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',258),
('loaddecl -> filename import_options WORD','loaddecl',3,'p_loaddecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',259),
('loaddecl -> filename import_options','loaddecl',2,'p_loaddecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',260),
('importdecl -> filename import_options','importdecl',2,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',274),
('importdecl -> filename','importdecl',1,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',275),
('importdecl -> filename import_options COLON WORD EQ bracket_indices variable_options','importdecl',7,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',276),
('importdecl -> filename COLON WORD EQ bracket_indices variable_options','importdecl',6,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',277),
('importdecl -> filename import_options COLON bracket_indices variable_options','importdecl',5,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',278),
('importdecl -> filename COLON bracket_indices variable_options','importdecl',4,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',279),
('importdecl -> filename import_options COLON variable_options','importdecl',4,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',280),
('importdecl -> filename COLON variable_options','importdecl',3,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',281),
('importdecl -> WORD import_options','importdecl',2,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',282),
('importdecl -> WORD','importdecl',1,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',283),
('importdecl -> WORD import_options COLON WORD EQ bracket_indices variable_options','importdecl',7,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',284),
('importdecl -> WORD COLON WORD EQ bracket_indices variable_options','importdecl',6,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',285),
('importdecl -> WORD import_options COLON bracket_indices variable_options','importdecl',5,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',286),
('importdecl -> WORD COLON bracket_indices variable_options','importdecl',4,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',287),
('importdecl -> WORD import_options COLON variable_options','importdecl',4,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',288),
('importdecl -> WORD COLON variable_options','importdecl',3,'p_importdecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',289),
('tabledecl -> import_options table_indices unlabeled_table_values COLONEQ paramdecl','tabledecl',5,'p_tabledecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',317),
('tabledecl -> import_options table_indices labeled_table_values COLON table_labels COLONEQ paramdecl','tabledecl',7,'p_tabledecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',318),
('tabledecl -> WORD COLONEQ paramdecl','tabledecl',3,'p_tabledecl','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',319),
('unlabeled_table_values -> unlabeled_table_value unlabeled_table_values','unlabeled_table_values',2,'p_unlabeled_table_values','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',342),
('unlabeled_table_values -> unlabeled_table_value','unlabeled_table_values',1,'p_unlabeled_table_values','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',343),
('unlabeled_table_values -> <empty>','unlabeled_table_values',0,'p_unlabeled_table_values','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',344),
('unlabeled_table_value -> WORDWITHLPAREN WORD index_list RPAREN EQ LBRACE WORD RBRACE','unlabeled_table_value',8,'p_unlabeled_table_value','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',355),
('unlabeled_table_value -> WORDWITHLPAREN WORD RPAREN EQ LBRACE WORD RBRACE','unlabeled_table_value',7,'p_unlabeled_table_value','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',356),
('unlabeled_table_value -> WORDWITHLPAREN RPAREN EQ LBRACE WORD RBRACE','unlabeled_table_value',6,'p_unlabeled_table_value','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',357),
('labeled_table_values -> labeled_table_value labeled_table_values','labeled_table_values',2,'p_labeled_table_values','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',367),
('labeled_table_values -> labeled_table_value','labeled_table_values',1,'p_labeled_table_values','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',368),
('labeled_table_values -> <empty>','labeled_table_values',0,'p_labeled_table_values','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',369),
('labeled_table_value -> WORDWITHLPAREN WORD index_list RPAREN','labeled_table_value',4,'p_labeled_table_value','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',380),
('labeled_table_value -> WORDWITHLPAREN WORD RPAREN','labeled_table_value',3,'p_labeled_table_value','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',381),
('labeled_table_value -> WORDWITHLPAREN RPAREN','labeled_table_value',2,'p_labeled_table_value','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',382),
('table_indices -> WORDWITHEQBRACE WORD index_list RBRACE table_indices','table_indices',5,'p_table_indices','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',392),
('table_indices -> WORDWITHEQBRACE WORD RBRACE table_indices','table_indices',4,'p_table_indices','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',393),
('table_indices -> WORDWITHEQBRACE WORD index_list RBRACE','table_indices',4,'p_table_indices','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',394),
('table_indices -> WORDWITHEQBRACE WORD RBRACE','table_indices',3,'p_table_indices','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',395),
('table_indices -> <empty>','table_indices',0,'p_table_indices','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',396),
('table_labels -> WORD table_labels','table_labels',2,'p_table_labels','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',413),
('table_labels -> WORD','table_labels',1,'p_table_labels','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',414),
('table_labels -> <empty>','table_labels',0,'p_table_labels','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',415),
('import_options -> option import_options','import_options',2,'p_import_options','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',426),
('import_options -> option','import_options',1,'p_import_options','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',427),
('import_options -> <empty>','import_options',0,'p_import_options','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',428),
('option -> WORD EQ WORD','option',3,'p_option','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',439),
('option -> WORD EQ STRING','option',3,'p_option','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',440),
('option -> WORD EQ QUOTEDSTRING','option',3,'p_option','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',441),
('option -> WORD EQ PARAM','option',3,'p_option','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',442),
('option -> WORD EQ SET','option',3,'p_option','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',443),
('variable_options -> variable variable_options','variable_options',2,'p_variable_options','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',448),
('variable_options -> variable','variable_options',1,'p_variable_options','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',449),
('variable -> WORD','variable',1,'p_variable','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',458),
('variable -> option','variable',1,'p_variable','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',459),
('bracket_indices -> LBRACKET WORD index_list RBRACKET','bracket_indices',4,'p_bracket_indices','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',467),
('bracket_indices -> LBRACKET WORD RBRACKET','bracket_indices',3,'p_bracket_indices','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',468),
('index_list -> COMMA WORD index_list','index_list',3,'p_index_list','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',477),
('index_list -> COMMA ASTERISK index_list','index_list',3,'p_index_list','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',478),
('index_list -> COMMA WORD','index_list',2,'p_index_list','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',479),
('index_list -> COMMA ASTERISK','index_list',2,'p_index_list','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',480),
('set_template -> LPAREN WORD index_list RPAREN','set_template',4,'p_set_template','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',489),
('set_template -> LPAREN ASTERISK index_list RPAREN','set_template',4,'p_set_template','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',490),
('set_template -> LPAREN WORD RPAREN','set_template',3,'p_set_template','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',491),
('set_template -> LPAREN ASTERISK RPAREN','set_template',3,'p_set_template','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',492),
('param_template -> LBRACKET WORD index_list RBRACKET','param_template',4,'p_param_template','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',500),
('param_template -> LBRACKET ASTERISK index_list RBRACKET','param_template',4,'p_param_template','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',501),
('param_template -> LBRACKET WORD RBRACKET','param_template',3,'p_param_template','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',502),
('param_template -> LBRACKET ASTERISK RBRACKET','param_template',3,'p_param_template','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',503),
('items -> items WORD','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',514),
('items -> items WORDWITHINDEX','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',515),
('items -> items WORDWITHSQUOTEDINDEX','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',516),
('items -> items NONWORD','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',517),
('items -> items STRING','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',518),
('items -> items QUOTEDSTRING','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',519),
('items -> items COMMA','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',520),
('items -> items COLON','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',521),
('items -> items LBRACE','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',522),
('items -> items RBRACE','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',523),
('items -> items LBRACKET','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',524),
('items -> items RBRACKET','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',525),
('items -> items TR','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',526),
('items -> items LPAREN','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',527),
('items -> items RPAREN','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',528),
('items -> items ASTERISK','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',529),
('items -> items set_template','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',530),
('items -> items param_template','items',2,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',531),
('items -> WORD','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',532),
('items -> WORDWITHINDEX','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',533),
('items -> WORDWITHSQUOTEDINDEX','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',534),
('items -> NONWORD','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',535),
('items -> STRING','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',536),
('items -> QUOTEDSTRING','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',537),
('items -> COMMA','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',538),
('items -> COLON','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',539),
('items -> LBRACE','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',540),
('items -> RBRACE','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',541),
('items -> LBRACKET','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',542),
('items -> RBRACKET','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',543),
('items -> TR','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',544),
('items -> LPAREN','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',545),
('items -> RPAREN','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',546),
('items -> ASTERISK','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',547),
('items -> set_template','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',548),
('items -> param_template','items',1,'p_items','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',549),
('filename -> WORD','filename',1,'p_filename','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',571),
('filename -> STRING','filename',1,'p_filename','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',572),
('filename -> QUOTEDSTRING','filename',1,'p_filename','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',573),
('filename -> FILENAME','filename',1,'p_filename','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',574),
('filename -> WORD COLON FILENAME','filename',3,'p_filename','/Users/clarissasweet/Library/Enthought/Canopy_32bit/User/lib/python2.7/site-packages/coopr/pyomo/data/parse_datacmds.py',575),
]
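# The tuples above are PLY-generated production signatures: each one records a grammar
# rule ("option -> WORD EQ WORD"), the left-hand-side name, the number of right-hand-side
# symbols, the p_* handler that implements the rule, and the source file/line of that
# handler in parse_datacmds.py. As a minimal, self-contained sketch (not part of the
# generated table; the token definitions below are illustrative assumptions, not the real
# parse_datacmds lexer), this is how one such signature corresponds to a hand-written
# PLY rule:
import ply.lex as lex
import ply.yacc as yacc

tokens = ('WORD', 'EQ')

t_WORD = r'[A-Za-z_][A-Za-z0-9_]*'
t_EQ = r'='
t_ignore = ' \t'

def t_error(t):
    t.lexer.skip(1)

def p_option(p):
    "option : WORD EQ WORD"  # yields the signature ('option -> WORD EQ WORD', 'option', 3, 'p_option', ...)
    p[0] = (p[1], p[3])

def p_error(p):
    pass

if __name__ == '__main__':
    lexer = lex.lex()
    parser = yacc.yacc(start='option', write_tables=False, debug=False)
    print(parser.parse("format = csv", lexer=lexer))  # -> ('format', 'csv')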
# --- apps/echo_server/test.py (repo: AjayBrahmakshatriya/trusted_browser, license: MIT) ---
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.options import Options
import time
import sys
def run_test():
    print "Running echo server test..."
    # Run Chrome headless so the test can run on machines without a display.
    chrome_options = Options()
    chrome_options.add_argument("--headless")
    # Target host defaults to localhost but can be overridden on the command line.
    domain = "localhost"
    if len(sys.argv) > 1:
        domain = sys.argv[1]
    driver = webdriver.Chrome(chrome_options=chrome_options)
    driver.get("http://" + domain + ":8000/echo_server/")
    # Give the page a fixed window to complete the echo exchange and fill the log.
    time.sleep(20)
    # Dump whatever the page logged, then shut the browser down.
    log = driver.find_element_by_id("log")
    print log.text
    print "Test finished"
    driver.quit()
if __name__ == "__main__":
run_test()
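# The fixed time.sleep(20) in run_test simply waits out the echo exchange. A sketch of an
# alternative (not part of the original test; it assumes the page writes its output into
# the #log element) would poll with selenium's explicit-wait API instead:
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By

def wait_for_log(driver, timeout=20):
    # Block until the #log element is present, then return its text.
    WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located((By.ID, "log")))
    return driver.find_element_by_id("log").text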
# --- probpy/inference/conjugate/bernoulli.py (repo: JonasRSV/probpy) ---
from probpy.core import RandomVariable
from typing import Tuple
from probpy.distributions import beta, bernoulli
from probpy.inference.conjugate.identification import _check_no_none_parameters, _check_only_none_is
class BernoulliBeta_PPrior:
"""predictive conjugate for bernoulli likelihood with beta parameter prior"""
@staticmethod
def is_conjugate(likelihood: RandomVariable, priors: Tuple[RandomVariable]):
if priors[0].cls is beta \
and _check_no_none_parameters(priors[0]) \
and _check_only_none_is(likelihood, [bernoulli.probability]):
return True
return False
@staticmethod
def posterior(_: RandomVariable, priors: Tuple[RandomVariable]) -> RandomVariable:
prior = priors[0]
a = prior.parameters[beta.a].value
b = prior.parameters[beta.b].value
return bernoulli.med(probability=(a / (a + b)))
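# The posterior predictive returned above is just the Beta mean: with a Beta(a, b) prior on
# the Bernoulli probability, P(x = 1 | data) = a / (a + b) once a and b have absorbed the
# observed successes and failures. A self-contained sketch of that conjugate update
# (independent of probpy; the prior values in the comment below are only an example):
def beta_bernoulli_predictive(a, b, successes, failures):
    # Conjugate update: Beta(a, b) prior + Bernoulli data -> Beta(a + successes, b + failures).
    a_post = a + successes
    b_post = b + failures
    # Posterior predictive probability that the next observation is 1 (the posterior mean).
    return a_post / (a_post + b_post)

# Example: a uniform Beta(1, 1) prior updated with 7 successes and 3 failures gives
# beta_bernoulli_predictive(1, 1, 7, 3) == 8 / 12, i.e. about 0.667.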
# --- ChorusGUI/run_probMW.py (repo: zhangtaolab/Chorus2, license: MIT) ---
import numpy as np
from PyQt5 import QtWidgets, QtGui, QtCore
import sys
# from ChorusGUI.probMW import Ui_MainWindow
# from ChorusGUI.Probes import Probes
from probMW import Ui_MainWindow
from Probes import Probes
from matplotlib.widgets import SpanSelector
import pandas as pd
import os
class DesMainWD(QtWidgets.QMainWindow, Ui_MainWindow):
def __init__(self, parent=None):
super(DesMainWD, self).__init__(parent)
self.nowchr = 'Chromosome'
self.setupUi(self)
self.dockWidget_OV.setVisible(False)
self.actionLoad_probe.triggered.connect(self.select_bedfile)
self.comboBox_selectchr.activated[str].connect(self.onActionvated)
self.pushButton_loadchr.clicked.connect(self.draw_graph)
self.horizontalSlider_start.valueChanged['int'].connect(self.update_graph)
self.horizontalSlider_end.valueChanged['int'].connect(self.update_graph)
self.selectedregionlength = 0
self.probedir = ''
# self.spinBox_start.valueChanged['int'].connect(self.update_graph)
# self.spinBox_end.valueChanged['int'].connect(self.update_graph)
self.pushButton_addpb.clicked.connect(self.add_probes)
self.pushButton_delete.clicked.connect(self.del_probes)
self.pushButton_show.clicked.connect(self.draw_overview)
self.pushButton_projectdir.clicked.connect(self.setProjetDir)
self.pushButton_probed.clicked.connect(self.select_bedfile)
self.pushButton_progenome.clicked.connect(self.setGenomefile)
self.pushButton_proprobedir.clicked.connect(self.setProbeDir)
self.pushButton_saveprobe.clicked.connect(self.saveProbe)
self.sortedperkbcount = object()
def select_bedfile(self):
file, _ = QtWidgets.QFileDialog.getOpenFileName()
print(file)
if file:
# self.lineEdit.setText(str(file[0]))
chrlist = self.get_chr(file)
self.comboBox_selectchr.addItems(chrlist)
self.comboBox_selectchr.setCurrentIndex(0)
self.nowchr = chrlist[0]
#str(file[0])
self.label_filename.setText(file)
self.probeset = self.probe.probe
self.max_range = int(self.probe.maxlength/1000) + 1
self.spinBox_end.setMaximum(self.max_range)
self.horizontalSlider_end.setMaximum(self.max_range)
self.spinBox_start.setMaximum(self.max_range)
self.horizontalSlider_start.setMaximum(self.max_range)
self.label_bedfile.setText(file)
# self.label.setText(self.nowchr)
def get_chr(self, filename):
self.probe = Probes(filename)
chrlist = self.probe.chrs
return chrlist
def onActionvated(self, text):
self.statusbar.showMessage(text)
self.nowchr = text
def draw_graph(self):
self.nowprobe = self.probeset[self.probeset[0] == self.nowchr]
self.sortedprobe = self.nowprobe.sort(columns=1)
self.perkbprobe = self.sortedprobe[3].value_counts(sort=False)
self.sortedperkbcount = pd.DataFrame(self.perkbprobe).sort_index()
self.sortedperkbcount = self.sortedperkbcount.reindex(index=range(0, self.max_range), fill_value=0)
#
self.spinBox_end.setMaximum(self.probe.chrlens[self.nowchr])
self.horizontalSlider_end.setMaximum(self.probe.chrlens[self.nowchr])
self.spinBox_start.setMaximum(self.probe.chrlens[self.nowchr])
self.horizontalSlider_start.setMaximum(self.probe.chrlens[self.nowchr])
self.spinBox_start.setValue(0)
self.spinBox_end.setValue(self.probe.chrlens[self.nowchr])
self.horizontalSlider_start.setValue(0)
self.horizontalSlider_end.setValue(self.probe.chrlens[self.nowchr])
self.widget.canvas.ax1.clear()
self.widget.canvas.ax2.clear()
self.widget.canvas.ax1.plot(pd.rolling_mean(self.sortedperkbcount.Kb,100))
self.widget.canvas.ax1.set_xlim(0, self.probe.chrlens[self.nowchr])
self.widget.canvas.ax1.set_title(self.nowchr)
self.widget.canvas.line2, = self.widget.canvas.ax2.plot(self.sortedperkbcount.Kb)
# self.widget.canvas.ax2.plot(self.sortedperkbcount.Kb)
self.widget.canvas.ax2.set_xlim(0, self.probe.chrlens[self.nowchr])
self.widget.canvas.draw()
def update_graph(self):
self.widget.canvas.ax2.clear()
self.widget.canvas.ax2.plot(self.sortedperkbcount.Kb)
self.widget.canvas.ax2.set_xlim(self.spinBox_start.value(), self.spinBox_end.value())
self.widget.canvas.ax1.clear()
self.widget.canvas.ax1.set_title(self.nowchr)
self.widget.canvas.ax1.plot(pd.rolling_mean(self.sortedperkbcount.Kb,100))
self.widget.canvas.ax1.axvspan(self.spinBox_start.value(), self.spinBox_end.value(), facecolor=self.comboBox_color.currentText(), alpha=0.5)
self.widget.canvas.ax1.set_xlim(0, self.probe.chrlens[self.nowchr])
self.subplotprob = self.nowprobe[self.nowprobe[3] > self.spinBox_start.value()]
self.subplotprob = self.subplotprob[self.subplotprob[3] < self.spinBox_end.value()]
self.subplottotalprobe = len(self.subplotprob.index)
self.horizontalSlider_start.setMaximum(self.spinBox_end.value()-1)
self.horizontalSlider_end.setMinimum(self.spinBox_start.value()+1)
self.label_totalpb.setText(str(self.subplottotalprobe))
self.spinBox_pbnumber.setMaximum(self.subplottotalprobe)
self.spinBox_pbnumber.setValue(self.subplottotalprobe)
regionlength = self.horizontalSlider_end.value() - self.horizontalSlider_start.value() + 1
self.selectedregionlength = regionlength
mes = "Region Length: "+str(regionlength)+'kb'
self.statusbar.showMessage(mes)
self.widget.canvas.draw()
def oneselect(self, xmins, xmaxs):
xmins = int(xmins)
xmaxs = int(xmaxs)
self.widget.canvas.ax2.clear()
self.widget.canvas.ax2.plot(self.sortedperkbcount.Kb)
self.widget.canvas.ax2.set_xlim(xmins, xmaxs)
self.spinBox_start.setValue(xmins)
self.spinBox_end.setValue(xmaxs)
self.subplotprob = self.nowprobe[self.nowprobe[3] < xmaxs]
self.subplotprob = self.subplotprob[self.subplotprob[3] > xmins]
self.spinBox_start.setValue(xmins)
self.spinBox_end.setValue(xmaxs)
self.subplottotalprobe = len(self.subplotprob.index)
self.label_totalpb.setText(str(self.subplottotalprobe))
self.horizontalSlider_start.setMaximum(self.spinBox_end.value()-1)
self.horizontalSlider_end.setMinimum(self.spinBox_start.value()+1)
self.spinBox_pbnumber.setMaximum(self.subplottotalprobe)
self.spinBox_pbnumber.setValue(self.subplottotalprobe)
# print(self.subplotprob)
self.widget.canvas.ax1.clear()
self.widget.canvas.ax1.set_title(self.nowchr)
self.widget.canvas.ax1.plot(pd.rolling_mean(self.sortedperkbcount.Kb,100))
self.widget.canvas.ax1.set_xlim(0, self.probe.chrlens[self.nowchr])
self.widget.canvas.ax1.axvspan(xmins, xmaxs, facecolor=self.comboBox_color.currentText(), alpha=0.5)
regionlength = self.horizontalSlider_end.value() - self.horizontalSlider_start.value() + 1
mes = "Region Length: "+str(regionlength)+'kb'
self.selectedregionlength = regionlength
self.statusbar.showMessage(mes)
self.widget.canvas.draw()
def add_probes(self):
rowcount = self.tableWidget.rowCount()
self.tableWidget.insertRow(rowcount)
#probe density per kb
pbd = round(self.spinBox_pbnumber.value()/self.selectedregionlength, 1)
itchr = QtWidgets.QTableWidgetItem(self.nowchr)
itstart = QtWidgets.QTableWidgetItem(self.spinBox_start.text())
itend = QtWidgets.QTableWidgetItem(self.spinBox_end.text())
itcolor = QtWidgets.QTableWidgetItem(self.comboBox_color.currentText())
ittp = QtWidgets.QTableWidgetItem(self.label_totalpb.text())
itsp = QtWidgets.QTableWidgetItem(self.spinBox_pbnumber.text())
itrgl = QtWidgets.QTableWidgetItem(str(self.selectedregionlength))
itpbd = QtWidgets.QTableWidgetItem(str(pbd))
qcolor = QtGui.QColor(0,0,0)
if self.comboBox_color.currentText() == 'green':
qcolor = QtGui.QColor(0, 255,0)
if self.comboBox_color.currentText() == 'red':
qcolor = QtGui.QColor(255, 0,0)
itcolor.setBackground(qcolor)
self.tableWidget.setItem(rowcount, 0, itchr)
self.tableWidget.setItem(rowcount, 1, itstart)
self.tableWidget.setItem(rowcount, 2, itend)
self.tableWidget.setItem(rowcount, 3, itcolor)
self.tableWidget.setItem(rowcount, 4, ittp)
self.tableWidget.setItem(rowcount, 5, itsp)
self.tableWidget.setItem(rowcount, 6, itrgl)
self.tableWidget.setItem(rowcount, 7, itpbd)
def del_probes(self):
nowItem = self.tableWidget.currentItem()
nowit = nowItem.row()
self.tableWidget.removeRow(nowit)
def draw_overview(self):
self.widget_OV.canvas.ax.clear()
self.widget_OV.canvas.ax.plot(pd.rolling_mean(self.sortedperkbcount.Kb,100))
rowcount = self.tableWidget.rowCount()
self.dockWidget_OV.setVisible(True)
self.widget_OV.canvas.ax.set_title(self.nowchr)
self.widget_OV.canvas.ax.set_xlim(0, self.probe.chrlens[self.nowchr])
print("nowchr", self.nowchr)
for i in range(rowcount):
itchr = self.tableWidget.item(i, 0).text()
if itchr == self.nowchr:
itstart = int(self.tableWidget.item(i,1).text())
itend = int(self.tableWidget.item(i,2).text())
itcolor = self.tableWidget.item(i,3).text()
print(itchr, itstart, itend, itcolor)
self.widget_OV.canvas.ax.axvspan(itstart, itend, facecolor=itcolor, alpha=0.95)
regionlength = self.horizontalSlider_end.value() - self.horizontalSlider_start.value() + 1
self.selectedregionlength = regionlength
mes = "Region Length: "+str(regionlength)+'kb'
self.statusbar.showMessage(mes)
self.widget_OV.canvas.draw()
def setProjetDir(self):
# options = QtWidgets.QFileDialog.DontResolveSymlinks | QtWidgets.QFileDialog.ShowDirsOnly
projectdir = QtWidgets.QFileDialog.getExistingDirectory()
if projectdir:
self.projectdir = projectdir
self.label_prodir.setText(self.projectdir)
def setGenomefile(self):
genomefile, _ = QtWidgets.QFileDialog.getOpenFileName()
if genomefile:
self.genomefile = genomefile
self.label_genomefile.setText(self.genomefile)
def setProbeDir(self):
probedir = QtWidgets.QFileDialog.getExistingDirectory(self, "Choose Probe Set Directory")
if probedir:
self.probedir = probedir
self.label_probedir.setText(self.probedir)
def saveProbe(self):
rowcount = self.tableWidget.rowCount()
if not self.probedir:
# self.setProbeDir()
probedir = QtWidgets.QFileDialog.getExistingDirectory(self, "Choose Probe Set Directory")
if probedir:
self.probedir = probedir
self.label_probedir.setText(self.probedir)
for i in range(rowcount):
itchr = self.tableWidget.item(i,0).text()
itstart = int(self.tableWidget.item(i,1).text())
itend = int(self.tableWidget.item(i,2).text())
itcolor = self.tableWidget.item(i,3).text()
#self.subplotprob = self.nowprobe[self.nowprobe[3] > self.spinBox_start.value()]
#self.subplotprob = self.subplotprob[self.subplotprob[3] < self.spinBox_end.value()]
nowprobes = self.probeset[self.probeset[0]==itchr]
nowprobes = nowprobes[nowprobes[3] > itstart]
nowprobes = nowprobes[nowprobes[3] < itend]
nowprobes = nowprobes.drop(3, 1)
# print(nowprobes)
outfilename = itcolor + '_' + itchr + '_' + str(itstart) + '_' + str(itend) + '.bed'
absfile = os.path.join(self.probedir, outfilename)
nowprobes.to_csv(path_or_buf=absfile, sep='\t', index = False, index_label= False, header=False)
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
tb = DesMainWD()
tb.show()
span = SpanSelector(tb.widget.canvas.ax1, tb.oneselect, 'horizontal', useblit=True,
rectprops=dict(alpha=0.3, facecolor='grey'))
    sys.exit(app.exec_())
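# The tracks drawn by draw_graph/update_graph come from a per-kilobase probe count plus a
# 100-kb rolling mean. A standalone sketch of that computation, assuming a header-less
# probe table whose column 0 is the chromosome and column 3 the kb bin of each probe (as
# the Probes class appears to provide); the function name and window size are illustrative,
# and .rolling().mean() is the current-pandas equivalent of the pd.rolling_mean call used above:
def per_kb_density(probes, chrom, chrom_len_kb, window=100):
    # Count probes falling in each kb bin of the requested chromosome.
    counts = probes[probes[0] == chrom][3].value_counts(sort=False)
    counts = counts.sort_index().reindex(range(0, chrom_len_kb), fill_value=0)
    # Smooth over `window` kb bins, matching the GUI's overview track.
    return counts, counts.rolling(window).mean()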
# --- Lab4/dataset/marmot_test/raw.py (repo: ruoyuryc/Deep-Learning-and-Practice-2020-Spring) ---
import json
import re
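# `raw` below is a long list of (misspelling, correction) pairs; it continues well beyond
# this excerpt. As a minimal sketch of how such a list can be used (the two helpers here
# are illustrative assumptions, not code from the original file):
def build_correction_table(pairs):
    # Later duplicates overwrite earlier ones, plain dict() semantics.
    return dict(pairs)

def correct_text(text, table):
    # Replace whole words only, e.g. 'abotu' -> 'about', leaving already-correct words alone.
    return re.sub(r"[A-Za-z']+", lambda m: table.get(m.group(0), m.group(0)), text)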
raw = [('abandonned','abandoned'),('aberation','aberration'),('abilties','abilities'),('abilty','ability'),('abondon','abandon'),('abbout','about'),('abotu','about'),('abouta','about a'),('aboutit','about it'),('aboutthe','about the'),('abscence','absence'),('abondoned','abandoned'),('abondoning','abandoning'),('abondons','abandons'),('aborigene','aborigine'),('accesories','accessories'),('accidant','accident'),('abortificant','abortifacient'),('abreviate','abbreviate'),('abreviated','abbreviated'),('abreviation','abbreviation'),('abritrary','arbitrary'),('absail','abseil'),('absailing','abseiling'),('absense','absence'),('absolutly','absolutely'),('absorbsion','absorption'),('absorbtion','absorption'),('abudance','abundance'),('abundacies','abundances'),('abundancies','abundances'),('abundunt','abundant'),('abutts','abuts'),('acadamy','academy'),('acadmic','academic'),('accademic','academic'),('accademy','academy'),('acccused','accused'),('accelleration','acceleration'),('accension','accession'),('acceptence','acceptance'),('acceptible','acceptable'),('accessable','accessible'),('acident','accident'),('accidentaly','accidentally'),('accidently','accidentally'),('acclimitization','acclimatization'),('accomadate','accommodate'),('accomadated','accommodated'),('accomadates','accommodates'),('accomadating','accommodating'),('accomadation','accommodation'),('accomadations','accommodations'),('accomdate','accommodate'),('accomodate','accommodate'),('accomodated','accommodated'),('accomodates','accommodates'),('accomodating','accommodating'),('accomodation','accommodation'),('accomodations','accommodations'),('accompanyed','accompanied'),('accordeon','accordion'),('accordian','accordion'),('accoring','according'),('accoustic','acoustic'),('accquainted','acquainted'),('accrediation','accreditation'),('accredidation','accreditation'),('accross','across'),('accussed','accused'),('acedemic','academic'),('acheive','achieve'),('acheived','achieved'),('acheivement','achievement'),('acheivements','achievements'),('acheives','achieves'),('acheiving','achieving'),('acheivment','achievement'),('acheivments','achievements'),('achievment','achievement'),('achievments','achievements'),('achive','achieve'),('achived','achieved'),('achivement','achievement'),('achivements','achievements'),('acknowldeged','acknowledged'),('acknowledgeing','acknowledging'),('ackward','awkward'),('acommodate','accommodate'),('acomplish','accomplish'),('acomplished','accomplished'),('acomplishment','accomplishment'),('acomplishments','accomplishments'),('acording','according'),('acordingly','accordingly'),('acquaintence','acquaintance'),('acquaintences','acquaintances'),('acquiantence','acquaintance'),('acquiantences','acquaintances'),('acquited','acquitted'),('activites','activities'),('activly','actively'),('actualy','actually'),('acuracy','accuracy'),('acused','accused'),('acustom','accustom'),('acustommed','accustomed'),('adavanced','advanced'),('adbandon','abandon'),('addional','additional'),('addionally','additionally'),('additinally','additionally'),('additionaly','additionally'),('additonal','additional'),('additonally','additionally'),('addmission','admission'),('addopt','adopt'),('addopted','adopted'),('addoptive','adoptive'),('addres','address'),('addresable','addressable'),('addresed','addressed'),('addresing','addressing'),('addressess','addresses'),('addtion','addition'),('addtional','additional'),('adecuate','adequate'),('adequit','adequate'),('adhearing','adhering'),('adherance','adherence'),('admendment','amendment'
),('admininistrative','administrative'),('adminstered','administered'),('adminstrate','administrate'),('adminstration','administration'),('adminstrative','administrative'),('adminstrator','administrator'),('admissability','admissibility'),('admissable','admissible'),('admited','admitted'),('admitedly','admittedly'),('adn','and'),('adolecent','adolescent'),('adquire','acquire'),('adquired','acquired'),('adquires','acquires'),('adquiring','acquiring'),('adres','address'),('adresable','addressable'),('adresing','addressing'),('adress','address'),('adressable','addressable'),('adressed','addressed'),('adressing','addressing'),('adventrous','adventurous'),('advertisment','advertisement'),('advertisments','advertisements'),('advesary','adversary'),('adviced','advised'),('aeriel','aerial'),('aeriels','aerials'),('afair','affair'),('afficianados','aficionados'),('afficionado','aficionado'),('afficionados','aficionados'),('affilate','affiliate'),('affilliate','affiliate'),('affort','afford'),('aforememtioned','aforementioned'),('againnst','against'),('agains','against'),('agaisnt','against'),('aganist','against'),('aggaravates','aggravates'),('aggreed','agreed'),('aggreement','agreement'),('aggregious','egregious'),('aggresive','aggressive'),('agian','again'),('agianst','against'),('agin','again'),('agina','again'),('aginst','against'),('agravate','aggravate'),('agre','agree'),('agred','agreed'),('agreeement','agreement'),('agreemnt','agreement'),('agregate','aggregate'),('agregates','aggregates'),('agreing','agreeing'),('agression','aggression'),('agressive','aggressive'),('agressively','aggressively'),('agressor','aggressor'),('agricultue','agriculture'),('agriculure','agriculture'),('agricuture','agriculture'),('agrieved','aggrieved'),('ahev','have'),('ahppen','happen'),('ahve','have'),('aicraft','aircraft'),('aiport','airport'),('airbourne','airborne'),('aircaft','aircraft'),('aircrafts','aircraft'),('aircrafts\'','aircraft\'s'),('airporta','airports'),('airrcraft','aircraft'),('aisian','asian'),('albiet','albeit'),('alchohol','alcohol'),('alchoholic','alcoholic'),('alchol','alcohol'),('alcholic','alcoholic'),('alcohal','alcohol'),('alcoholical','alcoholic'),('aledge','allege'),('aledged','alleged'),('aledges','alleges'),('alege','allege'),('aleged','alleged'),('alegience','allegiance'),('algebraical','algebraic'),('algorhitms','algorithms'),('algoritm','algorithm'),('algoritms','algorithms'),('alientating','alienating'),('alledge','allege'),('alledged','alleged'),('alledgedly','allegedly'),('alledges','alleges'),('allegedely','allegedly'),('allegedy','allegedly'),('allegely','allegedly'),('allegence','allegiance'),('allegience','allegiance'),('allign','align'),('alligned','aligned'),('alliviate','alleviate'),('allopone','allophone'),('allopones','allophones'),('allready','already'),('allthough','although'),('alltime','all-time'),('alltogether','altogether'),('almsot','almost'),('alochol','alcohol'),('alomst','almost'),('alot','a 
lot'),('alotted','allotted'),('alowed','allowed'),('alowing','allowing'),('alreayd','already'),('alse','else'),('alsot','also'),('alternitives','alternatives'),('altho','although'),('althought','although'),('altough','although'),('alusion','allusion'),('alwasy','always'),('alwyas','always'),('amalgomated','amalgamated'),('amatuer','amateur'),('amature','armature'),('amendmant','amendment'),('Amercia','America'),('amerliorate','ameliorate'),('amke','make'),('amking','making'),('ammend','amend'),('ammended','amended'),('ammendment','amendment'),('ammendments','amendments'),('ammount','amount'),('ammused','amused'),('amoung','among'),('amoungst','amongst'),('amung','among'),('amunition','ammunition'),('analagous','analogous'),('analitic','analytic'),('analogeous','analogous'),('anarchim','anarchism'),('anarchistm','anarchism'),('anbd','and'),('ancestory','ancestry'),('ancilliary','ancillary'),('andd','and'),('androgenous','androgynous'),('androgeny','androgyny'),('anihilation','annihilation'),('aniversary','anniversary'),('annoint','anoint'),('annointed','anointed'),('annointing','anointing'),('annoints','anoints'),('annouced','announced'),('annualy','annually'),('annuled','annulled'),('anohter','another'),('anomolies','anomalies'),('anomolous','anomalous'),('anomoly','anomaly'),('anonimity','anonymity'),('anounced','announced'),('anouncement','announcement'),('ansalisation','nasalisation'),('ansalization','nasalization'),('ansestors','ancestors'),('antartic','antarctic'),('anthromorphization','anthropomorphization'),('anthropolgist','anthropologist'),('anthropolgy','anthropology'),('anual','annual'),('anulled','annulled'),('anwsered','answered'),('anyhwere','anywhere'),('anyother','any other'),('anytying','anything'),('aparent','apparent'),('aparment','apartment'),('apenines','apennines'),('aplication','application'),('aplied','applied'),('apolegetics','apologetics'),('apon','upon'),('apparant','apparent'),('apparantly','apparently'),('appart','apart'),('appartment','apartment'),('appartments','apartments'),('appealling','appealing'),('appeareance','appearance'),('appearence','appearance'),('appearences','appearances'),('appenines','apennines'),('apperance','appearance'),('apperances','appearances'),('appereance','appearance'),('appereances','appearances'),('applicaiton','application'),('applicaitons','applications'),('appologies','apologies'),('appology','apology'),('apprearance','appearance'),('apprieciate','appreciate'),('approachs','approaches'),('appropiate','appropriate'),('appropraite','appropriate'),('appropropiate','appropriate'),('approproximate','approximate'),('approxamately','approximately'),('approxiately','approximately'),('approximitely','approximately'),('aprehensive','apprehensive'),('apropriate','appropriate'),('aproval','approval'),('aproximate','approximate'),('aproximately','approximately'),('aquaduct','aqueduct'),('aquaintance','acquaintance'),('aquainted','acquainted'),('aquiantance','acquaintance'),('aquire','acquire'),('aquired','acquired'),('aquiring','acquiring'),('aquisition','acquisition'),('aquitted','acquitted'),('aranged','arranged'),('arangement','arrangement'),('arbitarily','arbitrarily'),('arbitary','arbitrary'),('archaelogical','archaeological'),('archaelogists','archaeologists'),('archaelogy','archaeology'),('archaoelogy','archeology'),('archaology','archeology'),('archeaologist','archeologist'),('archeaologists','archeologists'),('archetect','architect'),('archetects','architects'),('archetectural','architectural'),('archetecturally','architecturally'),(
'archetecture','architecture'),('archiac','archaic'),('archictect','architect'),('archimedian','archimedean'),('architecht','architect'),('architechturally','architecturally'),('architechture','architecture'),('architechtures','architectures'),('architectual','architectural'),('archtype','archetype'),('archtypes','archetypes'),('aready','already'),('areodynamics','aerodynamics'),('argubly','arguably'),('arguement','argument'),('arguements','arguments'),('arised','arose'),('arival','arrival'),('armamant','armament'),('armistace','armistice'),('arogant','arrogant'),('arogent','arrogant'),('aroud','around'),('arrangment','arrangement'),('arrangments','arrangements'),('arrengement','arrangement'),('arrengements','arrangements'),('arround','around'),('artcile','article'),('artical','article'),('artice','article'),('articel','article'),('artifical','artificial'),('artifically','artificially'),('artillary','artillery'),('arund','around'),('asetic','ascetic'),('asfar','as far'),('asign','assign'),('aslo','also'),('asociated','associated'),('asorbed','absorbed'),('asphyxation','asphyxiation'),('assasin','assassin'),('assasinate','assassinate'),('assasinated','assassinated'),('assasinates','assassinates'),('assasination','assassination'),('assasinations','assassinations'),('assasined','assassinated'),('assasins','assassins'),('assassintation','assassination'),('assemple','assemble'),('assertation','assertion'),('asside','aside'),('assisnate','assassinate'),('assit','assist'),('assitant','assistant'),('assocation','association'),('assoicate','associate'),('assoicated','associated'),('assoicates','associates'),('assosication','assassination'),('asssassans','assassins'),('assualt','assault'),('assualted','assaulted'),('assymetric','asymmetric'),('assymetrical','asymmetrical'),('asteriod','asteroid'),('asthetic','aesthetic'),('asthetical','aesthetical'),('asthetically','aesthetically'),('asume','assume'),('aswell','as 
well'),('atain','attain'),('atempting','attempting'),('atheistical','atheistic'),('athenean','athenian'),('atheneans','athenians'),('athiesm','atheism'),('athiest','atheist'),('atorney','attorney'),('atribute','attribute'),('atributed','attributed'),('atributes','attributes'),('attaindre','attainder'),('attemp','attempt'),('attemped','attempted'),('attemt','attempt'),('attemted','attempted'),('attemting','attempting'),('attemts','attempts'),('attendence','attendance'),('attendent','attendant'),('attendents','attendants'),('attened','attended'),('attension','attention'),('attitide','attitude'),('attributred','attributed'),('attrocities','atrocities'),('audeince','audience'),('auromated','automated'),('austrailia','Australia'),('austrailian','Australian'),('auther','author'),('authobiographic','autobiographic'),('authobiography','autobiography'),('authorative','authoritative'),('authorites','authorities'),('authorithy','authority'),('authoritiers','authorities'),('authoritive','authoritative'),('authrorities','authorities'),('autochtonous','autochthonous'),('autoctonous','autochthonous'),('automaticly','automatically'),('automibile','automobile'),('automonomous','autonomous'),('autor','author'),('autority','authority'),('auxilary','auxiliary'),('auxillaries','auxiliaries'),('auxillary','auxiliary'),('auxilliaries','auxiliaries'),('auxilliary','auxiliary'),('availabe','available'),('availablity','availability'),('availaible','available'),('availble','available'),('availiable','available'),('availible','available'),('avalable','available'),('avalance','avalanche'),('avaliable','available'),('avation','aviation'),('avengence','a vengeance'),('averageed','averaged'),('avilable','available'),('awared','awarded'),('awya','away'),('baceause','because'),('backgorund','background'),('backrounds','backgrounds'),('bakc','back'),('banannas','bananas'),('bandwith','bandwidth'),('bankrupcy','bankruptcy'),('banruptcy','bankruptcy'),('baout','about'),('basicaly','basically'),('basicly','basically'),('bcak','back'),('beachead','beachhead'),('beacuse','because'),('beastiality','bestiality'),('beatiful','beautiful'),('beaurocracy','bureaucracy'),('beaurocratic','bureaucratic'),('beautyfull','beautiful'),('becamae','became'),('becames','becomes'),('becasue','because'),('beccause','because'),('becomeing','becoming'),('becomming','becoming'),('becouse','because'),('becuase','because'),('bedore','before'),('beeing','being'),('befoer','before'),('beggin','begin'),('begginer','beginner'),('begginers','beginners'),('beggining','beginning'),('begginings','beginnings'),('beggins','begins'),('begining','beginning'),('beginnig','beginning'),('behavour','behavior'),('beleagured','beleaguered'),('beleif','belief'),('beleive','believe'),('beleived','believed'),('beleives','believes'),('beleiving','believing'),('beligum','belgium'),('belive','believe'),('belived','believed'),('belives','believes'),('belligerant','belligerent'),('bellweather','bellwether'),('bemusemnt','bemusement'),('beneficary','beneficiary'),('beng','being'),('benificial','beneficial'),('benifit','benefit'),('benifits','benefits'),('bergamont','bergamot'),('Bernouilli','Bernoulli'),('beseige','besiege'),('beseiged','besieged'),('beseiging','besieging'),('beteen','between'),('betwen','between'),('beween','between'),('bewteen','between'),('bigining','beginning'),('biginning','beginning'),('bilateraly','bilaterally'),('billingualism','bilingualism'),('binominal','binomial'),('bizzare','bizarre'),('blaim','blame'),('blaimed','blamed'),('blessure','blessing'),('
Blitzkreig','Blitzkrieg'),('boaut','bout'),('bodydbuilder','bodybuilder'),('bombardement','bombardment'),('bombarment','bombardment'),('bondary','boundary'),('Bonnano','Bonanno'),('boook','book'),('borke','broke'),('boundry','boundary'),('bouyancy','buoyancy'),('bouyant','buoyant'),('boyant','buoyant'),('Brasillian','Brazilian'),('breakthough','breakthrough'),('breakthroughts','breakthroughs'),('breif','brief'),('breifly','briefly'),('brethen','brethren'),('bretheren','brethren'),('briliant','brilliant'),('brillant','brilliant'),('brimestone','brimstone'),('Britian','Britain'),('Brittish','British'),('broacasted','broadcast'),('broadacasting','broadcasting'),('broady','broadly'),('Buddah','Buddha'),('Buddist','Buddhist'),('buisness','business'),('buisnessman','businessman'),('buoancy','buoyancy'),('buring','burying'),('burried','buried'),('busines','business'),('busineses','business'),('busness','business'),('bussiness','business'),('caculater','calculator'),('cacuses','caucuses'),('cahracters','characters'),('calaber','caliber'),('calander','calendar'),('calculater','calculator'),('calculs','calculus'),('calenders','calendars'),('caligraphy','calligraphy'),('caluclate','calculate'),('caluclated','calculated'),('caluculate','calculate'),('caluculated','calculated'),('calulate','calculate'),('calulated','calculated'),('calulater','calculator'),('Cambrige','Cambridge'),('camoflage','camouflage'),('campain','campaign'),('campains','campaigns'),('candadate','candidate'),('candiate','candidate'),('candidiate','candidate'),('cannister','canister'),('cannisters','canisters'),('cannnot','cannot'),('cannonical','canonical'),('cannotation','connotation'),('cannotations','connotations'),('cant','cannot'),('caost','coast'),('caperbility','capability'),('Capetown','Cape 
Town'),('capible','capable'),('captial','capital'),('captued','captured'),('capturd','captured'),('carachter','character'),('caracterized','characterized'),('carcas','carcass'),('carefull','careful'),('careing','caring'),('carismatic','charismatic'),('Carmalite','Carmelite'),('Carnagie','Carnegie'),('Carnagie-Mellon','Carnegie-Mellon'),('carnege','carnage'),('carnige','carnage'),('Carnigie','Carnegie'),('Carnigie-Mellon','Carnegie-Mellon'),('carniverous','carnivorous'),('carreer','career'),('carrers','careers'),('Carribbean','Caribbean'),('Carribean','Caribbean'),('cartdridge','cartridge'),('Carthagian','Carthaginian'),('carthographer','cartographer'),('cartilege','cartilage'),('cartilidge','cartilage'),('cartrige','cartridge'),('casette','cassette'),('casion','caisson'),('cassawory','cassowary'),('cassowarry','cassowary'),('casue','cause'),('casued','caused'),('casues','causes'),('casuing','causing'),('casulaties','casualties'),('casulaty','casualty'),('catagories','categories'),('catagorized','categorized'),('catagory','category'),('Cataline','Catiline'),('catapillar','caterpillar'),('catapillars','caterpillars'),('catapiller','caterpillar'),('catapillers','caterpillars'),('catepillar','caterpillar'),('catepillars','caterpillars'),('catergorize','categorize'),('catergorized','categorized'),('caterpilar','caterpillar'),('caterpilars','caterpillars'),('caterpiller','caterpillar'),('caterpillers','caterpillars'),('cathlic','catholic'),('catholocism','catholicism'),('catterpilar','caterpillar'),('catterpilars','caterpillars'),('catterpillar','caterpillar'),('catterpillars','caterpillars'),('cattleship','battleship'),('causalities','casualties'),('Ceasar','Caesar'),('Celcius','Celsius'),('cellpading','cellpadding'),('cementary','cemetery'),('cemetarey','cemetery'),('cemetaries','cemeteries'),('cemetary','cemetery'),('cencus','census'),('censur','censor'),('cententenial','centennial'),('centruies','centuries'),('centruy','century'),('centuties','centuries'),('centuty','century'),('ceratin','certain'),('cerimonial','ceremonial'),('cerimonies','ceremonies'),('cerimonious','ceremonious'),('cerimony','ceremony'),('ceromony','ceremony'),('certainity','certainty'),('certian','certain'),('cervial','cervical'),('chalenging','challenging'),('challange','challenge'),('challanged','challenged'),('challege','challenge'),('Champange','Champagne'),('changable','changeable'),('charachter','character'),('charachters','characters'),('charactersistic','characteristic'),('charactor','character'),('charactors','characters'),('charasmatic','charismatic'),('charaterized','characterized'),('chariman','chairman'),('charistics','characteristics'),('chasr','chaser'),('cheif','chief'),('cheifs','chiefs'),('chemcial','chemical'),('chemcially','chemically'),('chemestry','chemistry'),('chemicaly','chemically'),('childbird','childbirth'),('childen','children'),('choosen','chosen'),('chracter','character'),('chuch','church'),('churchs','churches'),('Cincinatti','Cincinnati'),('Cincinnatti','Cincinnati'),('circulaton','circulation'),('circumsicion','circumcision'),('circut','circuit'),('ciricuit','circuit'),('ciriculum','curriculum'),('civillian','civilian'),('claer','clear'),('claerer','clearer'),('claerly','clearly'),('claimes','claims'),('clas','class'),('clasic','classic'),('clasical','classical'),('clasically','classically'),('cleareance','clearance'),('clera','clear'),('clincial','clinical'),('clinicaly','clinically'),('cmo','com'),('cmoputer','computer'),('co-incided','coincided'),('Coca 
Cola','Coca-Cola'),('coctail','cocktail'),('coform','conform'),('cognizent','cognizant'),('coincedentally','coincidentally'),('colaborations','collaborations'),('colateral','collateral'),('colelctive','collective'),('collaberative','collaborative'),('collecton','collection'),('collegue','colleague'),('collegues','colleagues'),('collonade','colonnade'),('collonies','colonies'),('collony','colony'),('collosal','colossal'),('colonizators','colonizers'),('comander','commander'),('comando','commando'),('comandos','commandos'),('comany','company'),('comapany','company'),('comback','comeback'),('combanations','combinations'),('combinatins','combinations'),('combusion','combustion'),('comdemnation','condemnation'),('comemmorates','commemorates'),('comemoretion','commemoration'),('comision','commission'),('comisioned','commissioned'),('comisioner','commissioner'),('comisioning','commissioning'),('comisions','commissions'),('comission','commission'),('comissioned','commissioned'),('comissioner','commissioner'),('comissioning','commissioning'),('comissions','commissions'),('comited','committed'),('comiting','committing'),('comitted','committed'),('comittee','committee'),('comitting','committing'),('commandoes','commandos'),('commedic','comedic'),('commemerative','commemorative'),('commemmorate','commemorate'),('commemmorating','commemorating'),('commerical','commercial'),('commerically','commercially'),('commericial','commercial'),('commericially','commercially'),('commerorative','commemorative'),('comming','coming'),('comminication','communication'),('commision','commission'),('commisioned','commissioned'),('commisioner','commissioner'),('commisioning','commissioning'),('commisions','commissions'),('commited','committed'),('commitee','committee'),('commiting','committing'),('committe','committee'),('committment','commitment'),('committments','commitments'),('commmemorated','commemorated'),('commongly','commonly'),('commonweath','commonwealth'),('commuications','communications'),('commuinications','communications'),('communciation','communication'),('communiation','communication'),('communites','communities'),('compability','compatibility'),('comparision','comparison'),('comparisions','comparisons'),('comparitive','comparative'),('comparitively','comparatively'),('compatabilities','compatibilities'),('compatability','compatibility'),('compatable','compatible'),('compatablities','compatibilities'),('compatablity','compatibility'),('compatiable','compatible'),('compatiblities','compatibilities'),('compatiblity','compatibility'),('compeitions','competitions'),('compensantion','compensation'),('competance','competence'),('competant','competent'),('competative','competitive'),('competion','competition'),('competitiion','competition'),('competive','competitive'),('competiveness','competitiveness'),('comphrehensive','comprehensive'),('compitent','competent'),('completedthe','completed 
the'),('completelyl','completely'),('completetion','completion'),('complier','compiler'),('componant','component'),('comprable','comparable'),('comprimise','compromise'),('compulsary','compulsory'),('compulsery','compulsory'),('computarized','computerized'),('concensus','consensus'),('concider','consider'),('concidered','considered'),('concidering','considering'),('conciders','considers'),('concieted','conceited'),('concieved','conceived'),('concious','conscious'),('conciously','consciously'),('conciousness','consciousness'),('condamned','condemned'),('condemmed','condemned'),('condidtion','condition'),('condidtions','conditions'),('conditionsof','conditions of'),('conected','connected'),('conection','connection'),('conesencus','consensus'),('confidental','confidential'),('confidentally','confidentially'),('confids','confides'),('configureable','configurable'),('confortable','comfortable'),('congradulations','congratulations'),('congresional','congressional'),('conived','connived'),('conjecutre','conjecture'),('conjuction','conjunction'),('Conneticut','Connecticut'),('conotations','connotations'),('conquerd','conquered'),('conquerer','conqueror'),('conquerers','conquerors'),('conqured','conquered'),('conscent','consent'),('consciouness','consciousness'),('consdider','consider'),('consdidered','considered'),('consdiered','considered'),('consectutive','consecutive'),('consenquently','consequently'),('consentrate','concentrate'),('consentrated','concentrated'),('consentrates','concentrates'),('consept','concept'),('consequentually','consequently'),('consequeseces','consequences'),('consern','concern'),('conserned','concerned'),('conserning','concerning'),('conservitive','conservative'),('consiciousness','consciousness'),('consicousness','consciousness'),('considerd','considered'),('consideres','considered'),('consious','conscious'),('consistant','consistent'),('consistantly','consistently'),('consituencies','constituencies'),('consituency','constituency'),('consituted','constituted'),('consitution','constitution'),('consitutional','constitutional'),('consolodate','consolidate'),('consolodated','consolidated'),('consonent','consonant'),('consonents','consonants'),('consorcium','consortium'),('conspiracys','conspiracies'),('conspiriator','conspirator'),('constaints','constraints'),('constanly','constantly'),('constarnation','consternation'),('constatn','constant'),('constinually','continually'),('constituant','constituent'),('constituants','constituents'),('constituion','constitution'),('constituional','constitutional'),('consttruction','construction'),('constuction','construction'),('contstruction','construction'),('consulant','consultant'),('consumate','consummate'),('consumated','consummated'),('contaiminate','contaminate'),('containes','contains'),('contamporaries','contemporaries'),('contamporary','contemporary'),('contempoary','contemporary'),('contemporaneus','contemporaneous'),('contempory','contemporary'),('contendor','contender'),('contibute','contribute'),('contibuted','contributed'),('contibutes','contributes'),('contigent','contingent'),('contined','continued'),('continential','continental'),('continous','continuous'),('continously','continuously'),('continueing','continuing'),('contravercial','controversial'),('contraversy','controversy'),('contributer','contributor'),('contributers','contributors'),('contritutions','contributions'),('controled','controlled'),('controling','controlling'),('controll','control'),('controlls','controls'),('controvercial','controversial'),('controvercy
','controversy'),('controveries','controversies'),('controversal','controversial'),('controversey','controversy'),('controvertial','controversial'),('controvery','controversy'),('contruction','construction'),('conveinent','convenient'),('convenant','covenant'),('convential','conventional'),('convertables','convertibles'),('convertion','conversion'),('conviced','convinced'),('convienient','convenient'),('coordiantion','coordination'),('coorperation','cooperation'),('coorperations','corporations'),('copmetitors','competitors'),('coputer','computer'),('copywrite','copyright'),('coridal','cordial'),('cornmitted','committed'),('corosion','corrosion'),('corparate','corporate'),('corperations','corporations'),('correcters','correctors'),('correponding','corresponding'),('correposding','corresponding'),('correspondant','correspondent'),('correspondants','correspondents'),('corridoors','corridors'),('corrispond','correspond'),('corrispondant','correspondent'),('corrispondants','correspondents'),('corrisponded','corresponded'),('corrisponding','corresponding'),('corrisponds','corresponds'),('costitution','constitution'),('coucil','council'),('coudl','could'),('councellor','councillor'),('councellors','councillors'),('counries','countries'),('countains','contains'),('countires','countries'),('countrie\'s','countries'),('coururier','courier'),('coverted','converted'),('cpoy','coy'),('creaeted','created'),('creche','crèche'),('creedence','credence'),('critereon','criterion'),('criterias','criteria'),('criticists','critics'),('critising','criticising'),('critisising','criticising'),('critisism','criticism'),('critisisms','criticisms'),('critisize','criticise'),('critisized','criticised'),('critisizes','criticises'),('critisizing','criticising'),('critized','criticized'),('critizing','criticizing'),('crockodiles','crocodiles'),('crowm','crown'),('crtical','critical'),('crticised','criticised'),('crucifiction','crucifixion'),('crusies','cruises'),('crystalisation','crystallisation'),('culiminating','culminating'),('cumulatative','cumulative'),('curch','church'),('curcuit','circuit'),('currenly','currently'),('curriculem','curriculum'),('cxan','cyan'),('cyclinder','cylinder'),('dacquiri','daiquiri'),('daed','dead'),('dael','deal'),('dalmation','dalmatian'),('damenor','demeanor'),('dammage','damage'),('Dardenelles','Dardanelles'),('daugher','daughter'),('debateable','debatable'),('decendant','descendant'),('decendants','descendants'),('decendent','descendant'),('decendents','descendants'),('decideable','decidable'),('decidely','decidedly'),('decieved','deceived'),('decison','decision'),('decomissioned','decommissioned'),('decomposit','decompose'),('decomposited','decomposed'),('decompositing','decomposing'),('decomposits','decomposes'),('decress','decrees'),('decribe','describe'),('decribed','described'),('decribes','describes'),('decribing','describing'),('dectect','detect'),('defendent','defendant'),('defendents','defendants'),('deffensively','defensively'),('deffine','define'),('deffined','defined'),('definance','defiance'),('definate','definite'),('definately','definitely'),('definatly','definitely'),('definetly','definitely'),('definining','defining'),('definit','definite'),('definitly','definitely'),('definiton','definition'),('defintion','definition'),('degrate','degrade'),('delagates','delegates'),('delapidated','dilapidated'),('delerious','delirious'),('delevopment','development'),('deliberatly','deliberately'),('delusionally','delusively'),('demenor','demeanor'),('demographical','demographic'),('de
molision','demolition'),('demorcracy','democracy'),('demostration','demonstration'),('denegrating','denigrating'),('densly','densely'),('deparment','department'),('deparmental','departmental'),('deparments','departments'),('dependance','dependence'),('dependancy','dependency'),('dependant','dependent'),('deram','dram'),('deriviated','derived'),('derivitive','derivative'),('derogitory','derogatory'),('descendands','descendants'),('descibed','described'),('descision','decision'),('descisions','decisions'),('descriibes','describes'),('descripters','descriptors'),('descripton','description'),('desctruction','destruction'),('descuss','discuss'),('desgined','designed'),('deside','decide'),('desigining','designing'),('desinations','destinations'),('desintegrated','disintegrated'),('desintegration','disintegration'),('desireable','desirable'),('desitned','destined'),('desktiop','desktop'),('desorder','disorder'),('desoriented','disoriented'),('desparate','desperate'),('despict','depict'),('despiration','desperation'),('dessicated','desiccated'),('dessigned','designed'),('destablized','destabilized'),('destory','destroy'),('detailled','detailed'),('detatched','detached'),('deteoriated','deteriorated'),('deteriate','deteriorate'),('deterioriating','deteriorating'),('determinining','determining'),('detremental','detrimental'),('devasted','devastated'),('develope','develop'),('developement','development'),('developped','developed'),('develpment','development'),('devels','delves'),('devestated','devastated'),('devestating','devastating'),('devide','divide'),('devided','divided'),('devistating','devastating'),('devolopement','development'),('diablical','diabolical'),('diamons','diamonds'),('diaster','disaster'),('dichtomy','dichotomy'),('diconnects','disconnects'),('dicover','discover'),('dicovered','discovered'),('dicovering','discovering'),('dicovers','discovers'),('dicovery','discovery'),('dictionarys','dictionaries'),('dicussed','discussed'),('didnt','didn\'t'),('diea','idea'),('dieing','dying'),('dieties','deities'),('diety','deity'),('diferent','different'),('diferrent','different'),('differentiatiations','differentiations'),('differnt','different'),('difficulity','difficulty'),('diffrent','different'),('dificulties','difficulties'),('dificulty','difficulty'),('dimenions','dimensions'),('dimention','dimension'),('dimentional','dimensional'),('dimentions','dimensions'),('dimesnional','dimensional'),('diminuitive','diminutive'),('dimunitive','diminutive'),('diosese','diocese'),('diphtong','diphthong'),('diphtongs','diphthongs'),('diplomancy','diplomacy'),('dipthong','diphthong'),('dipthongs','diphthongs'),('dirived','derived'),('disagreeed','disagreed'),('disapeared','disappeared'),('disapointing','disappointing'),('disappearred','disappeared'),('disaproval','disapproval'),('disasterous','disastrous'),('disatisfaction','dissatisfaction'),('disatisfied','dissatisfied'),('disatrous','disastrous'),('discontentment','discontent'),('discribe','describe'),('discribed','described'),('discribes','describes'),('discribing','describing'),('disctinction','distinction'),('disctinctive','distinctive'),('disemination','dissemination'),('disenchanged','disenchanted'),('disiplined','disciplined'),('disobediance','disobedience'),('disobediant','disobedient'),('disolved','dissolved'),('disover','discover'),('dispair','despair'),('disparingly','disparagingly'),('dispence','dispense'),('dispenced','dispensed'),('dispencing','dispensing'),('dispicable','despicable'),('dispite','despite'),('dispostion','disposition'),('di
sproportiate','disproportionate'),('disputandem','disputandum'),('disricts','districts'),('dissagreement','disagreement'),('dissapear','disappear'),('dissapearance','disappearance'),('dissapeared','disappeared'),('dissapearing','disappearing'),('dissapears','disappears'),('dissappear','disappear'),('dissappears','disappears'),('dissappointed','disappointed'),('dissarray','disarray'),('dissobediance','disobedience'),('dissobediant','disobedient'),('dissobedience','disobedience'),('dissobedient','disobedient'),('distiction','distinction'),('distingish','distinguish'),('distingished','distinguished'),('distingishes','distinguishes'),('distingishing','distinguishing'),('distingquished','distinguished'),('distrubution','distribution'),('distruction','destruction'),('distructive','destructive'),('ditributed','distributed'),('diversed','diverse'),('divice','device'),('divinition','divination'),('divison','division'),('divisons','divisions'),('dum','dumb'),('doccument','document'),('doccumented','documented'),('doccuments','documents'),('docrines','doctrines'),('doctines','doctrines'),('documenatry','documentary'),('doens','does'),('doesnt','doesn\'t'),('doign','doing'),('dominaton','domination'),('dominent','dominant'),('dominiant','dominant'),('donig','doing'),('dosen\'t','doesn\'t'),('doub','doubt'),('doulbe','double'),('dowloads','downloads'),('dramtic','dramatic'),('draughtman','draughtsman'),('Dravadian','Dravidian'),('dreasm','dreams'),('driectly','directly'),('drnik','drink'),('druming','drumming'),('drummless','drumless'),('dupicate','duplicate'),('durig','during'),('durring','during'),('duting','during'),('dyas','dryas'),('eahc','each'),('ealier','earlier'),('earlies','earliest'),('earnt','earned'),('ecclectic','eclectic'),('eceonomy','economy'),('ecidious','deciduous'),('eclispe','eclipse'),('ecomonic','economic'),('ect','etc'),('eearly','early'),('efel','evil'),('effeciency','efficiency'),('effecient','efficient'),('effeciently','efficiently'),('efficency','efficiency'),('efficent','efficient'),('efficently','efficiently'),('efford','effort'),('effords','efforts'),('effulence','effluence'),('eigth','eighth'),('eiter','either'),('elction','election'),('electic','eclectic'),('electon','election'),('electrial','electrical'),('electricly','electrically'),('electricty','electricity'),('elementay','elementary'),('eleminated','eliminated'),('eleminating','eliminating'),('eles','eels'),('eletricity','electricity'),('elicided','elicited'),('eligable','eligible'),('elimentary','elementary'),('ellected','elected'),('elphant','elephant'),('embarass','embarrass'),('embarassed','embarrassed'),('embarassing','embarrassing'),('embarassment','embarrassment'),('embargos','embargoes'),('embarras','embarrass'),('embarrased','embarrassed'),('embarrasing','embarrassing'),('embarrasment','embarrassment'),('embezelled','embezzled'),('emblamatic','emblematic'),('eminate','emanate'),('eminated','emanated'),('emision','emission'),('emited','emitted'),('emiting','emitting'),('emition','emission'),('emmediately','immediately'),('emmigrated','emigrated'),('emminent','eminent'),('emminently','eminently'),('emmisaries','emissaries'),('emmisarries','emissaries'),('emmisarry','emissary'),('emmisary','emissary'),('emmision','emission'),('emmisions','emissions'),('emmited','emitted'),('emmiting','emitting'),('emmitted','emitted'),('emmitting','emitting'),('emnity','enmity'),('emperical','empirical'),('emphaised','emphasised'),('emphsis','emphasis'),('emphysyma','emphysema'),('empirial','empirical'),('emporer','emperor'),(
'emprisoned','imprisoned'),('enameld','enameled'),('enchancement','enhancement'),('encouraing','encouraging'),('encryptiion','encryption'),('encylopedia','encyclopedia'),('endevors','endeavors'),('endevour','endeavour'),('endig','ending'),('endolithes','endoliths'),('enduce','induce'),('ened','need'),('enforceing','enforcing'),('engagment','engagement'),('engeneer','engineer'),('engeneering','engineering'),('engieneer','engineer'),('engieneers','engineers'),('enlargment','enlargement'),('enlargments','enlargements'),('Enlish','English'),('enourmous','enormous'),('enourmously','enormously'),('ensconsed','ensconced'),('entaglements','entanglements'),('enteratinment','entertainment'),('enthusiatic','enthusiastic'),('entitity','entity'),('entitlied','entitled'),('entrepeneur','entrepreneur'),('entrepeneurs','entrepreneurs'),('enviorment','environment'),('enviormental','environmental'),('enviormentally','environmentally'),('enviorments','environments'),('enviornment','environment'),('enviornmental','environmental'),('enviornmentalist','environmentalist'),('enviornmentally','environmentally'),('enviornments','environments'),('enviroment','environment'),('enviromental','environmental'),('enviromentalist','environmentalist'),('enviromentally','environmentally'),('enviroments','environments'),('envolutionary','evolutionary'),('envrionments','environments'),('enxt','next'),('epidsodes','episodes'),('epsiode','episode'),('equialent','equivalent'),('equilibium','equilibrium'),('equilibrum','equilibrium'),('equiped','equipped'),('equippment','equipment'),('equitorial','equatorial'),('equivelant','equivalent'),('equivelent','equivalent'),('equivilant','equivalent'),('equivilent','equivalent'),('equivlalent','equivalent'),('erally','orally'),('eratic','erratic'),('eratically','erratically'),('eraticly','erratically'),('erested','arrested'),('errupted','erupted'),('esential','essential'),('esitmated','estimated'),('esle','else'),('especialy','especially'),('essencial','essential'),('essense','essence'),('essentail','essential'),('essentialy','essentially'),('essentual','essential'),('essesital','essential'),('estabishes','establishes'),('establising','establishing'),('ethnocentricm','ethnocentrism'),('ethose','those'),('Europian','European'),('Europians','Europeans'),('Eurpean','European'),('Eurpoean','European'),('evenhtually','eventually'),('eventally','eventually'),('eventhough','even though'),('eventially','eventually'),('eventualy','eventually'),('everthing','everything'),('everytime','every 
time'),('everyting','everything'),('eveyr','every'),('evidentally','evidently'),('exagerate','exaggerate'),('exagerated','exaggerated'),('exagerates','exaggerates'),('exagerating','exaggerating'),('exagerrate','exaggerate'),('exagerrated','exaggerated'),('exagerrates','exaggerates'),('exagerrating','exaggerating'),('examinated','examined'),('exampt','exempt'),('exapansion','expansion'),('excact','exact'),('excange','exchange'),('excecute','execute'),('excecuted','executed'),('excecutes','executes'),('excecuting','executing'),('excecution','execution'),('excedded','exceeded'),('excelent','excellent'),('excell','excel'),('excellance','excellence'),('excellant','excellent'),('excells','excels'),('excercise','exercise'),('exchanching','exchanging'),('excisted','existed'),('exculsivly','exclusively'),('execising','exercising'),('exection','execution'),('exectued','executed'),('exeedingly','exceedingly'),('exelent','excellent'),('exellent','excellent'),('exemple','example'),('exept','except'),('exeptional','exceptional'),('exerbate','exacerbate'),('exerbated','exacerbated'),('exerciese','exercises'),('exerpt','excerpt'),('exerpts','excerpts'),('exersize','exercise'),('exerternal','external'),('exhalted','exalted'),('exhibtion','exhibition'),('exibition','exhibition'),('exibitions','exhibitions'),('exicting','exciting'),('exinct','extinct'),('existance','existence'),('existant','existent'),('existince','existence'),('exliled','exiled'),('exludes','excludes'),('exmaple','example'),('exonorate','exonerate'),('exoskelaton','exoskeleton'),('expalin','explain'),('expatriot','expatriate'),('expeced','expected'),('expecially','especially'),('expeditonary','expeditionary'),('expeiments','experiments'),('expell','expel'),('expells','expels'),('experiance','experience'),('experianced','experienced'),('expiditions','expeditions'),('expierence','experience'),('explaination','explanation'),('explaning','explaining'),('explictly','explicitly'),('exploititive','exploitative'),('explotation','exploitation'),('expropiated','expropriated'),('expropiation','expropriation'),('exressed','expressed'),('extemely','extremely'),('extention','extension'),('extentions','extensions'),('extered','exerted'),('extermist','extremist'),('extint','extinct'),('extradiction','extradition'),('extraterrestial','extraterrestrial'),('extraterrestials','extraterrestrials'),('extravagent','extravagant'),('extrememly','extremely'),('extremeophile','extremophile'),('extremly','extremely'),('extrordinarily','extraordinarily'),('extrordinary','extraordinary'),('eyar','year'),('eyars','years'),('eyasr','years'),('faciliate','facilitate'),('faciliated','facilitated'),('faciliates','facilitates'),('facilites','facilities'),('facillitate','facilitate'),('facinated','fascinated'),('facist','fascist'),('familes','families'),('familliar','familiar'),('famoust','famous'),('fanatism','fanaticism'),('Farenheit','Fahrenheit'),('fatc','fact'),('faught','fought'),('favoutrable','favourable'),('feasable','feasible'),('Febuary','February'),('Feburary','February'),('fedreally','federally'),('femminist','feminist'),('feromone','pheromone'),('fertily','fertility'),('fianite','finite'),('fianlly','finally'),('ficticious','fictitious'),('fictious','fictitious'),('fidn','find'),('fiel','feel'),('fiels','feels'),('fiercly','fiercely'),('fightings','fighting'),('filiament','filament'),('fimilies','families'),('finacial','financial'),('finaly','finally'),('financialy','financially'),('firends','friends'),('firts','flirts'),('fisionable','fissionable'),('flamable','f
lammable'),('flawess','flawless'),('fleed','fled'),('Flemmish','Flemish'),('florescent','fluorescent'),('flourescent','fluorescent'),('flourine','fluorine'),('fluorish','flourish'),('flourishment','flourishing'),('follwoing','following'),('folowing','following'),('fomed','formed'),('fomr','from'),('fonetic','phonetic'),('fontrier','fontier'),('foootball','football'),('forbad','forbade'),('forbiden','forbidden'),('foreward','foreword'),('forfiet','forfeit'),('forhead','forehead'),('foriegn','foreign'),('Formalhaut','Fomalhaut'),('formallize','formalize'),('formallized','formalized'),('formaly','formally'),('formelly','formerly'),('formidible','formidable'),('formost','foremost'),('forsaw','foresaw'),('forseeable','foreseeable'),('fortelling','foretelling'),('forunner','forerunner'),('foucs','focus'),('foudn','found'),('fougth','fought'),('foundaries','foundries'),('foundary','foundry'),('Foundland','Newfoundland'),('fourties','forties'),('fourty','forty'),('fouth','fourth'),('foward','forward'),('Fransiscan','Franciscan'),('Fransiscans','Franciscans'),('freind','friend'),('freindly','friendly'),('frequentily','frequently'),('frome','from'),('fromed','formed'),('froniter','frontier'),('fucntion','function'),('fucntioning','functioning'),('fufill','fulfill'),('fufilled','fulfilled'),('fulfiled','fulfilled'),('fullfill','fulfill'),('fullfilled','fulfilled'),('fundametal','fundamental'),('fundametals','fundamentals'),('funguses','fungi'),('funtion','function'),('furuther','further'),('futher','further'),('futhermore','furthermore'),('futhroc','futhark'),('gae','game'),('galatic','galactic'),('Galations','Galatians'),('gallaxies','galaxies'),('galvinized','galvanized'),('Gameboy','Game Boy'),('ganerate','generate'),('ganes','games'),('ganster','gangster'),('garantee','guarantee'),('garanteed','guaranteed'),('garantees','guarantees'),('gardai','gardaí'),('garnison','garrison'),('gauarana','guaraná'),('gaurantee','guarantee'),('gauranteed','guaranteed'),('gaurantees','guarantees'),('gaurd','guard'),('gaurentee','guarantee'),('gaurenteed','guaranteed'),('gaurentees','guarantees'),('geneological','genealogical'),('geneologies','genealogies'),('geneology','genealogy'),('generaly','generally'),('generatting','generating'),('genialia','genitalia'),('geographicial','geographical'),('geometrician','geometer'),('geometricians','geometers'),('gerat','great'),('Ghandi','Gandhi'),('glight','flight'),('gnawwed','gnawed'),('godess','goddess'),('godesses','goddesses'),('Godounov','Godunov'),('gogin','going'),('goign','going'),('gonig','going'),('Gothenberg','Gothenburg'),('Gottleib','Gottlieb'),('gouvener','governor'),('govement','government'),('govenment','government'),('govenrment','government'),('goverance','governance'),('goverment','government'),('govermental','governmental'),('governer','governor'),('governmnet','government'),('govorment','government'),('govormental','governmental'),('govornment','government'),('gracefull','graceful'),('graet','great'),('grafitti','graffiti'),('gramatically','grammatically'),('grammaticaly','grammatically'),('grammer','grammar'),('grat','great'),('gratuitious','gratuitous'),('greatful','grateful'),('greatfully','gratefully'),('greif','grief'),('gridles','griddles'),('gropu','group'),('grwo','grow'),('Guaduloupe','Guadalupe'),('Guadulupe','Guadalupe'),('guage','gauge'),('guarentee','guarantee'),('guarenteed','guaranteed'),('guarentees','guarantees'),('Guatamala','Guatemala'),('Guatamalan','Guatemalan'),('guerrila','guerrilla'),('guerrilas','guerrillas'),('guidence','guidanc
e'),('Guilia','Giulia'),('Guilio','Giulio'),('Guiness','Guinness'),('Guiseppe','Giuseppe'),('gunanine','guanine'),('gurantee','guarantee'),('guranteed','guaranteed'),('gurantees','guarantees'),('guttaral','guttural'),('gutteral','guttural'),('habaeus','habeas'),('habeus','habeas'),('Habsbourg','Habsburg'),('haemorrage','haemorrhage'),('haev','have'),('halarious','hilarious'),('Hallowean','Hallowe\'en'),('halp','help'),('hapen','happen'),('hapened','happened'),('hapening','happening'),('happend','happened'),('happended','happened'),('happenned','happened'),('harased','harassed'),('harases','harasses'),('harasment','harassment'),('harasments','harassments'),('harassement','harassment'),('harras','harass'),('harrased','harassed'),('harrases','harasses'),('harrasing','harassing'),('harrasment','harassment'),('harrasments','harassments'),('harrassed','harassed'),('harrasses','harassed'),('harrassing','harassing'),('harrassment','harassment'),('harrassments','harassments'),('hasnt','hasn\'t'),('Hatian','Haitian'),('haviest','heaviest'),('headquarer','headquarter'),('headquater','headquarter'),('headquatered','headquartered'),('headquaters','headquarters'),('healthercare','healthcare'),('heared','heard'),('heathy','healthy'),('Heidelburg','Heidelberg'),('heigher','higher'),('heirarchy','hierarchy'),('heiroglyphics','hieroglyphics'),('helment','helmet'),('helpfull','helpful'),('helpped','helped'),('hemmorhage','hemorrhage'),('herad','heard'),('heridity','heredity'),('heroe','hero'),('heros','heroes'),('hertiage','heritage'),('hertzs','hertz'),('hesistant','hesitant'),('heterogenous','heterogeneous'),('hieght','height'),('hierachical','hierarchical'),('hierachies','hierarchies'),('hierachy','hierarchy'),('hierarcical','hierarchical'),('hierarcy','hierarchy'),('hieroglph','hieroglyph'),('hieroglphs','hieroglyphs'),('higer','higher'),('higest','highest'),('higway','highway'),('hillarious','hilarious'),('himselv','himself'),('hinderance','hindrance'),('hinderence','hindrance'),('hindrence','hindrance'),('hipopotamus','hippopotamus'),('hismelf','himself'),('histocompatability','histocompatibility'),('historicians','historians'),('hitsingles','hit singles'),('holf','hold'),('holliday','holiday'),('homestate','home 
state'),('homogeneize','homogenize'),('homogeneized','homogenized'),('honory','honorary'),('horrifing','horrifying'),('hosited','hoisted'),('hospitible','hospitable'),('hounour','honour'),('housr','hours'),('howver','however'),('hsitorians','historians'),('hstory','history'),('hten','then'),('htere','there'),('htey','they'),('htikn','think'),('hting','thing'),('htink','think'),('htis','this'),('humer','humor'),('humerous','humorous'),('huminoid','humanoid'),('humoural','humoral'),('humurous','humorous'),('husban','husband'),('hvae','have'),('hvaing','having'),('hvea','have'),('hwihc','which'),('hwile','while'),('hwole','whole'),('hydogen','hydrogen'),('hydropile','hydrophile'),('hydropilic','hydrophilic'),('hydropobe','hydrophobe'),('hydropobic','hydrophobic'),('hygeine','hygiene'),('hypocracy','hypocrisy'),('hypocrasy','hypocrisy'),('hypocricy','hypocrisy'),('hypocrit','hypocrite'),('hypocrits','hypocrites'),('iconclastic','iconoclastic'),('idaeidae','idea'),('idaes','ideas'),('idealogies','ideologies'),('idealogy','ideology'),('identicial','identical'),('identifers','identifiers'),('ideosyncratic','idiosyncratic'),('idesa','ideas'),('idiosyncracy','idiosyncrasy'),('Ihaca','Ithaca'),('illegimacy','illegitimacy'),('illegitmate','illegitimate'),('illess','illness'),('illiegal','illegal'),('illution','illusion'),('ilness','illness'),('ilogical','illogical'),('imagenary','imaginary'),('imagin','imagine'),('imaginery','imaginary'),('imanent','eminent'),('imcomplete','incomplete'),('imediately','immediately'),('imense','immense'),('imigrant','emigrant'),('imigrated','emigrated'),('imigration','emigration'),('iminent','eminent'),('immediatley','immediately'),('immediatly','immediately'),('immidately','immediately'),('immidiately','immediately'),('immitate','imitate'),('immitated','imitated'),('immitating','imitating'),('immitator','imitator'),('immunosupressant','immunosuppressant'),('impecabbly','impeccably'),('impedence','impedance'),('implamenting','implementing'),('impliment','implement'),('implimented','implemented'),('imploys','employs'),('importamt','important'),('impressario','impresario'),('imprioned','imprisoned'),('imprisonned','imprisoned'),('improvision','improvisation'),('improvments','improvements'),('inablility','inability'),('inaccessable','inaccessible'),('inadiquate','inadequate'),('inadquate','inadequate'),('inadvertant','inadvertent'),('inadvertantly','inadvertently'),('inagurated','inaugurated'),('inaguration','inauguration'),('inappropiate','inappropriate'),('inaugures','inaugurates'),('inbalance','imbalance'),('inbalanced','imbalanced'),('inbetween','between'),('incarcirated','incarcerated'),('incidentially','incidentally'),('incidently','incidentally'),('inclreased','increased'),('includ','include'),('includng','including'),('incompatabilities','incompatibilities'),('incompatability','incompatibility'),('incompatable','incompatible'),('incompatablities','incompatibilities'),('incompatablity','incompatibility'),('incompatiblities','incompatibilities'),('incompatiblity','incompatibility'),('incompetance','incompetence'),('incompetant','incompetent'),('incomptable','incompatible'),('incomptetent','incompetent'),('inconsistant','inconsistent'),('incoroporated','incorporated'),('incorperation','incorporation'),('incorportaed','incorporated'),('incorprates','incorporates'),('incorruptable','incorruptible'),('incramentally','incrementally'),('increadible','incredible'),('incredable','incredible'),('inctroduce','introduce'),('inctroduced','introduced'),('incuding','including'),('
incunabla','incunabula'),('indefinately','indefinitely'),('indefineable','undefinable'),('indefinitly','indefinitely'),('indentical','identical'),('indepedantly','independently'),('indepedence','independence'),('independance','independence'),('independant','independent'),('independantly','independently'),('independece','independence'),('independendet','independent'),('indespensable','indispensable'),('indespensible','indispensable'),('indictement','indictment'),('indigineous','indigenous'),('indipendence','independence'),('indipendent','independent'),('indipendently','independently'),('indispensible','indispensable'),('indisputible','indisputable'),('indisputibly','indisputably'),('indite','indict'),('individualy','individually'),('indpendent','independent'),('indpendently','independently'),('indulgue','indulge'),('indutrial','industrial'),('indviduals','individuals'),('inefficienty','inefficiently'),('inevatible','inevitable'),('inevitible','inevitable'),('inevititably','inevitably'),('infalability','infallibility'),('infallable','infallible'),('infectuous','infectious'),('infered','inferred'),('infilitrate','infiltrate'),('infilitrated','infiltrated'),('infilitration','infiltration'),('infinit','infinite'),('inflamation','inflammation'),('influencial','influential'),('influented','influenced'),('infomation','information'),('informtion','information'),('infrantryman','infantryman'),('infrigement','infringement'),('ingenius','ingenious'),('ingreediants','ingredients'),('inhabitans','inhabitants'),('inherantly','inherently'),('inheritage','heritage'),('inheritence','inheritance'),('inital','initial'),('initally','initially'),('initation','initiation'),('initiaitive','initiative'),('inlcuding','including'),('inmigrant','immigrant'),('inmigrants','immigrants'),('innoculated','inoculated'),('inocence','innocence'),('inofficial','unofficial'),('inot','into'),('inpeach','impeach'),('inpolite','impolite'),('inprisonment','imprisonment'),('inproving','improving'),('insectiverous','insectivorous'),('insensative','insensitive'),('inseperable','inseparable'),('insistance','insistence'),('insitution','institution'),('insitutions','institutions'),('inspite','in 
spite'),('instade','instead'),('instatance','instance'),('institue','institute'),('instuction','instruction'),('instuments','instruments'),('instutionalized','institutionalized'),('instutions','intuitions'),('insurence','insurance'),('intelectual','intellectual'),('inteligence','intelligence'),('inteligent','intelligent'),('intenational','international'),('intented','intended'),('intepretation','interpretation'),('intepretator','interpretor'),('interational','international'),('interbread','interbreed'),('interchangable','interchangeable'),('interchangably','interchangeably'),('intercontinential','intercontinental'),('intercontinetal','intercontinental'),('intered','interred'),('interelated','interrelated'),('interferance','interference'),('interfereing','interfering'),('intergrated','integrated'),('intergration','integration'),('interm','interim'),('internation','international'),('interpet','interpret'),('interrim','interim'),('interrugum','interregnum'),('intertaining','entertaining'),('interupt','interrupt'),('intervines','intervenes'),('intevene','intervene'),('intial','initial'),('intially','initially'),('intrduced','introduced'),('intrest','interest'),('introdued','introduced'),('intruduced','introduced'),('intrument','instrument'),('intrumental','instrumental'),('intruments','instruments'),('intrusted','entrusted'),('intutive','intuitive'),('intutively','intuitively'),('inudstry','industry'),('inumerable','enumerable'),('inventer','inventor'),('invertibrates','invertebrates'),('investingate','investigate'),('involvment','involvement'),('irelevent','irrelevant'),('iresistable','irresistible'),('iresistably','irresistibly'),('iresistible','irresistible'),('iresistibly','irresistibly'),('iritable','irritable'),('iritated','irritated'),('ironicly','ironically'),('irregardless','regardless'),('irrelevent','irrelevant'),('irreplacable','irreplaceable'),('irresistable','irresistible'),('irresistably','irresistibly'),('isnt','isn\'t'),('Israelies','Israelis'),('issueing','issuing'),('itnroduced','introduced'),('iunior','junior'),('iwll','will'),('iwth','with'),('Janurary','January'),('Januray','January'),('Japanes','Japanese'),('jaques','jacques'),('jeapardy','jeopardy'),('jewllery','jewellery'),('Johanine','Johannine'),('jorunal','journal'),('Jospeh','Joseph'),('jouney','journey'),('journied','journeyed'),('journies','journeys'),('jstu','just'),('jsut','just'),('Juadaism','Judaism'),('Juadism','Judaism'),('judical','judicial'),('judisuary','judiciary'),('juducial','judicial'),('juristiction','jurisdiction'),('juristictions','jurisdictions'),('kindergarden','kindergarten'),('klenex','kleenex'),('knifes','knives'),('knive','knife'),('knowlege','knowledge'),('knowlegeable','knowledgeable'),('knwo','know'),('knwos','knows'),('konw','know'),('konws','knows'),('kwno','know'),('labatory','lavatory'),('labled','labelled'),('labratory','laboratory'),('laguage','language'),('laguages','languages'),('larg','large'),('largst','largest'),('larrry','larry'),('lastr','last'),('lattitude','latitude'),('launchs','launch'),('launhed','launched'),('lavae','larvae'),('layed','laid'),('lazyness','laziness'),('leage','league'),('leanr','lean'),('leathal','lethal'),('lefted','left'),('legitamate','legitimate'),('legitmate','legitimate'),('leibnitz','leibniz'),('lenght','length'),('leran','learn'),('lerans','learns'),('leutenant','lieutenant'),('levetate','levitate'),('levetated','levitated'),('levetates','levitates'),('levetating','levitating'),('levle','level'),('liasion','liaison'),('liason','liaison'),('liasons
','liaisons'),('libary','library'),('libell','libel'),('libguistic','linguistic'),('libguistics','linguistics'),('libitarianisn','libertarianism'),('lible','libel'),('lieing','lying'),('liek','like'),('liekd','liked'),('liesure','leisure'),('lieuenant','lieutenant'),('lieved','lived'),('liftime','lifetime'),('lightyear','light year'),('lightyears','light years'),('likelyhood','likelihood'),('linnaena','linnaean'),('lippizaner','lipizzaner'),('liquify','liquefy'),('liscense','license'),('lisence','license'),('lisense','license'),('listners','listeners'),('litature','literature'),('literaly','literally'),('literture','literature'),('littel','little'),('litterally','literally'),('liuke','like'),('livley','lively'),('lmits','limits'),('loev','love'),('lonelyness','loneliness'),('longitudonal','longitudinal'),('lonley','lonely'),('lonly','lonely'),('loosing','losing'),('lotharingen','lothringen'),('lsat','last'),('lukid','likud'),('lveo','love'),('lvoe','love'),('Lybia','Libya'),('maching','machine'),('mackeral','mackerel'),('magasine','magazine'),('magizine','magazine'),('magisine','magizine'),('magincian','magician'),('magnificient','magnificent'),('magolia','magnolia'),('mailny','mainly'),('maintainance','maintenance'),('maintainence','maintenance'),('maintance','maintenance'),('maintenence','maintenance'),('maintinaing','maintaining'),('maintioned','mentioned'),('majoroty','majority'),('maked','marked'),('makse','makes'),('Malcom','Malcolm'),('maltesian','Maltese'),('mamal','mammal'),('mamalian','mammalian'),('managable','manageable'),('managment','management'),('maneouvre','manoeuvre'),('maneouvred','manoeuvred'),('maneouvres','manoeuvres'),('maneouvring','manoeuvring'),('manisfestations','manifestations'),('manoeuverability','maneuverability'),('manouver','maneuver'),('manouverability','maneuverability'),('manouverable','maneuverable'),('manouvers','maneuvers'),('mantained','maintained'),('manuever','maneuver'),('manuevers','maneuvers'),('manufacturedd','manufactured'),('manufature','manufacture'),('manufatured','manufactured'),('manufaturing','manufacturing'),('manuver','maneuver'),('mariage','marriage'),('marjority','majority'),('markes','marks'),('marketting','marketing'),('marmelade','marmalade'),('marrage','marriage'),('marraige','marriage'),('marrtyred','martyred'),('marryied','married'),('Massachussets','Massachusetts'),('Massachussetts','Massachusetts'),('massmedia','mass 
media'),('masterbation','masturbation'),('mataphysical','metaphysical'),('materalists','materialist'),('mathamatics','mathematics'),('mathematican','mathematician'),('mathematicas','mathematics'),('matheticians','mathematicians'),('mathmatically','mathematically'),('mathmatician','mathematician'),('mathmaticians','mathematicians'),('mccarthyst','mccarthyist'),('mchanics','mechanics'),('meaninng','meaning'),('mear','wear'),('mechandise','merchandise'),('medacine','medicine'),('medeival','medieval'),('medevial','medieval'),('mediciney','mediciny'),('medievel','medieval'),('mediterainnean','mediterranean'),('Mediteranean','Mediterranean'),('meerkrat','meerkat'),('melieux','milieux'),('membranaphone','membranophone'),('memeber','member'),('menally','mentally'),('meranda','veranda'),('mercentile','mercantile'),('messanger','messenger'),('messenging','messaging'),('metalic','metallic'),('metalurgic','metallurgic'),('metalurgical','metallurgical'),('metalurgy','metallurgy'),('metamorphysis','metamorphosis'),('metaphoricial','metaphorical'),('meterologist','meteorologist'),('meterology','meteorology'),('methaphor','metaphor'),('methaphors','metaphors'),('Michagan','Michigan'),('micoscopy','microscopy'),('midwifes','midwives'),('mileau','milieu'),('milennia','millennia'),('milennium','millennium'),('mileu','milieu'),('miliary','military'),('miligram','milligram'),('milion','million'),('miliraty','military'),('millenia','millennia'),('millenial','millennial'),('millenialism','millennialism'),('millenium','millennium'),('millepede','millipede'),('millioniare','millionaire'),('millitary','military'),('millon','million'),('miltary','military'),('minature','miniature'),('minerial','mineral'),('ministery','ministry'),('minsitry','ministry'),('minstries','ministries'),('minstry','ministry'),('minumum','minimum'),('mirrorred','mirrored'),('miscelaneous','miscellaneous'),('miscellanious','miscellaneous'),('miscellanous','miscellaneous'),('mischeivous','mischievous'),('mischevious','mischievous'),('mischievious','mischievous'),('misdameanor','misdemeanor'),('misdameanors','misdemeanors'),('misdemenor','misdemeanor'),('misdemenors','misdemeanors'),('misfourtunes','misfortunes'),('misile','missile'),('Misouri','Missouri'),('mispell','misspell'),('mispelled','misspelled'),('mispelling','misspelling'),('missen','mizzen'),('Missisipi','Mississippi'),('Missisippi','Mississippi'),('missle','missile'),('missonary','missionary'),('misterious','mysterious'),('mistery','mystery'),('misteryous','mysterious'),('mkae','make'),('mkaes','makes'),('mkaing','making'),('mkea','make'),('moderm','modem'),('modle','model'),('moent','moment'),('moeny','money'),('mohammedans','muslims'),('moil','mohel'),('moleclues','molecules'),('momento','memento'),('monestaries','monasteries'),('monestary','monastery'),('monickers','monikers'),('monolite','monolithic'),('Monserrat','Montserrat'),('montains','mountains'),('montanous','mountainous'),('Montnana','Montana'),('monts','months'),('montypic','monotypic'),('moreso','more'),('morgage','mortgage'),('Morisette','Morissette'),('Morrisette','Morissette'),('morroccan','moroccan'),('morrocco','morocco'),('morroco','morocco'),('mortage','mortgage'),('mosture','moisture'),('motiviated','motivated'),('mounth','month'),('movei','movie'),('movment','movement'),('mroe','more'),('mucuous','mucous'),('muder','murder'),('mudering','murdering'),('muhammadan','muslim'),('multicultralism','multiculturalism'),('multipled','multiplied'),('multiplers','multipliers'),('munbers','numbers'),('muncipalities','muni
cipalities'),('muncipality','municipality'),('munnicipality','municipality'),('muscels','mussels'),('muscial','musical'),('muscician','musician'),('muscicians','musicians'),('mutiliated','mutilated'),('myraid','myriad'),('mysef','myself'),('mysogynist','misogynist'),('mysogyny','misogyny'),('mysterous','mysterious'),('Mythraic','Mithraic'),('naieve','naive'),('Naploeon','Napoleon'),('Napolean','Napoleon'),('Napoleonian','Napoleonic'),('naturaly','naturally'),('naturely','naturally'),('naturual','natural'),('naturually','naturally'),('Nazereth','Nazareth'),('neccesarily','necessarily'),('neccesary','necessary'),('neccessarily','necessarily'),('neccessary','necessary'),('neccessities','necessities'),('necesarily','necessarily'),('necesary','necessary'),('necessiate','necessitate'),('neglible','negligible'),('negligable','negligible'),('negociate','negotiate'),('negociation','negotiation'),('negociations','negotiations'),('negotation','negotiation'),('neice','niece'),('neigborhood','neighborhood'),('neigbour','neighbour'),('neigbourhood','neighbourhood'),('neigbouring','neighbouring'),('neigbours','neighbours'),('neolitic','neolithic'),('nessasarily','necessarily'),('nessecary','necessary'),('nestin','nesting'),('neverthless','nevertheless'),('newletters','newsletters'),('nickle','nickel'),('nightfa;;','nightfall'),('nightime','nighttime'),('nineth','ninth'),('ninteenth','nineteenth'),('ninties','1990s'),('ninty','ninety'),('nkow','know'),('nkwo','know'),('nmae','name'),('noncombatents','noncombatants'),('nonsence','nonsense'),('nontheless','nonetheless'),('noone','no one'),('norhern','northern'),('northen','northern'),('northereastern','northeastern'),('notabley','notably'),('noteable','notable'),('noteably','notably'),('noteriety','notoriety'),('noth','north'),('nothern','northern'),('noticable','noticeable'),('noticably','noticeably'),('noticeing','noticing'),('noticible','noticeable'),('notwhithstanding','notwithstanding'),('noveau','nouveau'),('Novermber','November'),('nowdays','nowadays'),('nowe','now'),('nto','not'),('nucular','nuclear'),('nuculear','nuclear'),('nuisanse','nuisance'),('Nullabour','Nullarbor'),('numberous','numerous'),('Nuremburg','Nuremberg'),('nusance','nuisance'),('nutritent','nutrient'),('nutritents','nutrients'),('nuturing','nurturing'),('obediance','obedience'),('obediant','obedient'),('obession','obsession'),('obssessed','obsessed'),('obstacal','obstacle'),('obstancles','obstacles'),('obstruced','obstructed'),('ocasion','occasion'),('ocasional','occasional'),('ocasionally','occasionally'),('ocasionaly','occasionally'),('ocasioned','occasioned'),('ocasions','occasions'),('ocassion','occasion'),('ocassional','occasional'),('ocassionally','occasionally'),('ocassionaly','occasionally'),('ocassioned','occasioned'),('ocassions','occasions'),('occaison','occasion'),('occassion','occasion'),('occassional','occasional'),('occassionally','occasionally'),('occassionaly','occasionally'),('occassioned','occasioned'),('occassions','occasions'),('occationally','occasionally'),('occour','occur'),('occurance','occurrence'),('occurances','occurrences'),('occured','occurred'),('occurence','occurrence'),('occurences','occurrences'),('occuring','occurring'),('occurr','occur'),('occurrance','occurrence'),('occurrances','occurrences'),('octohedra','octahedra'),('octohedral','octahedral'),('octohedron','octahedron'),('ocuntries','countries'),('ocuntry','country'),('ocurr','occur'),('ocurrance','occurrence'),('ocurred','occurred'),('ocurrence','occurrence'),('offcers','officers'),('offcia
lly','officially'),('offereings','offerings'),('offical','official'),('offically','officially'),('officals','officials'),('officaly','officially'),('officialy','officially'),('offred','offered'),('oftenly','often'),('oging','going'),('ok','OK'),('omision','omission'),('omited','omitted'),('omiting','omitting'),('omlette','omelette'),('ommision','omission'),('ommited','omitted'),('ommiting','omitting'),('ommitted','omitted'),('ommitting','omitting'),('omniverous','omnivorous'),('omniverously','omnivorously'),('omre','more'),('onot','note'),('onyl','only'),('openess','openness'),('oponent','opponent'),('oportunity','opportunity'),('opose','oppose'),('oposite','opposite'),('oposition','opposition'),('oppenly','openly'),('oppinion','opinion'),('opponant','opponent'),('oppononent','opponent'),('oppositition','opposition'),('oppossed','opposed'),('opprotunity','opportunity'),('opression','oppression'),('opressive','oppressive'),('opthalmic','ophthalmic'),('opthalmologist','ophthalmologist'),('opthalmology','ophthalmology'),('opthamologist','ophthalmologist'),('optmizations','optimizations'),('optomism','optimism'),('orded','ordered'),('organim','organism'),('organistion','organisation'),('organiztion','organization'),('orgin','origin'),('orginal','original'),('orginally','originally'),('orginize','organise'),('oridinarily','ordinarily'),('origanaly','originally'),('originall','original'),('originaly','originally'),('originially','originally'),('originnally','originally'),('origional','original'),('orignally','originally'),('orignially','originally'),('otehr','other'),('oublisher','publisher'),('ouevre','oeuvre'),('oustanding','outstanding'),('overshaddowed','overshadowed'),('overthere','over there'),('overwelming','overwhelming'),('overwheliming','overwhelming'),('owrk','work'),('owudl','would'),('oxigen','oxygen'),('oximoron','oxymoron'),('p0enis','penis'),('paide','paid'),('paitience','patience'),('palce','place'),('paleolitic','paleolithic'),('paliamentarian','parliamentarian'),('Palistian','Palestinian'),('Palistinian','Palestinian'),('Palistinians','Palestinians'),('pallete','palette'),('pamflet','pamphlet'),('pamplet','pamphlet'),('pantomine','pantomime'),('Papanicalou','Papanicolaou'),('paralel','parallel'),('paralell','parallel'),('paralelly','parallelly'),('paralely','parallelly'),('parallely','parallelly'),('paranthesis','parenthesis'),('paraphenalia','paraphernalia'),('parellels','parallels'),('parisitic','parasitic'),('parituclar','particular'),('parliment','parliament'),('parrakeets','parakeets'),('parralel','parallel'),('parrallel','parallel'),('parrallell','parallel'),('parrallelly','parallelly'),('parrallely','parallelly'),('partialy','partially'),('particually','particularly'),('particualr','particular'),('particuarly','particularly'),('particularily','particularly'),('particulary','particularly'),('pary','party'),('pased','passed'),('pasengers','passengers'),('passerbys','passersby'),('pasttime','pastime'),('pastural','pastoral'),('paticular','particular'),('pattented','patented'),('pavillion','pavilion'),('payed','paid'),('pblisher','publisher'),('pbulisher','publisher'),('peacefuland','peaceful 
and'),('peageant','pageant'),('peculure','peculiar'),('pedestrain','pedestrian'),('peformed','performed'),('peice','piece'),('Peloponnes','Peloponnesus'),('penatly','penalty'),('penerator','penetrator'),('penisula','peninsula'),('penisular','peninsular'),('penninsula','peninsula'),('penninsular','peninsular'),('pennisula','peninsula'),('Pennyslvania','Pennsylvania'),('pensle','pencil'),('pensinula','peninsula'),('peom','poem'),('peoms','poems'),('peopel','people'),('peotry','poetry'),('perade','parade'),('percepted','perceived'),('percieve','perceive'),('percieved','perceived'),('perenially','perennially'),('perfomance','performance'),('perfomers','performers'),('performence','performance'),('performes','performed'),('perhasp','perhaps'),('perheaps','perhaps'),('perhpas','perhaps'),('peripathetic','peripatetic'),('peristent','persistent'),('perjery','perjury'),('perjorative','pejorative'),('permanant','permanent'),('permenant','permanent'),('permenantly','permanently'),('permissable','permissible'),('perogative','prerogative'),('peronal','personal'),('perpertrated','perpetrated'),('perosnality','personality'),('perphas','perhaps'),('perpindicular','perpendicular'),('perseverence','perseverance'),('persistance','persistence'),('persistant','persistent'),('personel','personnel'),('personell','personnel'),('personnell','personnel'),('persuded','persuaded'),('persue','pursue'),('persued','pursued'),('persuing','pursuing'),('persuit','pursuit'),('persuits','pursuits'),('pertubation','perturbation'),('pertubations','perturbations'),('pessiary','pessary'),('petetion','petition'),('Pharoah','Pharaoh'),('phenomenom','phenomenon'),('phenomenonal','phenomenal'),('phenomenonly','phenomenally'),('phenomonenon','phenomenon'),('phenomonon','phenomenon'),('phenonmena','phenomena'),('Philipines','Philippines'),('philisopher','philosopher'),('philisophical','philosophical'),('philisophy','philosophy'),('Phillipine','Philippine'),('Phillipines','Philippines'),('Phillippines','Philippines'),('phillosophically','philosophically'),('philospher','philosopher'),('philosphies','philosophies'),('philosphy','philosophy'),('Phonecian','Phoenecian'),('phongraph','phonograph'),('phylosophical','philosophical'),('physicaly','physically'),('piblisher','publisher'),('pich','pitch'),('pilgrimmage','pilgrimage'),('pilgrimmages','pilgrimages'),('pinapple','pineapple'),('pinnaple','pineapple'),('pinoneered','pioneered'),('plagarism','plagiarism'),('planation','plantation'),('planed','planned'),('plantiff','plaintiff'),('plateu','plateau'),('plausable','plausible'),('playright','playwright'),('playwrite','playwright'),('playwrites','playwrights'),('pleasent','pleasant'),('plebicite','plebiscite'),('plesant','pleasant'),('poenis','penis'),('poeoples','peoples'),('poety','poetry'),('poisin','poison'),('polical','political'),('polinator','pollinator'),('polinators','pollinators'),('politican','politician'),('politicans','politicians'),('poltical','political'),('polute','pollute'),('poluted','polluted'),('polutes','pollutes'),('poluting','polluting'),('polution','pollution'),('polyphonyic','polyphonic'),('polysaccaride','polysaccharide'),('polysaccharid','polysaccharide'),('pomegranite','pomegranate'),('pomotion','promotion'),('poportional','proportional'),('popoulation','population'),('popularaty','popularity'),('populare','popular'),('populer','popular'),('porshan','portion'),('porshon','portion'),('portait','portrait'),('portayed','portrayed'),('portraing','portraying'),('Portugese','Portuguese'),('portuguease','portuguese'),('p
ortugues','Portuguese'),('posess','possess'),('posessed','possessed'),('posesses','possesses'),('posessing','possessing'),('posession','possession'),('posessions','possessions'),('posion','poison'),('positon','position'),('possable','possible'),('possably','possibly'),('posseses','possesses'),('possesing','possessing'),('possesion','possession'),('possessess','possesses'),('possibile','possible'),('possibilty','possibility'),('possiblility','possibility'),('possiblilty','possibility'),('possiblities','possibilities'),('possiblity','possibility'),('possition','position'),('Postdam','Potsdam'),('posthomous','posthumous'),('postion','position'),('postive','positive'),('potatos','potatoes'),('potrait','portrait'),('potrayed','portrayed'),('poulations','populations'),('poverful','powerful'),('poweful','powerful'),('powerfull','powerful'),('ppublisher','publisher'),('practial','practical'),('practially','practically'),('practicaly','practically'),('practicioner','practitioner'),('practicioners','practitioners'),('practicly','practically'),('practioner','practitioner'),('practioners','practitioners'),('prairy','prairie'),('prarie','prairie'),('praries','prairies'),('pratice','practice'),('preample','preamble'),('precedessor','predecessor'),('preceed','precede'),('preceeded','preceded'),('preceeding','preceding'),('preceeds','precedes'),('precentage','percentage'),('precice','precise'),('precisly','precisely'),('precurser','precursor'),('predecesors','predecessors'),('predicatble','predictable'),('predicitons','predictions'),('predomiantly','predominately'),('prefered','preferred'),('prefering','preferring'),('preferrably','preferably'),('pregancies','pregnancies'),('preiod','period'),('preliferation','proliferation'),('premeire','premiere'),('premeired','premiered'),('premillenial','premillennial'),('preminence','preeminence'),('premission','permission'),('Premonasterians','Premonstratensians'),('preocupation','preoccupation'),('prepair','prepare'),('prepartion','preparation'),('prepatory','preparatory'),('preperation','preparation'),('preperations','preparations'),('preriod','period'),('presedential','presidential'),('presense','presence'),('presidenital','presidential'),('presidental','presidential'),('presitgious','prestigious'),('prespective','perspective'),('prestigeous','prestigious'),('prestigous','prestigious'),('presumabely','presumably'),('presumibly','presumably'),('pretection','protection'),('prevelant','prevalent'),('preverse','perverse'),('previvous','previous'),('pricipal','principal'),('priciple','principle'),('priestood','priesthood'),('primarly','primarily'),('primative','primitive'),('primatively','primitively'),('primatives','primitives'),('primordal','primordial'),('principlaity','principality'),('principaly','principality'),('principial','principal'),('principly','principally'),('prinicipal','principal'),('privalege','privilege'),('privaleges','privileges'),('priveledges','privileges'),('privelege','privilege'),('priveleged','privileged'),('priveleges','privileges'),('privelige','privilege'),('priveliged','privileged'),('priveliges','privileges'),('privelleges','privileges'),('privilage','privilege'),('priviledge','privilege'),('priviledges','privileges'),('privledge','privilege'),('privte','private'),('probabilaty','probability'),('probablistic','probabilistic'),('probablly','probably'),('probalibity','probability'),('probaly','probably'),('probelm','problem'),('proccess','process'),('proccessing','processing'),('procede','proceed'),('proceded','proceeded'),('procedes','proc
eeds'),('procedger','procedure'),('proceding','proceeding'),('procedings','proceedings'),('proceedure','procedure'),('proces','process'),('processer','processor'),('proclaimation','proclamation'),('proclamed','proclaimed'),('proclaming','proclaiming'),('proclomation','proclamation'),('profesion','profusion'),('profesor','professor'),('professer','professor'),('proffesed','professed'),('proffesion','profession'),('proffesional','professional'),('proffesor','professor'),('profilic','prolific'),('progessed','progressed'),('progidy','prodigy'),('programable','programmable'),('progrom','pogrom'),('progroms','pogroms'),('prohabition','prohibition'),('prologomena','prolegomena'),('prominance','prominence'),('prominant','prominent'),('prominantly','prominently'),('prominately','prominently'),('promiscous','promiscuous'),('promotted','promoted'),('pronomial','pronominal'),('pronouced','pronounced'),('pronounched','pronounced'),('pronounciation','pronunciation'),('proove','prove'),('prooved','proved'),('prophacy','prophecy'),('propietary','proprietary'),('propmted','prompted'),('propoganda','propaganda'),('propogate','propagate'),('propogates','propagates'),('propogation','propagation'),('propostion','proposition'),('propotions','proportions'),('propper','proper'),('propperly','properly'),('proprietory','proprietary'),('proseletyzing','proselytizing'),('protaganist','protagonist'),('protaganists','protagonists'),('protocal','protocol'),('protoganist','protagonist'),('protrayed','portrayed'),('protruberance','protuberance'),('protruberances','protuberances'),('prouncements','pronouncements'),('provacative','provocative'),('provded','provided'),('provicial','provincial'),('provinicial','provincial'),('provisiosn','provision'),('provisonal','provisional'),('proximty','proximity'),('pseudononymous','pseudonymous'),('pseudonyn','pseudonym'),('psuedo','pseudo'),('psycology','psychology'),('psyhic','psychic'),('pubilsher','publisher'),('pubisher','publisher'),('publiaher','publisher'),('publically','publicly'),('publicaly','publicly'),('publicher','publisher'),('publihser','publisher'),('publisehr','publisher'),('publiser','publisher'),('publisger','publisher'),('publisheed','published'),('publisherr','publisher'),('publishher','publisher'),('publishor','publisher'),('publishre','publisher'),('publissher','publisher'),('publlisher','publisher'),('publsiher','publisher'),('publusher','publisher'),('puchasing','purchasing'),('Pucini','Puccini'),('Puertorrican','Puerto Rican'),('Puertorricans','Puerto 
Ricans'),('pulisher','publisher'),('pumkin','pumpkin'),('puplisher','publisher'),('puritannical','puritanical'),('purposedly','purposely'),('purpotedly','purportedly'),('pursuade','persuade'),('pursuaded','persuaded'),('pursuades','persuades'),('pususading','persuading'),('puting','putting'),('pwoer','power'),('pyscic','psychic'),('qtuie','quite'),('quantaty','quantity'),('quantitiy','quantity'),('quarantaine','quarantine'),('Queenland','Queensland'),('questonable','questionable'),('quicklyu','quickly'),('quinessential','quintessential'),('quitted','quit'),('quizes','quizzes'),('qutie','quite'),('rabinnical','rabbinical'),('racaus','raucous'),('radiactive','radioactive'),('radify','ratify'),('raelly','really'),('rarified','rarefied'),('reaccurring','recurring'),('reacing','reaching'),('reacll','recall'),('readmition','readmission'),('realitvely','relatively'),('realsitic','realistic'),('realtions','relations'),('realy','really'),('realyl','really'),('reasearch','research'),('rebiulding','rebuilding'),('rebllions','rebellions'),('rebounce','rebound'),('reccomend','recommend'),('reccomendations','recommendations'),('reccomended','recommended'),('reccomending','recommending'),('reccommend','recommend'),('reccommended','recommended'),('reccommending','recommending'),('reccuring','recurring'),('receeded','receded'),('receeding','receding'),('receivedfrom','received from'),('recepient','recipient'),('recepients','recipients'),('receving','receiving'),('rechargable','rechargeable'),('reched','reached'),('recide','reside'),('recided','resided'),('recident','resident'),('recidents','residents'),('reciding','residing'),('reciepents','recipients'),('reciept','receipt'),('recieve','receive'),('recieved','received'),('reciever','receiver'),('recievers','receivers'),('recieves','receives'),('recieving','receiving'),('recipiant','recipient'),('recipiants','recipients'),('recived','received'),('recivership','receivership'),('recogise','recognise'),('recogize','recognize'),('recomend','recommend'),('recomended','recommended'),('recomending','recommending'),('recomends','recommends'),('recommedations','recommendations'),('recompence','recompense'),('reconaissance','reconnaissance'),('reconcilation','reconciliation'),('reconized','recognized'),('reconnaisance','reconnaissance'),('reconnaissence','reconnaissance'),('recontructed','reconstructed'),('recordproducer','record 
producer'),('recquired','required'),('recrational','recreational'),('recrod','record'),('recuiting','recruiting'),('recuring','recurring'),('recurrance','recurrence'),('rediculous','ridiculous'),('reedeming','redeeming'),('reenforced','reinforced'),('refect','reflect'),('refedendum','referendum'),('referal','referral'),('referece','reference'),('refereces','references'),('refered','referred'),('referemce','reference'),('referemces','references'),('referencs','references'),('referenece','reference'),('refereneced','referenced'),('refereneces','references'),('referiang','referring'),('refering','referring'),('refernce','reference'),('refernces','references'),('referrence','reference'),('referrences','references'),('referrs','refers'),('reffered','referred'),('refference','reference'),('reffering','referring'),('refrence','reference'),('refrences','references'),('refrers','refers'),('refridgeration','refrigeration'),('refridgerator','refrigerator'),('refromist','reformist'),('refusla','refusal'),('regardes','regards'),('regluar','regular'),('reguarly','regularly'),('regulaion','regulation'),('regulaotrs','regulators'),('regularily','regularly'),('rehersal','rehearsal'),('reicarnation','reincarnation'),('reigining','reigning'),('reknown','renown'),('reknowned','renowned'),('rela','real'),('relaly','really'),('relatiopnship','relationship'),('relativly','relatively'),('relected','reelected'),('releive','relieve'),('releived','relieved'),('releiver','reliever'),('releses','releases'),('relevence','relevance'),('relevent','relevant'),('reliablity','reliability'),('relient','reliant'),('religeous','religious'),('religous','religious'),('religously','religiously'),('relinqushment','relinquishment'),('relitavely','relatively'),('relized','realised'),('relpacement','replacement'),('remaing','remaining'),('remeber','remember'),('rememberable','memorable'),('rememberance','remembrance'),('remembrence','remembrance'),('remenant','remnant'),('remenicent','reminiscent'),('reminent','remnant'),('reminescent','reminiscent'),('reminscent','reminiscent'),('reminsicent','reminiscent'),('rendevous','rendezvous'),('rendezous','rendezvous'),('renedered','rende'),('renewl','renewal'),('rennovate','renovate'),('rennovated','renovated'),('rennovating','renovating'),('rennovation','renovation'),('rentors','renters'),('reoccurrence','recurrence'),('reorganision','reorganisation'),('repatition','repetition'),('repblic','republic'),('repblican','republican'),('repblicans','republicans'),('repblics','republics'),('repectively','respectively'),('repeition','repetition'),('repentence','repentance'),('repentent','repentant'),('repeteadly','repeatedly'),('repetion','repetition'),('repid','rapid'),('reponse','response'),('reponsible','responsible'),('reportadly','reportedly'),('represantative','representative'),('representive','representative'),('representives','representatives'),('reproducable','reproducible'),('reprtoire','repertoire'),('repsectively','respectively'),('reptition','repetition'),('repubic','republic'),('repubican','republican'),('repubicans','republicans'),('repubics','republics'),('republi','republic'),('republian','republican'),('republians','republicans'),('republis','republics'),('repulic','republic'),('repulican','republican'),('repulicans','republicans'),('repulics','republics'),('requirment','requirement'),('requred','required'),('resaurant','restaurant'),('resembelance','resemblance'),('resembes','resembles'),('resemblence','resemblance'),('resevoir','reservoir'),('residental','residential'),('resignem
ent','resignment'),('resistable','resistible'),('resistence','resistance'),('resistent','resistant'),('respectivly','respectively'),('responce','response'),('responibilities','responsibilities'),('responisble','responsible'),('responnsibilty','responsibility'),('responsability','responsibility'),('responsibile','responsible'),('responsibilites','responsibilities'),('responsiblities','responsibilities'),('responsiblity','responsibility'),('ressemblance','resemblance'),('ressemble','resemble'),('ressembled','resembled'),('ressemblence','resemblance'),('ressembling','resembling'),('resssurecting','resurrecting'),('ressurect','resurrect'),('ressurected','resurrected'),('ressurection','resurrection'),('ressurrection','resurrection'),('restarant','restaurant'),('restarants','restaurants'),('restaraunt','restaurant'),('restaraunteur','restaurateur'),('restaraunteurs','restaurateurs'),('restaraunts','restaurants'),('restauranteurs','restaurateurs'),('restauration','restoration'),('restauraunt','restaurant'),('resteraunt','restaurant'),('resteraunts','restaurants'),('resticted','restricted'),('restraunt','restraint'),('resturant','restaurant'),('resturants','restaurants'),('resturaunt','restaurant'),('resturaunts','restaurants'),('resurecting','resurrecting'),('retalitated','retaliated'),('retalitation','retaliation'),('retreive','retrieve'),('returnd','returned'),('revaluated','reevaluated'),('reveiw','review'),('reveral','reversal'),('reversable','reversible'),('revolutionar','revolutionary'),('rewitten','rewritten'),('rewriet','rewrite'),('rference','reference'),('rferences','references'),('rhymme','rhyme'),('rhythem','rhythm'),('rhythim','rhythm'),('rhytmic','rhythmic'),('rigeur','rigueur'),('rigourous','rigorous'),('rininging','ringing'),('rised','rose'),('Rockerfeller','Rockefeller'),('rococco','rococo'),('rocord','record'),('roomate','roommate'),('rougly','roughly'),('rucuperate','recuperate'),('rudimentatry','rudimentary'),('rulle','rule'),('runing','running'),('runnung','running'),('russina','Russian'),('Russion','Russian'),('rwite','write'),('rythem','rhythm'),('rythim','rhythm'),('rythm','rhythm'),('rythmic','rhythmic'),('rythyms','rhythms'),('sacrafice','sacrifice'),('sacreligious','sacrilegious'),('Sacremento','Sacramento'),('sacrifical','sacrificial'),('saftey','safety'),('safty','safety'),('salery','salary'),('sanctionning','sanctioning'),('sandwhich','sandwich'),('Sanhedrim','Sanhedrin'),('santioned','sanctioned'),('sargant','sergeant'),('sargeant','sergeant'),('sasy','says'),('satelite','satellite'),('satelites','satellites'),('Saterday','Saturday'),('Saterdays','Saturdays'),('satisfactority','satisfactorily'),('satric','satiric'),('satrical','satirical'),('satrically','satirically'),('sattelite','satellite'),('sattelites','satellites'),('saught','sought'),('saveing','saving'),('saxaphone','saxophone'),('scaleable','scalable'),('scandanavia','Scandinavia'),('scaricity','scarcity'),('scavanged','scavenged'),('schedual','schedule'),('scholarhip','scholarship'),('scholarstic','scholastic'),('scientfic','scientific'),('scientifc','scientific'),('scientis','scientist'),('scince','science'),('scinece','science'),('scirpt','script'),('scoll','scroll'),('screenwrighter','screenwriter'),('scrutinity','scrutiny'),('scuptures','sculptures'),('seach','search'),('seached','searched'),('seaches','searches'),('secceeded','seceded'),('seceed','succeed'),('seceeded','succeeded'),('secratary','secretary'),('secretery','secretary'),('sedereal','sidereal'),('seeked','sought'),('segementation','segmenta
tion'),('seguoys','segues'),('seige','siege'),('seing','seeing'),('seinor','senior'),('seldomly','seldom'),('senarios','scenarios'),('sence','sense'),('senstive','sensitive'),('sensure','censure'),('seperate','separate'),('seperated','separated'),('seperately','separately'),('seperates','separates'),('seperating','separating'),('seperation','separation'),('seperatism','separatism'),('seperatist','separatist'),('sepina','subpoena'),('sepulchure','sepulchre'),('sepulcre','sepulchre'),('sergent','sergeant'),('settelement','settlement'),('settlment','settlement'),('severeal','several'),('severley','severely'),('severly','severely'),('sevice','service'),('shadasloo','shadaloo'),('shaddow','shadow'),('shadoloo','shadaloo'),('shamen','shaman'),('sheat','sheath'),('sheild','shield'),('sherif','sheriff'),('shineing','shining'),('shiped','shipped'),('shiping','shipping'),('shopkeeepers','shopkeepers'),('shorly','shortly'),('shortwhile','short while'),('shoudl','should'),('shoudln','should'),('shouldnt','shouldn\'t'),('shreak','shriek'),('shrinked','shrunk'),('sicne','since'),('sideral','sidereal'),('sieze','seize'),('siezed','seized'),('siezing','seizing'),('siezure','seizure'),('siezures','seizures'),('siginificant','significant'),('signficant','significant'),('signficiant','significant'),('signfies','signifies'),('signifantly','significantly'),('significently','significantly'),('signifigant','significant'),('signifigantly','significantly'),('signitories','signatories'),('signitory','signatory'),('similarily','similarly'),('similiar','similar'),('similiarity','similarity'),('similiarly','similarly'),('simmilar','similar'),('simpley','simply'),('simplier','simpler'),('simultanous','simultaneous'),('simultanously','simultaneously'),('sincerley','sincerely'),('singsog','singsong'),('sinse','sines'),('Sionist','Zionist'),('Sionists','Zionists'),('Sixtin','Sistine'),('Skagerak','Skagerrak'),('skateing','skating'),('slaugterhouses','slaughterhouses'),('slighly','slightly'),('slippy','slippery'),('slowy','slowly'),('smae','same'),('smealting','smelting'),('smoe','some'),('sneeks','sneaks'),('snese','sneeze'),('socalism','socialism'),('socities','societies'),('soem','some'),('sofware','software'),('sohw','show'),('soilders','soldiers'),('solatary','solitary'),('soley','solely'),('soliders','soldiers'),('soliliquy','soliloquy'),('soluable','soluble'),('somene','someone'),('somtimes','sometimes'),('somwhere','somewhere'),('sophicated','sophisticated'),('sophmore','sophomore'),('sorceror','sorcerer'),('sorrounding','surrounding'),('sotry','story'),('sotyr','satyr'),('soudn','sound'),('soudns','sounds'),('sould','could'),('sountrack','soundtrack'),('sourth','south'),('sourthern','southern'),('souvenier','souvenir'),('souveniers','souvenirs'),('soveits','soviets'),('sovereignity','sovereignty'),('soverign','sovereign'),('soverignity','sovereignty'),('soverignty','sovereignty'),('spainish','Spanish'),('speach','speech'),('specfic','specific'),('speciallized','specialised'),('specif','specific'),('specifiying','specifying'),('speciman','specimen'),('spectauclar','spectacular'),('spectaulars','spectaculars'),('spects','aspects'),('spectum','spectrum'),('speices','species'),('spendour','splendour'),('spermatozoan','spermatozoon'),('spoace','space'),('sponser','sponsor'),('sponsered','sponsored'),('spontanous','spontaneous'),('sponzored','sponsored'),('spoonfulls','spoonfuls'),('sppeches','speeches'),('spreaded','spread'),('sprech','speech'),('spred','spread'),('spriritual','spiritual'),('spritual','spiritual'),('sqa
ure','square'),('stablility','stability'),('stainlees','stainless'),('staion','station'),('standars','standards'),('stange','strange'),('startegic','strategic'),('startegies','strategies'),('startegy','strategy'),('stateman','statesman'),('statememts','statements'),('statment','statement'),('steriods','steroids'),('sterotypes','stereotypes'),('stilus','stylus'),('stingent','stringent'),('stiring','stirring'),('stirrs','stirs'),('stlye','style'),('stomache','stomach'),('stong','strong'),('stopry','story'),('storeis','stories'),('storise','stories'),('stornegst','strongest'),('stoyr','story'),('stpo','stop'),('stradegies','strategies'),('stradegy','strategy'),('strat','start'),('stratagically','strategically'),('streemlining','streamlining'),('stregth','strength'),('strenghen','strengthen'),('strenghened','strengthened'),('strenghening','strengthening'),('strenght','strength'),('strenghten','strengthen'),('strenghtened','strengthened'),('strenghtening','strengthening'),('strengtened','strengthened'),('strenous','strenuous'),('strictist','strictest'),('strikely','strikingly'),('strnad','strand'),('stroy','story'),('structual','structural'),('stubborness','stubbornness'),('stucture','structure'),('stuctured','structured'),('studdy','study'),('studing','studying'),('stuggling','struggling'),('sturcture','structure'),('subcatagories','subcategories'),('subcatagory','subcategory'),('subconsiously','subconsciously'),('subjudgation','subjugation'),('submachne','submachine'),('subpecies','subspecies'),('subsidary','subsidiary'),('subsiduary','subsidiary'),('subsquent','subsequent'),('subsquently','subsequently'),('substace','substance'),('substancial','substantial'),('substatial','substantial'),('substituded','substituted'),('substract','subtract'),('substracted','subtracted'),('substracting','subtracting'),('substraction','subtraction'),('substracts','subtracts'),('subtances','substances'),('subterranian','subterranean'),('suburburban','suburban'),('succceeded','succeeded'),('succcesses','successes'),('succedded','succeeded'),('succeded','succeeded'),('succeds','succeeds'),('succesful','successful'),('succesfully','successfully'),('succesfuly','successfully'),('succesion','succession'),('succesive','successive'),('successfull','successful'),('successully','successfully'),('succsess','success'),('succsessfull','successful'),('suceed','succeed'),('suceeded','succeeded'),('suceeding','succeeding'),('suceeds','succeeds'),('sucesful','successful'),('sucesfully','successfully'),('sucesfuly','successfully'),('sucesion','succession'),('sucess','success'),('sucesses','successes'),('sucessful','successful'),('sucessfull','successful'),('sucessfully','successfully'),('sucessfuly','successfully'),('sucession','succession'),('sucessive','successive'),('sucessor','successor'),('sucessot','successor'),('sucide','suicide'),('sucidial','suicidal'),('sufferage','suffrage'),('sufferred','suffered'),('sufferring','suffering'),('sufficent','sufficient'),('sufficently','sufficiently'),('sumary','summary'),('sunglases','sunglasses'),('suop','soup'),('superceeded','superseded'),('superintendant','superintendent'),('suphisticated','sophisticated'),('suplimented','supplemented'),('supose','suppose'),('suposed','supposed'),('suposedly','supposedly'),('suposes','supposes'),('suposing','supposing'),('supplamented','supplemented'),('suppliementing','supplementing'),('suppoed','supposed'),('supposingly','supposedly'),('suppy','supply'),('supress','suppress'),('supressed','suppressed'),('supresses','suppresses'),('supressing','sup
pressing'),('suprise','surprise'),('suprised','surprised'),('suprising','surprising'),('suprisingly','surprisingly'),('suprize','surprise'),('suprized','surprised'),('suprizing','surprising'),('suprizingly','surprisingly'),('surfce','surface'),('surley','surly'),('suround','surround'),('surounded','surrounded'),('surounding','surrounding'),('suroundings','surroundings'),('surounds','surrounds'),('surplanted','supplanted'),('surpress','suppress'),('surpressed','suppressed'),('surprize','surprise'),('surprized','surprised'),('surprizing','surprising'),('surprizingly','surprisingly'),('surrended','surrounded'),('surrepetitious','surreptitious'),('surrepetitiously','surreptitiously'),('surreptious','surreptitious'),('surreptiously','surreptitiously'),('surronded','surrounded'),('surrouded','surrounded'),('surrouding','surrounding'),('surrundering','surrendering'),('surveilence','surveillance'),('surveill','surveil'),('surveyer','surveyor'),('surviver','survivor'),('survivers','survivors'),('survivied','survived'),('suseptable','susceptible'),('suseptible','susceptible'),('suspention','suspension'),('swaer','swear'),('swaers','swears'),('swepth','swept'),('swiming','swimming'),('syas','says'),('symetrical','symmetrical'),('symetrically','symmetrically'),('symetry','symmetry'),('symettric','symmetric'),('symmetral','symmetric'),('symmetricaly','symmetrically'),('synagouge','synagogue'),('syncronization','synchronization'),('synonomous','synonymous'),('synonymns','synonyms'),('synphony','symphony'),('syphyllis','syphilis'),('sypmtoms','symptoms'),('syrap','syrup'),('sysmatically','systematically'),('sytem','system'),('sytle','style'),('tabacco','tobacco'),('tahn','than'),('taht','that'),('talekd','talked'),('targetted','targeted'),('targetting','targeting'),('tast','taste'),('tath','that'),('tattooes','tattoos'),('taxanomic','taxonomic'),('taxanomy','taxonomy'),('teached','taught'),('techician','technician'),('techicians','technicians'),('techiniques','techniques'),('technitian','technician'),('technnology','technology'),('technolgy','technology'),('teh','the'),('tehy','they'),('telelevision','television'),('televsion','television'),('telphony','telephony'),('temerature','temperature'),('tempalte','template'),('tempaltes','templates'),('temparate','temperate'),('temperarily','temporarily'),('temperment','temperament'),('tempertaure','temperature'),('temperture','temperature'),('temprary','temporary'),('tenacle','tentacle'),('tenacles','tentacles'),('tendacy','tendency'),('tendancies','tendencies'),('tendancy','tendency'),('tennisplayer','tennis 
player'),('tepmorarily','temporarily'),('terrestial','terrestrial'),('terriories','territories'),('terriory','territory'),('territorist','terrorist'),('territoy','territory'),('terroist','terrorist'),('testiclular','testicular'),('testomony','testimony'),('tghe','the'),('thast','that'),('theather','theater'),('theese','these'),('theif','thief'),('theives','thieves'),('themselfs','themselves'),('themslves','themselves'),('ther','there'),('therafter','thereafter'),('therby','thereby'),('theri','their'),('theyre','they\'re'),('thgat','that'),('thge','the'),('thier','their'),('thign','thing'),('thigns','things'),('thigsn','things'),('thikn','think'),('thikning','thinking'),('thikns','thinks'),('thiunk','think'),('thn','then'),('thna','than'),('thne','then'),('thnig','thing'),('thnigs','things'),('thoughout','throughout'),('threatend','threatened'),('threatning','threatening'),('threee','three'),('threshhold','threshold'),('thrid','third'),('throrough','thorough'),('throughly','thoroughly'),('throught','thought'),('througout','throughout'),('thru','through'),('thsi','this'),('thsoe','those'),('thta','that'),('thyat','that'),('tiem','time'),('tihkn','think'),('tihs','this'),('timne','time'),('tiome','time'),('tje','the'),('tjhe','the'),('tjpanishad','upanishad'),('tkae','take'),('tkaes','takes'),('tkaing','taking'),('tlaking','talking'),('tobbaco','tobacco'),('todays','today\'s'),('todya','today'),('toghether','together'),('toke','took'),('tolerence','tolerance'),('Tolkein','Tolkien'),('tomatos','tomatoes'),('tommorow','tomorrow'),('tommorrow','tomorrow'),('tongiht','tonight'),('toriodal','toroidal'),('tormenters','tormentors'),('tornadoe','tornado'),('torpeados','torpedoes'),('torpedos','torpedoes'),('tothe','to the'),('toubles','troubles'),('tounge','tongue'),('tourch','torch'),('towords','towards'),('towrad','toward'),('tradionally','traditionally'),('traditionaly','traditionally'),('traditionnal','traditional'),('traditition','tradition'),('tradtionally','traditionally'),('trafficed','trafficked'),('trafficing','trafficking'),('trafic','traffic'),('trancendent','transcendent'),('trancending','transcending'),('tranform','transform'),('tranformed','transformed'),('transcendance','transcendence'),('transcendant','transcendent'),('transcendentational','transcendental'),('transcripting','transcribing'),('transending','transcending'),('transesxuals','transsexuals'),('transfered','transferred'),('transfering','transferring'),('transformaton','transformation'),('transistion','transition'),('translater','translator'),('translaters','translators'),('transmissable','transmissible'),('transporation','transportation'),('tremelo','tremolo'),('tremelos','tremolos'),('triguered','triggered'),('triology','trilogy'),('troling','trolling'),('troup','troupe'),('troups','troupes'),('truely','truly'),('trustworthyness','trustworthiness'),('turnk','turnkey'),('Tuscon','Tucson'),('tust','trust'),('twelth','twelfth'),('twon','town'),('twpo','two'),('tyhat','that'),('tyhe','they'),('typcial','typical'),('typicaly','typically'),('tyranies','tyrannies'),('tyrany','tyranny'),('tyrranies','tyrannies'),('tyrrany','tyranny'),('ubiquitious','ubiquitous'),('ublisher','publisher'),('uise','use'),('Ukranian','Ukrainian'),('ultimely','ultimately'),('unacompanied','unaccompanied'),('unahppy','unhappy'),('unanymous','unanimous'),('unathorised','unauthorised'),('unavailible','unavailable'),('unballance','unbalance'),('unbeknowst','unbeknownst'),('unbeleivable','unbelievable'),('uncertainity','uncertainty'),('unchallengable','unchal
lengeable'),('unchangable','unchangeable'),('uncompetive','uncompetitive'),('unconcious','unconscious'),('unconciousness','unconsciousness'),('unconfortability','discomfort'),('uncontitutional','unconstitutional'),('unconvential','unconventional'),('undecideable','undecidable'),('understoon','understood'),('undesireable','undesirable'),('undetecable','undetectable'),('undoubtely','undoubtedly'),('undreground','underground'),('uneccesary','unnecessary'),('unecessary','unnecessary'),('unequalities','inequalities'),('unforetunately','unfortunately'),('unforgetable','unforgettable'),('unforgiveable','unforgivable'),('unfortunatley','unfortunately'),('unfortunatly','unfortunately'),('unfourtunately','unfortunately'),('unihabited','uninhabited'),('unilateraly','unilaterally'),('unilatreal','unilateral'),('unilatreally','unilaterally'),('uninterruped','uninterrupted'),('uninterupted','uninterrupted'),('UnitesStates','UnitedStates'),('univeral','universal'),('univeristies','universities'),('univeristy','university'),('univerity','university'),('universtiy','university'),('univesities','universities'),('univesity','university'),('unkown','unknown'),('unlikey','unlikely'),('unmanouverable','unmaneuverable'),('unmistakeably','unmistakably'),('unneccesarily','unnecessarily'),('unneccesary','unnecessary'),('unneccessarily','unnecessarily'),('unneccessary','unnecessary'),('unnecesarily','unnecessarily'),('unnecesary','unnecessary'),('unoffical','unofficial'),('unoperational','nonoperational'),('unoticeable','unnoticeable'),('unplease','displease'),('unplesant','unpleasant'),('unprecendented','unprecedented'),('unprecidented','unprecedented'),('unrepentent','unrepentant'),('unrepetant','unrepentant'),('unrepetent','unrepentant'),('unsed','used'),('unsubstanciated','unsubstantiated'),('unsuccesful','unsuccessful'),('unsuccesfully','unsuccessfully'),('unsuccessfull','unsuccessful'),('unsucesful','unsuccessful'),('unsucesfuly','unsuccessfully'),('unsucessful','unsuccessful'),('unsucessfull','unsuccessful'),('unsucessfully','unsuccessfully'),('unsuprised','unsurprised'),('unsuprising','unsurprising'),('unsuprisingly','unsurprisingly'),('unsuprized','unsurprised'),('unsuprizing','unsurprising'),('unsuprizingly','unsurprisingly'),('unsurprized','unsurprised'),('unsurprizing','unsurprising'),('unsurprizingly','unsurprisingly'),('untill','until'),('untranslateable','untranslatable'),('unuseable','unusable'),('unusuable','unusable'),('unviersity','university'),('unwarrented','unwarranted'),('unweildly','unwieldy'),('unwieldly','unwieldy'),('upcomming','upcoming'),('upgradded','upgraded'),('upto','up 
to'),('usally','usually'),('useage','usage'),('usefull','useful'),('usefuly','usefully'),('useing','using'),('usualy','usually'),('ususally','usually'),('vaccum','vacuum'),('vaccume','vacuum'),('vacinity','vicinity'),('vaguaries','vagaries'),('vaieties','varieties'),('vailidty','validity'),('valetta','valletta'),('valuble','valuable'),('valueable','valuable'),('varations','variations'),('varient','variant'),('variey','variety'),('varing','varying'),('varities','varieties'),('varity','variety'),('vasall','vassal'),('vasalls','vassals'),('vegatarian','vegetarian'),('vegitable','vegetable'),('vegitables','vegetables'),('vegtable','vegetable'),('vehicule','vehicle'),('vell','well'),('venemous','venomous'),('vengance','vengeance'),('vengence','vengeance'),('verfication','verification'),('verison','version'),('verisons','versions'),('vermillion','vermilion'),('versitilaty','versatility'),('versitlity','versatility'),('vetween','between'),('veyr','very'),('vigeur','vigueur'),('vigilence','vigilance'),('vigourous','vigorous'),('villian','villain'),('villification','vilification'),('villify','vilify'),('villin','villi'),('vincinity','vicinity'),('violentce','violence'),('virtualy','virtually'),('virutal','virtual'),('virutally','virtually'),('visable','visible'),('visably','visibly'),('visting','visiting'),('vistors','visitors'),('vitories','victories'),('volcanoe','volcano'),('voleyball','volleyball'),('volontary','voluntary'),('volonteer','volunteer'),('volonteered','volunteered'),('volonteering','volunteering'),('volonteers','volunteers'),('volounteer','volunteer'),('volounteered','volunteered'),('volounteering','volunteering'),('volounteers','volunteers'),('volumne','volume'),('vreity','variety'),('vrey','very'),('vriety','variety'),('vulnerablility','vulnerability'),('vyer','very'),('vyre','very'),('waht','what'),('wanna','want 
to'),('warantee','warranty'),('wardobe','wardrobe'),('warrent','warrant'),('warrriors','warriors'),('wasnt','wasn\'t'),('wass','was'),('watn','want'),('wayword','wayward'),('weaponary','weaponry'),('weas','was'),('wehn','when'),('weild','wield'),('weilded','wielded'),('wendsay','Wednesday'),('wensday','Wednesday'),('wereabouts','whereabouts'),('whant','want'),('whants','wants'),('whcih','which'),('wheras','whereas'),('wherease','whereas'),('whereever','wherever'),('whic','which'),('whihc','which'),('whith','with'),('whlch','which'),('whn','when'),('wholey','wholly'),('wholy','wholly'),('whta','what'),('whther','whether'),('wich','which'),('widesread','widespread'),('wief','wife'),('wierd','weird'),('wiew','view'),('wih','with'),('wiht','with'),('wille','will'),('willk','will'),('willingless','willingness'),('wirting','writing'),('withdrawl','withdrawal'),('witheld','withheld'),('withh','with'),('withing','within'),('withold','withhold'),('witht','with'),('witn','with'),('wiull','will'),('wnat','want'),('wnated','wanted'),('wnats','wants'),('wohle','whole'),('wokr','work'),('wokring','working'),('wonderfull','wonderful'),('wordlwide','worldwide'),('workststion','workstation'),('worls','world'),('worstened','worsened'),('woudl','would'),('wresters','wrestlers'),('wriet','write'),('writen','written'),('wroet','wrote'),('wrok','work'),('wroking','working'),('wtih','with'),('wupport','support'),('xenophoby','xenophobia'),('yaching','yachting'),('yaer','year'),('yaerly','yearly'),('yaers','years'),('yatch','yacht'),('yearm','year'),('yeasr','years'),('yeild','yield'),('yeilding','yielding'),('Yementite','Yemenite'),('yera','year'),('yrea','year'),('yeras','years'),('yersa','years'),('yotube','youtube'),('youseff','yousef'),('youself','yourself'),('ytou','you'),('yuo','you'),('zeebra','zebra'),('onehundred','one hundred'),('twohundred','two hundred'),('threehundred','three hundred'),('fourhundred','four hundred'),('fivehundred','five hundred'),('sixhundred','six hundred'),('sevenhundred','seven hundred'),('eighthundred','eight hundred'),('ninehundred','nine hundred'),('1/1000th','1/1000'),('1/100th','1/100'),('1/10th','1/10'),('1/12th','1/12'),('1/16th','1/16'),('1/2nd','1/2'),('1/2th','1/2'),('1/30th','1/30'),('1/32nd','1/32'),('1/360th','1/360'),('1/3rd','1/3'),('1/3th','1/3'),('1/48th','1/48'),('1/4th','1/4'),('1/50th','1/50'),('1/5th','1/5'),('1/6th','1/6'),('1/7th','1/7'),('1/8th','1/8'),('1/9th','1/9'),('100\'s of','hundreds of'),('1000\'s of','thousands of'),('1000s of','thousands of'),('100cc','100 cc'),('100cm','100 cm'),('100km','100 km'),('100m','100 m'),('100mm','100 mm'),('100s of','hundreds of'),('10M','10 million'),('10V','10 V'),('10am','10 am'),('10cc','10 cc'),('10km','10 km'),('10pm','10 pm'),('110V','110 V'),('11am','11 am'),('11pm','11 pm'),('11st','11th'),('120V','120 V'),('12V','12 V'),('12am','12 am'),('12nd','12th'),('12pm','12 pm'),('13rd','13th'),('1830ies','1830s'),('1840\'s','1840s'),('1850\'s','1850s'),('1850ies','1850s'),('1860ies','1860s'),('1870ies','1870s'),('1880ies','1880s'),('1890ies','1890s'),('1900ies','1900s'),('1910ies','1910s'),('1920ies','1920s'),('1930ies','1930s'),('1940\'s','1940s'),('1940ies','1940s'),('1950-ies','1950s'),('1950ies','1950s'),('1960-ies','1960s'),('1960ies','1960s'),('1970-ies','1970s'),('1970ies','1970s'),('1980-ies','1980s'),('1980ies','1980s'),('1990-ies','1990s'),('1990ies','1990s'),('2000ies','2000s'),('2010ies','2010s'),('1am','1 am'),('1nd','1st'),('1pm','1 
pm'),('1th','1st'),('2/3rd','2/3'),('2/3rds','2/3'),('2/5th','2/5'),('20010','2010'),('20011','2011'),('21th','21st'),('22th','22nd'),('23th','23rd'),('24st','24th'),('2th','2nd'),('3/4th','3/4'),('3/5th','3/5'),('3/8ths','3/8'),('30-ties','1930s'),('31th','31st'),('32th','32nd'),('33th','33rd'),('3am','3 am'),('3nd','3rd'),('3pm','3 pm'),('3th','3rd'),('40ies','1940s'),('41th','41st'),('42th','42nd'),('43th','43rd'),('4am','4 am'),('4pm','4 pm'),('5,000m','5'),('5/8ths','5/8'),('50ies','1950s'),('51th','51st'),('52th','52nd'),('53nd','53rd'),('53th','53rd'),('5V','5 V'),('5am','5 am'),('5pm','5 pm'),('60\'ies','1960s'),('60\'s','1960s'),('60-ies','1960s'),('60W','60 W'),('60es','1960s'),('60ies','1960s'),('60ties','1960s'),('61th','61st'),('62th','62nd'),('63nd','63rd'),('63th','63rd'),('6am','6 am'),('6pm','6 pm'),('70-ies','1970s'),('70ies','1970s'),('80-ies','1980s'),('80ies','1980s'),('8am','8 am'),('8pm','8 pm'),('90ies','1990s'),('9am','9 am'),('9pm','9 pm'),('abberant','aberrant'),('abberation','aberration'),('abbrieviated','abbreviated'),('abbriviated','abbreviated'),('abbriviation','abbreviation'),('abcess','abscess'),('aberrent','aberrant'),('abilites','abilities'),('abillity','ability'),('abnormalites','abnormalities'),('absorbancy','absorbency'),('absorbant','absorbent'),('absorpsion','absorption'),('absoulte','absolute'),('abundence','abundance'),('abundent','abundant'),('accadamy','academy'),('accellerate','accelerate'),('accesed','accessed'),('accesory','accessory'),('accidentially','accidentally'),('accompaning','accompanying'),('accreditate','accredit'),('accros','across'),('acertain','ascertain'),('acess','access'),('acolade','accolade'),('acquistion','acquisition'),('acutally','actually'),('ad nauseum','ad nauseam'),('adaption','adaptation'),('adaptions','adaptations'),('addittion','addition'),('addmitted','admitted'),('adhear','adhere'),('adhearence','adherence'),('adition','addition'),('aditional','additional'),('administartion','administration'),('admiting','admitting'),('admitt','admit'),('advantagous','advantageous'),('affadavit','affidavit'),('affraid','afraid'),('affter','after'),('Afica','Africa'),('after-affect','after-effect'),('afteraffect','aftereffect'),('afterwhich','after which'),('afther','after'),('againsts','against'),('aggitate','agitate'),('aggree','agree'),('agregation','aggregation'),('agress','agrees'),('agriculturalist','agriculturist'),('agriculturalists','agriculturists'),('airborn','airborne'),('ajacent','adjacent'),('ajoin','adjoin'),('ajoining','adjoining'),('ajust','adjust'),('aka','a.k.a.'),('a.k.a','a.k.a.'),('akward','awkward'),('allagory','allegory'),('allaince','alliance'),('allignment','alignment'),('allmost','almost'),('allong','along'),('allright','all right'),('allso','also'),('allways','always'),('alos','also'),('alow','allow'),('alsways','always'),('Altanta','Atlanta'),('altenative','alternative'),('alterated','altered'),('alterpiece','altarpiece'),('aluminum','aluminium'),('alumnas','alumna'),('alumnis','alumni'),('aluvial','alluvial'),('ambiance','ambience'),('ambiant','ambient'),('ambience','ambiance'),('ambigious','ambiguous'),('Ameircan','American'),('amenites','amenities'),('Amercian','American'),('ammass','amass'),('ammenity','amenity'),('amnisty','amnesty'),('amonst','among'),('amonth','a 
month'),('ampitheater','amphitheater'),('ampitheatre','amphitheatre'),('Anahiem','Anaheim'),('analogue','analog'),('anaylsis','analysis'),('ancester','ancestor'),('ancesteral','ancestral'),('anihilate','annihilate'),('ankel','ankle'),('annd','and'),('annonce','announce'),('annoncement','announcement'),('annouce','announce'),('annoucement','announcement'),('anounce','announce'),('antropology','anthropology'),('anually','annually'),('anyways','anyway'),('apalca','alpaca'),('apalled','appalled'),('aparatus','apparatus'),('aparently','apparently'),('apear','appear'),('apearance','appearance'),('apeared','appeared'),('apearing','appearing'),('apoint','appoint'),('apoligize','apologize'),('appartmant','apartment'),('applyed','applied'),('appologize','apologize'),('apporach','approach'),('appretiate','appreciate'),('approch','approach'),('approximatley','approximately'),('approximatly','approximately'),('approximently','approximately'),('april','April'),('aproach','approach'),('aprox.','approx.'),('aproximation','approximation'),('aquarius','Aquarius'),('aquifier','aquifer'),('arbouretum','arboretum'),('archivement','achievement'),('archs','arches'),('are\'nt','are not'),('argueably','arguably'),('arguebly','arguably'),('arial view','aerial view'),('ariel view','aerial view'),('aries','Aries'),('arive','arrive'),('arn\'t','aren\'t'),('arrage','arrange'),('arragement','arrangement'),('artefact','artifact'),('Artic Circle','Arctic Circle'),('Artic Ocean','Arctic Ocean'),('artificialy','artificially'),('artilce','article'),('ascendancy','ascendency'),('asend','ascend'),('ashphalt','asphalt'),('asimilate','assimilate'),('askes','asks'),('asociate','associate'),('asociation','association'),('asperations','aspirations'),('asscoiate','associate'),('assimiliated','assimilated'),('assinate','assassinate'),('assination','assassination'),('assisstance','assistance'),('assitance','assistance'),('assumme','assume'),('assymetry','asymmetry'),('asterick','asterisk'),('asteriks','asterisk'),('astroid','asteroid'),('atempt','attempt'),('athelete','athlete'),('atheletic','athletic'),('atheltic','athletic'),('athmosphere','atmosphere'),('athough','although'),('atitude','attitude'),('atleast','at least'),('atomsphere','atmosphere'),('atractions','attractions'),('attacted','attracted'),('attaction','attraction'),('attactive','attractive'),('attatch','attach'),('attent','attend'),('attept','attempt'),('atthe','at 
the'),('attone','atone'),('attornies','attorneys'),('attorny','attorney'),('audiance','audience'),('august','August'),('australia','Australia'),('australian','Australian'),('Australiasia','Australasia'),('avalible','available'),('averege','average'),('awaked','awoke'),('awarness','awareness'),('awsome','awesome'),('axe','ax'),('babys','babies'),('bacame','became'),('backpeddle','backpedal'),('backruptcy','bankruptcy'),('backround','background'),('badmiton','badminton'),('badmitton','badminton'),('baleout','bailout'),('ballest','ballast'),('bannana','banana'),('batallion','battalion'),('batchs','batch\'s'),('beacame','became'),('beacause','because'),('beachs','beach\'s'),('beared','bared'),('bearly','barely'),('beastial','bestial'),('beated','beat'),('beaurocrat','bureaucrat'),('beautifull','beautiful'),('beautifuly','beautifully'),('becamed','became'),('becomed','became'),('beeen','been'),('beetween','between'),('beetwen','between'),('befor','before'),('begginning','beginning'),('beign','begin'),('beigns','begins'),('Bejing','Beijing'),('beleave','believe'),('Belguim','Belgium'),('believeable','believable'),('beliver','believer'),('beliving','believing'),('benchs','benches'),('benefical','beneficial'),('bennefit','benefit'),('bereau','bureau'),('beserk','berserk'),('besetted','beset'),('bettween','between'),('betweeen','between'),('bianual','biannual'),('bieng','being'),('bilbliography','bibliography'),('binded','bound'),('bited','bit'),('biten','bitten'),('biulding','building'),('blatent','blatant'),('bleeded','bled'),('bloosom','blossom'),('blossem','blossom'),('blowed','blew'),('bodly','bodily'),('bodys','bodies'),('bonafied','bona fide'),('bonified','bona fide'),('boo\'s','boos'),('borded','boarded'),('boths','booths'),('boundries','boundaries'),('boyfreind','boyfriend'),('boys\'s','boys\''),('brakedown','breakdown'),('brakeup','break-up'),('brakish','brackish'),('branchs','branch\'s'),('brang','brought'),('Brazillian','Brazilian'),('breaked','beaked'),('breeded','bred'),('breifcase','briefcase'),('brethrens','brethren'),('bribary','bribery'),('Brigandier','Brigadier'),('brillance','brilliance'),('bringed','brought'),('Britan','Britain'),('Brittain','Britain'),('broadcasted','broadcast'),('brodcast','broadcast'),('broght','brought'),('broked','broke'),('brung','brought'),('Brussells','Brussels'),('buget','budget'),('buiding','building'),('buidling','building'),('build-in','built-in'),('builded','built'),('buildng','building'),('bulding','building'),('buliding','building'),('bulliten','bulletin'),('burrial','burial'),('burry','bury'),('burrying','burying'),('bussines','business'),('buyed','bought'),('bye-election','by-election'),('byepass','bypass'),('cacaphonous','cacophonous'),('cacaphony','cacophony'),('caffiene','caffeine'),('calibur','caliber'),('Califronia','California'),('Califronian','Californian'),('calloused','callused'),('callouses','calluses'),('calous','callous'),('cameback','came 
back'),('cameraderie','camaraderie'),('camraderie','camaraderie'),('Canadaian','Canadian'),('Canadain','Canadian'),('cancelation','cancellation'),('canidate','candidate'),('canoing','canoeing'),('capabilites','capabilities'),('capacites','capacities'),('capitivity','captivity'),('captialize','capitalize'),('captian','captain'),('carmel','caramel'),('carraige','carriage'),('carrear','career'),('carred','cared'),('carrer','career'),('carring','caring'),('cassete','cassette'),('casted','cast'),('casualites','casualties'),('catagorise','categorise'),('catagorize','categorize'),('catalouge','catalogue'),('catched','caught'),('catchs','catches'),('catergory','category'),('cathegory','category'),('cavarly','cavalry'),('celeberate','celebrate'),('celebrites','celebrities'),('celibrate','celebrate'),('cementery','cemetery'),('cemetry','cemetery'),('centenial','centennial'),('centennary','centenary'),('centeral','central'),('centraly','centrally'),('centrifical','centrifugal'),('centures','centuries'),('certifed','certified'),('chaffing','chaffing'),('challanger','challenger'),('chanage','change'),('chanel','channel'),('changeing','changing'),('characterstic','characteristic'),('charater','character'),('charcter','character'),('Charolette','Charlotte'),('chequer','checker'),('chieftan','chieftain'),('childern','children'),('childrens','children'),('Chineese','Chinese'),('choclate','chocolate'),('choise','choice'),('choosed','chooses'),('chosed','chose'),('choses','chooses'),('chosing','choosing'),('chronical','chronic'),('cieling','ceiling'),('circiut','circuit'),('circuituous','circuitous'),('circumcized','circumcised'),('cladded','clad'),('claded','clad'),('claming','claiming'),('clamor','clamour'),('clamour','clamor'),('clangour','clangor'),('classicaly','classically'),('classifed','classified'),('claustraphobia','claustrophobia'),('clearence','clearance'),('closley','closely'),('co-stared','co-starred'),('co-staring','co-starring'),('co-starr','co-star'),('coachs','coaches'),('coaltion','coalition'),('coefficent','coefficient'),('coerse','coerce'),('coinside','coincide'),('coinsidence','coincidence'),('colaboration','collaboration'),('colapse','collapse'),('collaberation','collaboration'),('collegate','collegiate'),('collum','column'),('collumn','column'),('coloquial','colloquial'),('color','colour'),('coloration','colouration'),('colour','color'),('colouration','coloration'),('colourless','colorless'),('comemmorate','commemorate'),('comencement','commencement'),('comerce','commerce'),('comercial','comercial'),('comercially','commercially'),('commerate','commemorate'),('commerating','commemorating'),('commeration','commemoration'),('commerative','commemorative'),('committies','committees'),('committy','committee'),('communitys','communities'),('communty','community'),('communual','communal'),('compair','CompAir'),('compaired','compared'),('compairison','comparison'),('compareable','comparable'),('comparsion','comparison'),('compeet','compete'),('compeeting','competing'),('competiting','competing'),('competiton','competition'),('competively','competitively'),('competor','competitor'),('compilate','compile'),('complaintant','complainant'),('completly','completely'),('comprimize','compromise'),('compromize','compromise'),('compution','computation'),('comradarie','camaraderie'),('comraderie','camaraderie'),('comunity','community'),('conbined','combined'),('concecutive','consecutive'),('concide','coincide'),('concience','conscience'),('concientious','conscientious'),('concieve','conceive'),('concurent
','concurrent'),('concuring','concurring'),('concurr','concur'),('concusion','conclusion'),('condem','condemn'),('condemed','condemned'),('condemm','condemn'),('conducter','conductor'),('condusive','conducive'),('conferance','conference'),('confered','conferred'),('conferr','confer'),('confidance','confidence'),('confrence','conference'),('confrim','confirm'),('congradulate','congratulate'),('conjour','conjure'),('connotate','connote'),('consacrate','consecrate'),('consective','consecutive'),('consel','counsel'),('conservate','conserve'),('consession','concession'),('considerated','considered'),('consistancy','consistency'),('conspicious','conspicuous'),('consquently','consequently'),('constructable','constructible'),('contageous','contagious'),('contension','contention'),('contestent','contestant'),('continious','continuous'),('continiued','continued'),('continum','continuum'),('continuos','continuous'),('continuty','continuity'),('contious','continuous'),('contolled','controlled'),('contracter','contractor'),('contractural','contractual'),('contraversial','controversial'),('contributery','contributory'),('controler','controller'),('conveniant','convenient'),('conventionaly','conventionally'),('Conventry','Coventry'),('convergance','convergence'),('convergant','convergent'),('conversate','converse'),('conversley','conversely'),('conversly','conversely'),('convertable','convertible'),('convertor','converter'),('convertors','converters'),('convetional','conventional'),('convinient','convenient'),('cooly','coolly'),('coporation','corporation'),('corespond','correspond'),('corode','corrode'),('corparation','corporation'),('corperate','corporate'),('corperation','corporation'),('correlary','corollary'),('correspondance','correspondence'),('correspondet','correspondent'),('coudn\'t','couldn\'t'),('counceling','counseling'),('councelling','counseling'),('countain','contain'),('countrys','countries'),('cowtow','kowtow'),('craftly','craftily'),('cresent','crescent'),('crimnal','criminal'),('cristened','christened'),('critcal','critical'),('criticaly','critically'),('critiscise','criticise'),('critiscism','criticism'),('critiscize','criticize'),('critise','criticise'),('critisise','criticise'),('critize','criticise'),('crowed','crowd'),('cryed','cried'),('cuase','cause'),('cultery','cutlery'),('curent','current'),('curretly','currently'),('currrent','current'),('cutted','cut'),('damageing','damaging'),('danceing','dancing'),('dangerious','dangerous'),('daschund','dachshund'),('datas','data'),('daugter','daughter'),('daugther','daughter'),('de-facto','de facto'),('de-jure','de 
jure'),('deactive','deactivate'),('dealed','dealt'),('deam','deem'),('debute','debut'),('decadance','decadence'),('decadant','decadent'),('decathalon','decathlon'),('decend','descend'),('decern','discern'),('decidly','decidedly'),('decieve','deceive'),('decrepid','decrepit'),('decypher','decipher'),('decyphered','deciphered'),('deep-seeded','deep-seated'),('defecit','deficit'),('defencive','defensive'),('defendor','defender'),('deffinite','definite'),('deffinition','definition'),('definitley','definitely'),('defuncted','defunct'),('degredation','degradation'),('deites','deities'),('delagate','delegate'),('delapidate','dilapidate'),('delhi','Delhi'),('deminish','diminish'),('derivated','derived'),('deriviative','derivative'),('desease','disease'),('desend','descend'),('desgin','design'),('desginate','designate'),('designes','designs'),('desimination','dissemination'),('desinged','designed'),('desinger','designer'),('desirible','desirable'),('desolution','dissolution'),('desolve','dissolve'),('desparately','desperately'),('desparation','desperation'),('desribe','describe'),('dessication','desiccation'),('destoryer','destroyer'),('detachement','detachment'),('detatch','detach'),('detatchment','detachment'),('detecter','detector'),('deterance','deterrence'),('deterant','deterrent'),('deterence','deterrence'),('deterent','deterrent'),('deteriation','deterioration'),('determent','determent'),('determing','determining'),('deterrance','deterrence'),('deterrant','deterrent'),('detrement','detriment'),('Detriot','Detroit'),('developemet','development'),('developper','developer'),('develp','develop'),('devestate','devastate'),('devestation','devastation'),('devision','division'),('devistate','devastate'),('devistation','devastation'),('devolopment','development'),('devolopped','developed'),('diad','dyad'),('diadic','dyadic'),('dialate','dilate'),('dialation','dilation'),('dialouge','dialog'),('diciple','disciple'),('dicision','decision'),('dicuss','discuss'),('dicussion','discussion'),('did\'nt','did not'),('diesal','diesel'),('diference','difference'),('differant','different'),('dificult','difficult'),('digged','dug'),('digitaly','digitally'),('dignatary','dignitary'),('dilletante','dilettante'),('dilligence','diligence'),('dilligent','diligent'),('dillute','dilute'),('dimenion','dimension'),('diminuation','diminution'),('diminuative','diminutive'),('diminuition','diminution'),('dinasty','dynasty'),('dinning room','dining room'),('diplay','display'),('diplayed','displayed'),('diptheria','diphtheria'),('direcly','directly'),('directer','director'),('directon','direction'),('disapear','disappear'),('disapoint','disappoint'),('disapointment','disappointment'),('disappearence','disappearance'),('disaprove','disapprove'),('discrepency','discrepancy'),('discription','description'),('disect','bisect'),('disection','bisection'),('disfunctional','dysfunctional'),('disolution','dissolution'),('disolve','dissolve'),('disperate','desperate'),('dissapoint','disappoint'),('dissapointed','disappointed'),('dissapointment','disappointment'),('dissappointment','disappointment'),('dissoluted','dissolved'),('distict','distinct'),('distilation','distillation'),('distirb','disturb'),('distopia','dystopia'),('distributer','distributor'),('distroy','destroy'),('distrub','disturb'),('distrubed','disturbed'),('\"divinition\"','divination'),('divised','devised'),('do\'nt','do not'),('documentery','documentary'),('documentry','documentary'),('doed','did'),('does\'nt','does not'),('doned','donned'),('dont','do 
not'),('doorjam','door jamb'),('doppleganger','doppelgänger'),('dozend','dozen'),('draftman','draftsman'),('drawed','drawn'),('drinked','drank'),('drived','derived'),('droped','dropped'),('druing','during'),('drumed','drummed'),('drumer','drummer'),('drumms','drums'),('dryed','dried'),('duaghter','daughter'),('duely','duly'),('dukeship','dukedom'),('Dwight Yokam','Dwight Yoakam'),('eached','each'),('eachother','each other'),('ealiest','earliest'),('eariler','earlier'),('earing','earing'),('earler','earlier'),('easiliy','easily'),('easly','early'),('eastermost','easternmost'),('eated','ate'),('eccessive','excessive'),('ecclestiastical','ecclesiastical'),('eclectric','eclectic'),('ecomony','economy'),('ect.','et cetera'),('editior','editor'),('eduction','education'),('eductional','educational'),('eeked','eked'),('eeking','eking'),('eeks','ekes'),('eery','eerie'),('elaborite','elaborate'),('electorial','electoral'),('elemantary','elementary'),('eletronic','electronic'),('eligability','eligibility'),('ellusive','elusive'),('embelish','embellish'),('embelishment','embellishment'),('emiter','emitter'),('emitt','emit'),('emmense','immense'),('emmigrant','emigrant'),('emmission','emission'),('empahsis','emphasis'),('empahsize','emphasize'),('empede','impede'),('employes','employees'),('empound','impound'),('empoundment','impoundment'),('enbankment','embankment'),('encorperate','incorporate'),('encorporate','incorporate'),('encouter','encounter'),('encrease','increase'),('encyclopeadia','encyclopaedia'),('encypher','encypher'),('endeaver','endeavor'),('endevor','endeavor'),('endurace','endurance'),('enemey','enemy'),('engineeer','engineer'),('engineeering','engineering'),('enginer','engineer'),('enity','entity'),('enought','enough'),('enrole','enrol'),('enrolement','enrollment'),('enroute','en 
route'),('enteprise','enterprise'),('enterance','entrance'),('enterpreneur','entrepreneur'),('enterprize','enterprise'),('entertainement','entertainment'),('entilted','entitled'),('entited','entitled'),('entites','entities'),('entrace','entrance'),('entrepeneurial','entrepreneurial'),('entrie','entire'),('enivoriment','environment'),('envoke','invoke'),('envolve','involve'),('epiode','episode'),('epitomy','epitome'),('eposide','episode'),('eptitude','aptitude'),('equaly','equally'),('equilavent','equivalent'),('equiptment','equipment'),('equivalant','equivalent'),('erradicate','eradicate'),('errect','erect'),('errode','erode'),('errosion','erosion'),('esctasy','ecstasy'),('esence','essence'),('essental','essential'),('estabilish','establish'),('estalbish','establish'),('ethymology','etymology'),('euology','eulogy'),('Euorpe','Europe'),('Euorpean','European'),('euphamism','euphemism'),('evalution','evaluation'),('examinate','examine'),('exasparated','exasperated'),('exaust','exhaust'),('excape','escape'),('excede','exceed'),('excempt','exempt'),('excrutiating','excruciating'),('exercice','exercise'),('exhalt','exalt'),('exhaltation','exaltation'),('exhilirate','exhilarate'),('exhorbitant','exorbitant'),('exhuast','exhaust'),('exhuberance','exuberance'),('exhuberant','exuberant'),('exibit','exhibit'),('exisiting','existing'),('expantion','expansion'),('expeled','expelled'),('expeling','expelling'),('expence','expense'),('experement','experiment'),('experment','experiment'),('expidite','expedite'),('expirement','experiment'),('explane','explain'),('explicitely','explicitly'),('explination','explanation'),('exploitate','exploit'),('exscape','escape'),('exsist','exist'),('exsistance','existence'),('exsistence','existence'),('exstensive','extensive'),('extincted','extinct'),('extraterestrial','extraterrestrial'),('extravagence','extravagance'),('extremley','extremely'),('facillity','facility'),('facimile','facsimile'),('facinate','fascinate'),('facination','fascination'),('facsimilie','facsimile'),('faculity','facility'),('fairwell','farewell'),('faithfull','faithful'),('faling','failing'),('faliure','failure'),('falled','fallen'),('falsley','falsely'),('falsly','falsely'),('famaly','family'),('familar','familiar'),('familiy','family'),('familys','families'),('famouse','famous'),('fascitious','facetious'),('fashism','fascism'),('fasinate','fascinate'),('fasion','fashion'),('fatalies','fatalities'),('fatalites','fatalities'),('fataly','fatally'),('fately','fatally'),('faulter','falter'),('feasability','feasibility'),('feauture','feature'),('Febrary','February'),('Ferburary','February'),('Ferbuary','February'),('Ferbruary','February'),('federaly','federally'),('feeded','fed'),('feeled','felt'),('feets','feet'),('feild','Feild'),('feirce','fierce'),('ffrom','from'),('fianl','final'),('ficitional','fictional'),('fictonal','fictional'),('fictous','fictitious'),('fighted','fought'),('fillament','filament'),('filmaker','filmmaker'),('filmaking','filmmaking'),('findout','find 
out'),('finnally','finally'),('finnaly','finally'),('finnished','finished'),('finnisher','finisher'),('firend','friend'),('firey','fiery'),('firt','first'),('fishs','fishes'),('fith','fifth'),('flacid','flaccid'),('flaged','flagged'),('flewn','flown'),('flexability','flexibility'),('flexable','flexible'),('floatation','flotation'),('florescence','florescence'),('floride','Floride'),('flourescence','fluorescence'),('flouride','fluoride'),('flury','flurry'),('fluxuate','fluctuate'),('focuse','focus'),('foilage','foliage'),('folicle','follicle'),('folow','follow'),('folowed','followed'),('folower','follower'),('fomer','former'),('fomerly','formerly'),('foots','feet'),('footware','footwear'),('for-runner','forerunner'),('forbidded','forbade'),('forbode','forebode'),('forcast','forecast'),('forcasted','forecast'),('forcasting','forecasting'),('forclose','foreclose'),('forclosure','foreclosure'),('forebad','forbad'),('forebade','forbade'),('forebid','forbid'),('forebidden','forbidden'),('forecasted','forecast'),('foreceps','forceps'),('forefeit','forfeit'),('forefiet','forfeit'),('forefieture','forfeiture'),('foreget','forget'),('foregive','forgive'),('forego','forego'),('forelese','forlese'),('forelorn','forlorn'),('foresake','forsake'),('foresaken','forsaken'),('foresook','forsook'),('foreswear','forswear'),('forewent','forewent'),('forfieture','forfeiture'),('forgoe','forgo'),('forgoing','foregoing'),('foriegner','foreigner'),('formall','formal'),('formated','formatted'),('forrest','forest'),('forrunner','forerunner'),('forsee','foresee'),('forseen','foreseen'),('forshadow','foreshadow'),('forsight','foresight'),('forstall','forestall'),('fortell','foretell'),('forthe','for the'),('fortunatly','fortunately'),('forumla','formula'),('forumlate','formulate'),('forwarn','forewarn'),('forword','foreword'),('fot the','for the'),('fragement','fragment'),('freezed','froze'),('freshmans','freshman\'s'),('friens','friends'),('frist','first'),('fromat','format'),('fromer','former'),('fromerly','formerly'),('froming','forming'),('fromt he','from the'),('fromthe','from the'),('fron the','from the'),('fronteir','frontier'),('frontpiece','frontispiece'),('frontspiece','frontispiece'),('frought','fraught'),('frozed','frozen'),('fruiton','fruition'),('frustrum','frustum'),('frution','fruition'),('fued','feud'),('fuedal','feudal'),('fulled','filled'),('fullfil','fulfill'),('functionaly','functionally'),('fundamentaly','fundamentally'),('fundation','foundation'),('fundemental','fundamental'),('furhter','further'),('furhtermore','furthermore'),('furnature','furniture'),('furtile','fertile'),('fustrate','frustrate'),('fysical','physical'),('garisson','garrison'),('garrision','garrison'),('garrisson','garrison'),('gaurdian','guardian'),('genaral','general'),('genearl','general'),('generater','generator'),('generical','generic'),('genisis','genesis'),('genra','genera'),('genreal','general'),('gentlemens','gentlemen\'s'),('genuis','genius'),('German Shepard','German Shepherd'),('get\'s','gets'),('Gibralter','Gibraltar'),('girate','gyrate'),('girated','gyrated'),('girates','gyrates'),('girating','gyrating'),('giration','gyration'),('girlfreind','girlfriend'),('girls\'s','girls\''),('giude','guide'),('giult','guilt'),('give\'s','gives'),('gived','gave'),('giveing','giving'),('Goerge','George'),('good riddens','good riddance'),('goodby','Goodby'),('govenor','governor'),('governement','government'),('Government Minster','Government 
Minister'),('governmentaly','governmentally'),('grabed','grabbed'),('grabing','grabbing'),('gradualy','gradually'),('graffitti','graffiti'),('gramatical','grammatic'),('gramattical','grammatical'),('granchild','grandchild'),('granchildren','grandchildren'),('grandaughter','granddaughter'),('grandure','grandeur'),('granfather','grandfather'),('granmother','grandmother'),('granparent','grandparent'),('granson','grandson'),('gratefull','grateful'),('greately','greatly'),('Greecian','Grecian'),('greviance','grievance'),('grevious','grievous'),('growed','grew'),('Guadulupe, Guaduloupe','Guadalupe'),('guilded','gilded'),('Guiliani','Giuliani'),('guitarrist','guitarist'),('gurad','guard'),('guradian','guardian'),('habbit','habit'),('habeus corpus','habeas corpus'),('hace','hare'),('had\'nt','had not'),('hadnt','had not'),('halfs','half\'s'),('handeled','handled'),('handfull','handful'),('hankerchief','handkerchief'),('haras','harass'),('hardend','hardened'),('hardwear','hardware'),('harrass','harass'),('has\'nt','has not'),('hastly','hastily'),('haved','have'),('haveing','having'),('hayday','heyday'),('hazerdous','hazardous'),('headquartes','headquarters'),('heaviliy','heavily'),('heighth','height'),('heigth','height'),('heirarchial','hierarchical'),('heirarchical','hierarchical'),('helded','held'),('henious','heinous'),('heptathalon','heptathlon'),('her\'s','her'),('heresay','hearsay'),('hersuit','hirsute'),('hersute','hirsute'),('hes','he\'s'),('hidding','hiding'),('hierarchial','hierarchical'),('highter','higher'),('hights','heights'),('hirsuit','hirsute'),('hisself','himself'),('histroy','history'),('hitchs','hitches'),('hiting','hitting'),('hitted','hit'),('hoasted','hosted'),('hobbiest','hobbyist'),('hobbist','hobbyist'),('hold\'s','holds'),('holded','held'),('hollistic','holistic'),('homocide','homicide'),('hompage','home page'),('honarable','honorable'),('honourarium','honorarium'),('honourary','honorary'),('honourific','honorific'),('hopefull','hopeful'),('hopefuly','hopefully'),('hopeing','hoping'),('hopfully','hopefully'),('housewifes','housewives'),('houshold','household'),('houskeeper','housekeeper'),('houswife','housewife'),('htat','that'),('hte','the'),('humourous','humorous'),('hundered','hundred'),('hurricaine','hurricane'),('hydralic','hydraulic'),('hygene','hygiene'),('hygenic','hygienic'),('hygine','hygiene'),('hyptonize','hypnotize'),('i\'ts','it\'s'),('iceburg','iceberg'),('iceing','icing'),('idae','idea'),('idealy','ideally'),('idenify','identify'),('idenity','identity'),('identicaly','identically'),('identifed','identified'),('identifing','identifying'),('iin','in'),('ilegally','illegally'),('illegaly','illegally'),('illegimate','illegitimate'),('ilumination','illumination'),('imediate','immediate'),('imigrate','emigrate'),('immagination','imagination'),('immediatelly','immediately'),('immensley','immensely'),('immensly','immensely'),('immitation','imitation'),('immuntable','immutable'),('imoral','immoral'),('impass','impasse'),('imperic','empiric'),('imperical','empiric'),('imperically','empirically'),('imporatnt','important'),('importan','important'),('improvment','improvement'),('in leiu','in lieu'),('in memorium','in memoriam'),('inact','enact'),('inaddition','in 
addition'),('inapropriate','inappropriate'),('inate','innate'),('inaugure','inaugurate'),('incase','incase'),('Inchon','Incheon'),('inchs','inches'),('incidential','incidental'),('incinuate','insinuate'),('inclose','enclose'),('inclosed','enclosed'),('includeing','including'),('inconsistancy','inconsistency'),('incorperate','incorporate'),('increse','increase'),('incumbancy','incumbency'),('incumbant','incumbent'),('indentification','identification'),('indentified','identified'),('indentifying','identifying'),('indepedent','independent'),('independient','independent'),('indurance','endurance'),('indure','endure'),('infact','in fact'),('infanty','infantry'),('infinate','infinite'),('inflammed','inflamed'),('inflitrate','infiltrate'),('influental','influential'),('inforamtion','information'),('infromation','information'),('infront','in front'),('inhabitate','inhabit'),('inheret','inherit'),('inherrent','inherent'),('inicial','initial'),('iniciate','initiate'),('initate','initiate'),('initative','initiative'),('initator','initiator'),('initialy','initially'),('initiatve','initiative'),('inititive','initiative'),('injuiry','injury'),('injuried','injured'),('injurys','injuries'),('inlcude','include'),('inlude','include'),('inmigrate','immigrate'),('inmigration','immigration'),('innaugurate','inaugurate'),('innauguration','inauguration'),('innundate','inundate'),('innundated','inundated'),('innundation','inundation'),('inovation','innovation'),('inovative','innovative'),('inpact','impact'),('inpose','impose'),('inpsired','inspired'),('inputted','input'),('inquierer','inquirer'),('inroll','enroll'),('insident','incident'),('insistant','insistent'),('insoluable','insoluble'),('insperation','inspiration'),('instal','install'),('instalation','installation'),('instil','instill'),('instilation','installation'),('instructer','instructor'),('insue','ensue'),('insufficent','insufficient'),('insufficently','insufficiently'),('int he','in the'),('intallment','installment'),('intallments','installments'),('intefere','interfere'),('interfear','interfere'),('interfearance','interference'),('interfearing','interfering'),('intergral','integral'),('intergrate','integrate'),('intermidiary','intermediary'),('intermidiate','intermediate'),('intermitent','intermittent'),('intermittant','intermittent'),('intermural','intramural'),('internaly','internally'),('internationaly','internationally'),('internment','interment'),('interogation','interrogation'),('interoir','interior'),('interor','interior'),('interpetation','interpretation'),('interpretate','interpret'),('interprete','interpret'),('interpretion','interpretation'),('interpretor','interpreter'),('interpretted','interpreted'),('interrior','interior'),('interruptable','interruptible'),('interseted','interested'),('interst','interest'),('intersted','interested'),('intersting','interesting'),('intertube','inner tube'),('interupted','interrupted'),('interuption','interruption'),('inthe','in the'),('intiative','initiative'),('intice','entice'),('intigrate','integrate'),('intital','initial'),('intitial','initial'),('intoduce','introduce'),('intorduce','introduce'),('intot he','into 
the'),('intregal','integral'),('intrical','integral'),('intrim','interim'),('introduct','induct'),('introduse','introduce'),('intruction','instruction'),('intrust','entrust'),('inturrupt','interrupt'),('intution','intuition'),('inuendo','innuendo'),('invarience','invariance'),('inversly','inversely'),('invertabrate','invertebrate'),('invertibrate','invertebrate'),('invertion','inversion'),('invester','investor'),('invole','involve'),('involement','involvement'),('involment','involvement'),('invovle','involve'),('inwhich','in which'),('iritate','irritate'),('ironical','ironic'),('ironicaly','ironically'),('irrelavent','irrelevant'),('irrevelant','irrelevant'),('irreverant','irreverent'),('is stil','is still'),('is\'nt','is not'),('Israely','Israeli'),('Isreal','Israel'),('Isreali','Israeli'),('Isrealite','Israelite'),('itenerant','itinerant'),('itinerate','itinerant'),('its\'','it\'s'),('its\'s','it\'s'),('itsself','itself'),('Japaneese','Japanese'),('Jersualem','Jerusalem'),('jewler','jeweler'),('jewlery','jewellery'),('jewlrey','jewellery'),('journy','journey'),('juction','junction'),('judgement','judgment'),('judgment','judgement'),('juniour','junior'),('junoir','junior'),('juntion','junction'),('jurisdication','jurisdiction'),('juvinile','juvenile'),('keep\'s','keeps'),('keeped','kept'),('keybord','keyboard'),('killogram','kilogram'),('killometer','kilometer'),('killometre','kilometre'),('kiloohm','kilohm'),('kiloohms','kilohms'),('kingdon','kingdom'),('kinoscope','kinescope'),('knit-pick','nitpick'),('knitpick','nitpick'),('know\'s','knows'),('knowed','knew'),('knowldge','knowledge'),('lable','label'),('labourious','laborious'),('ladden','laden'),('lamda','lambda'),('landscapping','landscaping'),('langauge','language'),('languise','languish'),('languistic','linguistic'),('lanscape','landscape'),('lanugage','language'),('largets','largest'),('largley','largely'),('largly','largely'),('larnyx','larynx'),('lastest','last'),('latancy','latency'),('latant','latent'),('latchs','latch\'s'),('lateley','lately'),('lateraly','laterally'),('latern','lantern'),('latley','lately'),('lauch','launch'),('laundrymat','laundromat'),('layed low','lay low'),('leaded','leaded'),('leafs','leaf\'s'),('leant','leaned'),('leasure','leisure'),('leauge','league'),('leaved','left'),('ledgend','legend'),('ledgendary','legendary'),('ledgislate','legislate'),('ledgislation','legislation'),('ledgislative','legislative'),('ledgislator','legislator'),('legaly','legally'),('legendry','legendary'),('legimate','legitimate'),('legistlate','legislate'),('legistlation','legislation'),('legistlative','legislative'),('legistlator','legislator'),('legnth','length'),('Lego','LEGO'),('lenghten','lengthen'),('lenghty','lengthy'),('lense','lens'),('lenth','length'),('lept','leaped'),('lernt','learned'),('lessor','less'),('lexion','lexicon'),('libarary','library'),('lief','leaf'),('lieutentant','lieutenant'),('likley','likely'),('lillies','lilies'),('limitate','limit'),('lingustic','linguistic'),('liscence','licence'),('lite','light'),('litle','Litle'),('littany','litany'),('litteral','literal'),('litterature','literature'),('liturature','literature'),('loacted','located'),('localy','locally'),('locamotive','locomotive'),('lockeroom','locker room'),('loction','location'),('locus','locus'),('logicaly','logically'),('lonleyness','loneliness'),('lonliness','loneliness'),('loose','loose'),('loosley','loosely'),('loosly','loosely'),('Los Angles','Los 
Angeles'),('losed','lost'),('loseing','losing'),('Lousiana','Louisiana'),('lovley','lovely'),('lubrification','lubrication'),('luckly','luckily'),('luekemia','leukemia'),('lukemia','leukemia'),('lunchs','lunches'),('luxary','luxury'),('mabye','maybe'),('magent','magnet'),('magentic','magnetic'),('magestic','majestic'),('magicaly','magically'),('magnant','magnate'),('Maimi','Miami'),('mainfest','manifest'),('mainley','mainly'),('maintaing','maintaining'),('maintanence','maintenance'),('maintence','maintenance'),('maintinance','maintenance'),('majiscule','majuscule'),('majorly','mainly'),('makeing','making'),('managament','management'),('manageral','managerial'),('manditory','mandatory'),('maneover','maneuver'),('maneovre','manoeuvre'),('mangement','management'),('mangerial','managerial'),('Manhatten','Manhattan'),('manifestion','manifestation'),('manoeuverable','maneuverable'),('marchs','marches'),('margain','margin'),('margine','margin'),('maried','married'),('Marixist','Marxist'),('marrige','marriage'),('marryed','married'),('marshmellow','marshmallow'),('masonary','masonry'),('masonery','masonry'),('matchs','matches'),('mathmatics','mathematics'),('maximalize','maximize'),('maximium','maximum'),('mayorial','mayoral'),('meaned','meant'),('meaningfull','meaningful'),('mearly','merely'),('measurment','measurement'),('mecanical','mechanical'),('mechanisim','mechanism'),('medeval','mediaeval'),('medicore','mediocre'),('medival','medieval'),('meeet','meet'),('megaohm','megohm'),('megaohms','megohms'),('memmory','memory'),('mens','men'),('mens\'','men\'s'),('menstration','menstruation'),('mentaly','mentally'),('mentiones','mentions'),('mentionned','mentioned'),('mercernary','mercenary'),('\"metalicity\"','metallicity'),('metorology','meteorology'),('metropolian','metropolitan'),('Micheal','Michael'),('Michgian','Michigan'),('mimickry','mimicry'),('miminal','minimal'),('miminum','minimum'),('minerological','mineralogical'),('minerologist','mineralogist'),('minerology','mineralogy'),('minimalise','minimise'),('minimalize','minimize'),('minimaly','minimally'),('minimium','minimum'),('miniscule','minuscule'),('ministy','ministry'),('miniture','miniature'),('minning','mining'),('mintues','minutes'),('miriad','myriad'),('miricle','miracle'),('Mississipi','Mississippi'),('misstaken','mistaken'),('missunderstand','misunderstand'),('missuse','misuse'),('mixted','mixed'),('moccassins','moccasins'),('moderatley','moderately'),('moderatly','moderately'),('modfied','modified'),('modifaction','modification'),('modifed','modified'),('mollest','molest'),('Monacco','Monaco'),('monastry','monastery'),('moniter','monitor'),('monoatomic','monatomic'),('monthes','months'),('moraly','morally'),('mountanous','mountainous'),('moveable','movable'),('moveing','moving'),('muderer','murderer'),('mueseum','museum'),('mulitplayer','multiplayer'),('mulitple','multiple'),('mulitply','multiply'),('multiplyed','multiplied'),('municipalites','municipalities'),('municipalties','municipalities'),('municipalty','municipality'),('municipaly','municipality'),('musicaly','musically'),('musican','musician'),('musicial','musical'),('musseum','museum'),('musuem','museum'),('mutiple','multiple'),('mutiplication','multiplication'),('mutiply','multiply'),('mutualy','mutually'),('mysogynistic','misogynistic'),('nacent','nascent'),('naivity','naivety'),('narate','narrate'),('narow','narrow'),('nascient','nascent'),('nationalites','nationalities'),('nationaly','nationally'),('natrual','natural'),('natual','natural'),('natuarlly','naturally'),('
navagation','navigation'),('naviagation','navigation'),('nearbye','nearby'),('need\'s','needs'),('negativley','negatively'),('negativly','negatively'),('negitive','negative'),('negitivity','negativity'),('negotiater','negotiator'),('neigbor','neighbour'),('neighboor','neighbour'),('nessacary','necessary'),('Netherland\'s','Netherlands'),('nethertheless','nevertheless'),('neuclear','nuclear'),('newely','newly'),('newletter','newsletter'),('Newyorker','New Yorker'),('Niagra Falls','Niagara Falls'),('nicname','nickname'),('nieghbor','neighbor'),('nieghborhood','neighborhood'),('nieghbour','neighbour'),('niether','neither'),('nigth','night'),('ninethly','ninthly'),('ninteen','nineteen'),('nintey','ninety'),('nitch','niche'),('nomencalture','nomenclature'),('non-existant','nonexistent'),('normaly','normally'),('northermost','northernmost'),('nortoriety','notoriety'),('nortorious','notorious'),('Norweigan','Norwegian'),('notariety','notoriety'),('notewothy','noteworthy'),('notorius','notorious'),('now adays','nowadays'),('nuerological','neurological'),('nuerotransmitter','neurotransmitter'),('nuerotic','neurotic'),('nuetral','neutral'),('nuetrality','neutrality'),('nuetralize','neutralize'),('nuptual','nuptial'),('obcene','obscene'),('obcenity','obscenity'),('obervation','observation'),('objetive','objective'),('obligue','oblige'),('obligued','obliged'),('observence','observance'),('observor','observer'),('obsolecence','obsolescence'),('obsolecense','obsolescence'),('obsolesence','obsolescence'),('obsolesense','obsolescence'),('obssession','obsession'),('obssessive','obsessive'),('obsticle','obstacle'),('occaisonally','occasionally'),('occasionaly','occasionally'),('occasonal','occasional'),('octogon','octagon'),('octogonal','octagonal'),('octostyle','octastyle'),('ocupied','occupied'),('ocupy','occupy'),('ocurring','occurring'),('ocurrs','occurs'),('oddites','oddities'),('odouriferous','odoriferous'),('odourous','odorous'),('ofcourse','of course'),('offcial','official'),('offcier','officer'),('offen','often'),('offendor','offender'),('offshot','offshoot'),('oficial','official'),('oftern','often'),('ofthe','of the'),('Okalahoma','Oklahoma'),('olny','only'),('omitt','omit'),('ommission','omission'),('ommit','omit'),('onthe','on the'),('ontop','on 
top'),('oparate','operate'),('opend','opened'),('openned','opened'),('operater','operator'),('opertion','operation'),('opertunity','opportunity'),('opinate','opine'),('opionion','opinion'),('opon','upon'),('oppen','open'),('oppened','opened'),('oppening','opening'),('opperate','operate'),('opperation','operation'),('oppertunity','opportunity'),('opponet','opponent'),('opportuinity','opportunity'),('opportunites','opportunities'),('opportunty','opportunity'),('opposit','opposite'),('oppotunity','opportunity'),('oppourtunity','opportunity'),('oppurtunity','opportunity'),('opress','oppress'),('opthamology','ophthalmology'),('opthomologist','ophthalmologist'),('opthomology','ophthalmology'),('optomist','optimist'),('optomistic','optimistic'),('orchestera','orchestra'),('orchesteral','orchestral'),('orderes','ordered'),('orgainisation','organisation'),('orgainise','organise'),('orgainization','organization'),('orgainize','organize'),('organiation','organization'),('organical','organic'),('orginate','originate'),('orginise','organise'),('orginization','organization'),('oriention','orientation'),('origanal','original'),('originial','original'),('originiate','originate'),('originnal','original'),('orignal','original'),('orignate','originate'),('orignial','original'),('origonal','original'),('orthagonal','orthogonal'),('orthagonally','orthogonally'),('orthgonal','orthogonal'),('orthogonaly','orthogonally'),('OSes','OSs'),('othere','other'),('otheres','others'),('ottaman','Ottoman'),('oustide','outside'),('outake','outtake'),('outcasted','outcast'),('outlaying','outlying'),('outloud','out loud'),('outputted','output'),('outragous','outrageous'),('outter','outer'),('outweighted','outweighed'),('overan','overran'),('overated','overrated'),('overeach','overreach'),('overide','override'),('overlayed','overlaid'),('overode','overrode'),('oversite','oversight'),('overule','overrule'),('overun','overrun'),('oveture','overture'),('oxens','oxen'),('oxes','ox\'s'),('oxyen','oxygen'),('paliament','parliament'),('palyed','played'),('palyer','player'),('palying','playing'),('panal','panel'),('panarama','panorama'),('panicing','panicking'),('paniking','panicking'),('paparazzis','paparazzi'),('parallell','parallel'),('parlament','parliament'),('parseable','parsable'),('partecipate','participate'),('partical','partial'),('particpant','participant'),('particpated','participated'),('partion','partition'),('partipate','participate'),('pasenger','passenger'),('passanger','passenger'),('passtime','pastime'),('pasted','passed'),('pastorial','pastoral'),('pasturize','pasteurize'),('pattent','patent'),('payler','player'),('peachs','peaches'),('peasent','peasant'),('pecular','peculiar'),('pecularity','peculiarity'),('pecularly','peculiarly'),('peculiarites','peculiarities'),('peform','perform'),('peformance','performance'),('peleton','peloton'),('peninnsula','peninsula'),('Pennsilvania','Pennsylvania'),('pensinsula','peninsula'),('Pensylvania','Pennsylvania'),('pepole','people'),('perameter','parameter'),('perculiar','peculiar'),('perfom','perform'),('perfoming','performing'),('permited','permitted'),('permiting','permitting'),('permitt','permit'),('perpare','prepare'),('persaude','persuade'),('persay','per 
se'),('perserverance','perseverance'),('perservere','persevere'),('perserverence','perseverance'),('perseverent','perseverant'),('personaly','personally'),('personnal','personal'),('perview','purview'),('phenominal','phenomenal'),('Pheonix','Phoenix'),('Philadephia','Philadelphia'),('philantrophist','philanthropist'),('philantrophy','philanthropy'),('philantropist','philanthropist'),('philantropy','philanthropy'),('Philedelphia','Philadelphia'),('Philipino','Filipino'),('Philippino','Filipino'),('Phillipino','Filipino'),('Phillippine','Philippine'),('Phillippino','Filipino'),('photgraph','photograph'),('phyiscal','physical'),('physican','physician'),('physicial','physical'),('picure','picture'),('pidgeon','pigeon'),('pilgram','pilgrim'),('pilgramage','pilgrimage'),('piligrim','pilgrim'),('piligrimage','pilgrimage'),('pionering','pioneering'),('piont','point'),('pitchs','pitches'),('placeing','placing'),('plagarist','plagiarist'),('plagarize','plagiarize'),('platfrom','platform'),('plauge','plague'),('pleasnat','pleasant'),('plebian','plebeian'),('plentitude','plenitude'),('plethura','plethora'),('plyed','played'),('poinsetta','poinsettia'),('pokemon','Pokémon'),('polination','pollination'),('politian','politician'),('politition','politician'),('poore','poor'),('populaion','population'),('porportion','proportion'),('posible','possible'),('posibly','possibly'),('posistion','position'),('positve','positive'),('postitive','positive'),('postively','positively'),('postumous','posthumous'),('potentional','potential'),('potentialy','potentially'),('potray','portray'),('pourpose','purpose'),('pre-Colombian','pre-Columbian'),('pre-cursor','precursor'),('pre-existant','preexistent'),('preceive','perceive'),('preception','perception'),('precidence','precedence'),('precident','precedent'),('precint','precinct'),('predjudice','prejudice'),('predominately','predominantly'),('prefection','perfection'),('preferr','prefer'),('preferrable','preferable'),('preffer','prefer'),('preffered','preferred'),('pregancy','pregnancy'),('pregnacy','pregnancy'),('preist','priest'),('prejorative','pejorative'),('premanent','permanent'),('premeir','premier'),('premere','premier'),('premesis','premises'),('premire','premier'),('premotion','promotion'),('preperatory','preparatory'),('prescence','presence'),('presidence','precedence'),('presumeably','presumably'),('presure','pressure'),('presurize','pressurize'),('prevailance','prevalence'),('prevale','prevail'),('prevelance','prevalence'),('preventation','prevention'),('previus','previous'),('prevous','previous'),('Pricilla','Priscilla'),('primarially','primarily'),('Prime Minster','Prime Minister'),('primier','premier'),('primordeal','primordial'),('Princton','Princeton'),('prision','prison'),('privatley','privately'),('privatly','privately'),('priveledge','privilege'),('privelidge','privilege'),('proclame','proclaim'),('prodcution','production'),('professionaly','professionally'),('programe','program'),('programer','programmer'),('prominate','prominent'),('promixity','proximity'),('propably','probably'),('propeled','propelled'),('propeling','propelling'),('propell','propel'),('properity','prosperity'),('propery','property'),('prophecized','prophesied'),('propogator','propagator'),('propotion','proportion'),('propotional','proportional'),('protem','pro 
tem'),('protien','protein'),('protrait','portrait'),('protray','portray'),('protrayal','portrayal'),('prounounced','pronounced'),('providor','provider'),('psychidelic','psychedelic'),('psychodelic','psychedelic'),('publich','public'),('puchase','purchase'),('Puertorican','Puerto Rican'),('punchs','punches'),('purchace','purchase'),('pursing','pursing'),('puruse','peruse'),('put\'s','puts'),('pwn','own'),('pyschedelic','psychedelic'),('pyschic','psychic'),('pyscho','psycho'),('pyschological','psychological'),('pyschology','psychology'),('pyschosomatic','psychosomatic'),('qoute','quote'),('quadraped','quadruped'),('quadrapedal','quadrupedal'),('quadriped','quadruped'),('quadripedal','quadrupedal'),('quadripedalism','quadrupedalism'),('quadropedal','quadrupedal'),('qualifer','qualifier'),('qualifers','qualifiers'),('qualifyng','qualifyng'),('qualites','qualités'),('quandry','quandary'),('quanity','quantity'),('quantites','quantités'),('quarternary','quaternary'),('quater','quarter'),('quater-final','quarter-final'),('quaterback','quarterback'),('quaterly','quarterly'),('quatermaster','quartermaster'),('queazy','queasy'),('questionaire','questionnaire'),('questionned','questioned'),('quicky','quickie'),('quikly','quickly'),('quindecemvir','quindecimvir'),('quindecemviri','quindecimviri'),('quinquireme','quinquereme'),('quited','quit'),('quitely','quietly'),('qv','q.v.'),('radient','radiant'),('ranchs','ranch\'s'),('rappid','rapid'),('rarley','rarely'),('rasied','raised'),('re-realeased','re-released'),('reachs','reaches'),('reactived','reactivated'),('readly','readily'),('reak','wreak'),('realease','release'),('realites','realities'),('reallity','reality'),('realted','related'),('realting','relating'),('realtion','relation'),('realtive','relative'),('reamain','remain'),('reamin','remain'),('reasses','reassess'),('rebroadcasted','rebroadcast'),('rebuilded','rebuilt'),('rebuttle','rebuttal'),('recasted','recast'),('reccent','recent'),('reccur','recurr'),('recentley','recently'),('recepie','recipe'),('recepted','received'),('recipent','recipient'),('recipies','recipes'),('recoiless','recoilless'),('recomendation','recommendation'),('reconciliate','reconcile'),('recongize','recognize'),('reconize','recognize'),('recoreded','recorded'),('recouperate','recuperate'),('recquire','reacquire'),('recruted','recruited'),('recurr','recur'),('redevelope','redevelop'),('redevelopement','redevelopment'),('redundency','redundancy'),('redundent','redundant'),('referance','reference'),('referes','refers'),('refernece','reference'),('referr','refer'),('refferr','refer'),('reforce','reinforce'),('refrom','reform'),('registery','registry'),('registrate','register'),('reguard','regard'),('regulary','regularly'),('reherse','rehearse'),('reinact','reenact'),('rejuvinate','rejuvenate'),('relagate','relegate'),('relagated','relegated'),('relagation','relegation'),('relase','release'),('relativley','relatively'),('relavent','relevant'),('releave','relieve'),('reletive','relative'),('relience','reliance'),('religon','religion'),('relitively','relatively'),('relize','realize'),('relm','realm'),('remainging','remaining'),('remanants','remnants'),('remaned','remained'),('remaning','remaining'),('rember','remember'),('remian','remain'),('reminant','remnant'),('reminicent','reminiscent'),('renound','renowned'),('renouned','renowned'),('renowed','renowned'),('repayed','repaid'),('repeatly','repeatedly'),('repected','respected'),('repeled','repelled'),('repell','repel'),('repertoir','repertoire'),('repetetive','repetitive'),('rep
etive','repetitive'),('repetoire','repertoire'),('repitition','repetition'),('replaceing','replacing'),('replyed','replied'),('reportably','reportedly'),('repremand','reprimand'),('represenative','representative'),('representitive','representative'),('reprize','reprise'),('rescure','rescue'),('rescuse','rescue'),('reserach','research'),('reservor','reservoir'),('resetted','reset'),('residance','residence'),('residant','resident'),('resourse','resource'),('respondant','respondent'),('responibility','responsibility'),('responsable','responsible'),('responsed','responded'),('responsibilty','responsibility'),('resposibility','responsibility'),('resposible','responsible'),('restorant','restaurant'),('restuarant','restaurant'),('resurect','resurrect'),('resurgance','resurgence'),('retailation','retaliation'),('retardent','retardant'),('rethoric','rhetoric'),('retreived','retrieved'),('retrive','retrieve'),('retruning','returning'),('retured','returned'),('returing','returning'),('revelant','relevant'),('reverance','reverence'),('reverand','reverend'),('reverant','reverent'),('reverese','reverse'),('rhe','the'),('rhythym','rhythm'),('rhytm','rhythm'),('richochet','ricochet'),('richs','riches'),('rided','rode'),('riden','ridden'),('ridgid','RIDGID'),('rightous','righteous'),('rigth','right'),('riped','ripped'),('rivarly','rivalry'),('romatic','romantic'),('royalites','royalties'),('royalities','royalties'),('royality','royalty'),('ruller','ruler'),('rummor','rumor'),('runer','runner'),('runned','ran'),('rushs','rushes'),('Rusian','Russian'),('Russain','Russian'),('rutheless','ruthless'),('rythym','rhythm'),('sabatical','sabbatical'),('safegaurd','safeguard'),('safetly','safely'),('safley','safely'),('sais','says'),('saleman','salesman'),('saleries','salaries'),('San Deigo','San Diego'),('San Franciso','San Francisco'),('San Fransico','San Francisco'),('San Fransisco','San Francisco'),('sancutary','sanctuary'),('santuary','sanctuary'),('sargent','sergeant'),('sasparilla','sarsaparilla'),('satalite','satellite'),('satarize','satirize'),('sauter','solder'),('sautering','soldering'),('savy','savvy'),('say\'s','says'),('sayed','said'),('Scandanavian','Scandinavian'),('scaned','scanned'),('scaner','scanner'),('scenary','scenery'),('scenerio','scenario'),('scheduel','schedule'),('scientifical','scientific'),('Scotish','Scottish'),('Scottland','Scotland'),('sculpter','sculptor'),('scupture','sculpture'),('seamingly','seemingly'),('seaon','season'),('searchs','searches'),('seasonaly','seasonally'),('seccesion','secession'),('secondry','secondary'),('secrect','secret'),('secundary','secondary'),('securites','securities'),('sedantary','sedentary'),('sedatative','sedative'),('see\'s','See\'s'),('seem\'s','seems'),('seemless','seamless'),('segement','segment'),('seires','series'),('selction','selection'),('selled','sold'),('semblence','semblance'),('sended','sent'),('seniour','senior'),('senoir','senior'),('sensable','sensible'),('sentance','sentence'),('separatley','separately'),('seperable','separable'),('Septemper','September'),('sequal','sequel'),('sequencial','sequential'),('serach','search'),('serie\'s','series'),('serieses','series'),('serivce','service'),('servent','servant'),('set\'s','set\'s'),('setted','set'),('setteler','settler'),('settelment','settlement'),('settelments','settlements'),('severall','several'),('sevral','several'),('shedual','schedule'),('shedule','schedule'),('sheeps','sheep'),('shoe-in','shoo-in'),('shooted','shot'),('shoud','should'),('should\'nt','should 
not'),('shrewed','shrewd'),('siesmic','seismic'),('signifigance','significance'),('signiture','signature'),('silimar','similar'),('similair','similar'),('simillar','similar'),('simmilarly','similarly'),('simular','similar'),('simulcasted','simulcast'),('singed','sang'),('single-handily','single-handedly'),('sinked','sank'),('sissors','scissors'),('skilfull','skilful'),('skillfull','skillful'),('sleave','sleeve'),('slided','slid'),('smaler','smaller'),('smily','smiley'),('smoothe','smooth'),('smoothes','smooths'),('snorkle','snorkel'),('snuck','sneaked'),('soduko','sodoku'),('solider','soldier'),('solidier','soldier'),('sollid','solid'),('sollution','solution'),('somone','someone'),('songwritter','songwriter'),('soonafter','soon after'),('soons','soon'),('souly','solely'),('soundtack','soundtrack'),('southermost','southernmost'),('southheast','southeast'),('spacial','spatial'),('spagetti','spaghetti'),('spainiard','Spaniard'),('sparce','spare'),('sparcely','sparsely'),('sparsley','sparsely'),('sparsly','sparsely'),('speachless','speechless'),('speacial','special'),('speaked','spoke'),('specator','spectator'),('specialied','specialised'),('specialites','specialities'),('speciality','speciality'),('specialtes','specialties'),('specialy','speciality'),('speciemen','specimen'),('specifical','specific'),('specificaly','specifically'),('specifity','specificity'),('spectometry','spectrometry'),('speechs','speeches'),('speeker','speaker'),('speical','special'),('spelled','spelt'),('spelt','spelled'),('spended','spent'),('spermatazoa','spermatozoa'),('spermatazoon','spermatozoon'),('spilled','spilt'),('spilt','spilled'),('spilts','splits'),('spinned','spun'),('splited','split'),('spliting','splitting'),('splitted','split'),('spokeman','spokesman'),('spokemen','spokesmen'),('spokeperson','spokesperson'),('spokewoman','spokeswoman'),('spolier','spoiler'),('spontanious','spontaneous'),('sponteous','spongeous'),('sporadical','sporadic'),('sporatic','sporadic'),('sportscar','sports car'),('spoted','spotted'),('spouce','spouse'),('sprial','spiral'),('spunoff','spun off'),('spured','spurned'),('sqaud','squad'),('stabalize','stabilize'),('stabalizer','stabilizer'),('stabed','stabbed'),('stabilty','stability'),('stablity','stability'),('stablize','stabilize'),('stagnet','stagnant'),('stallwart','stalwart'),('standart','standard'),('standed','stood'),('staring','staring'),('stastic','static'),('stastical','statistical'),('stastics','statics'),('stategic','strategic'),('statuatory','statutory'),('stayes','stays'),('steadly','steadily'),('stealed','stole'),('steller','stellar'),('stemed','stemmed'),('steped','stepped'),('steping','stepping'),('steriod','steroid'),('sticked','stuck'),('stike','strike'),('stinked','stunk'),('stoled','stole'),('stoped','stopped'),('stoper','stopper'),('stoping','stopping'),('storey','story'),('storng','strong'),('storys','storeys'),('straightjacket','straitjacket'),('straped','strapped'),('stratedgy','strategy'),('strech','stretch'),('strenth','strength'),('strickly','strictly'),('stricly','strictly'),('striked','struck'),('striken','stricken'),('struckout','struck 
out'),('structer','structure'),('structue','structure'),('structuraly','structurally'),('stste','state'),('stucked','stuck'),('studdering','stuttering'),('stuggle','struggle'),('subceptible','susceptible'),('suberb','suburb'),('subesquent','subsequent'),('submision','submission'),('submited','submitted'),('submiting','submitting'),('submitt','submit'),('subpena','subpoena'),('subquent','subsequent'),('subsdiary','subsidiary'),('subsequential','subsequent'),('subsitute','substitute'),('substain','sustain'),('substanial','substantial'),('substantually','substantially'),('substitue','substitute'),('subtlely','subtly'),('subtley','subtly'),('succede','succeed'),('succeedes','succeeds'),('succeptibility','susceptibility'),('succeptible','susceptible'),('succes','success'),('succesfull','successful'),('succesor','successor'),('successer','successor'),('succint','succinct'),('succum','succumb'),('suceptibility','susceptibility'),('suceptible','susceptible'),('sucsessor','successor'),('suddendly','suddenly'),('suduko','sodoku'),('sueing','suing'),('suface','surface'),('suffcient','sufficient'),('suffciently','sufficiently'),('sufficant','sufficient'),('sufficiant','sufficient'),('sugest','suggest'),('sugget','suggest'),('suject','subject'),('supass','surpass'),('supercede','supersede'),('superceed','supersede'),('supercession','supersession'),('supercessionism','supersessionism'),('superfical','superficial'),('supermacy','supremacy'),('superscaler','superscalar'),('superseed','supersede'),('supervisior','supervisor'),('suplant','supplant'),('suplement','supplement'),('suplemental','supplemental'),('suplied','supplied'),('suply','supply'),('supperior','superior'),('supplament','supplement'),('suppliment','supplement'),('supplyed','supplied'),('supposably','supposedly'),('suppossed','supposed'),('suppost','support'),('supposted','supposed'),('suppresion','suppression'),('supprise','surprise'),('supremist','supremacist'),('surelly','surely'),('surender','surrender'),('surpased','surpassed'),('surperb','superb'),('surplant','supplant'),('surroud','surround'),('surving','serving'),('survivng','surviving'),('susceptability','susceptibility'),('susceptable','susceptible'),('suscribe','subscribe'),('susequent','subsequent'),('sustance','substance'),('swaped','swapped'),('sweared','swore'),('Sweeden','Sweden'),('Sweedish','Swedish'),('sweeped','swept'),('sweept','swept'),('swimmed','swam'),('switchs','switches'),('swored','swore'),('symbolical','symbolic'),('symetric','symmetric'),('synchophant','sycophant'),('synchotron','synchrotron'),('syncophant','sycophant'),('synthetical','synthetic'),('syphillis','syphilis'),('syphon','siphon'),('sypnosis','synopsis'),('tableclothes','tablecloths'),('tablewear','tableware'),('tacticaly','tactically'),('tactict','tactic'),('tactitian','tactician'),('tailight','taillight'),('taked','taken'),('tamborine','tambourine'),('tandum','tandem'),('tared','tarred'),('tarif','tariff'),('tarrif','tariff'),('tarriff','tariff'),('tatics','tactics'),('tatoo','tattoo'),('teachs','teaches'),('teamate','teammate'),('teared','teared'),('techical','technical'),('techincal','technical'),('technicaly','technically'),('technican','technician'),('tecnology','technology'),('televison','television'),('televize','televise'),('telled','told'),('tempermental','temperamental'),('temporarely','temporarily'),('temprature','temperature'),('tenament','tenement'),('tendonitis','tendinitis'),('tennament','tenement'),('tennant','tenant'),('tennement','tenement'),('tenous','tenuous'),('tenticle','tentacle'
),('tenture','tenure'),('teritory','territory'),('terminaly','terminally'),('terresterial','terrestrial'),('territores','territories'),('territority','territory'),('testamonial','testimonial'),('testamony','testimony'),('testical','testicle'),('testiment','testament'),('testimont','testament'),('Teusday','Tuesday'),('textes','texts'),('thay','they'),('theit','their'),('themself','herself'),('themsleves','themselves'),('theologan','theologian'),('theraphy','therapy'),('therfore','therefore'),('theroretical','theoretical'),('thickend','thickened'),('thiefs','thieves'),('thieve','thief'),('thingking','thinking'),('thoery','theory'),('thorogh','thorough'),('thoroghbred','thoroughbred'),('thoroughbread','thoroughbred'),('thourough','thorough'),('thouroughly','thoroughly'),('thousnad','thousand'),('threated','threatened'),('threath','threat'),('thridly','thirdly'),('throughfare','thoroughfare'),('throughtout','throughout'),('thrusted','thrust'),('tht','that'),('thte','the'),('Tiajuana','Tijuana'),('tipical','typical'),('togather','together'),('togeather','together'),('togther','together'),('tolerent','tolerant'),('toliet','toilet'),('tomorow','tomorrow'),('tooked','taken'),('tookover','took over'),('tooless','toolless'),('toriod','toroid'),('tornament','tournament'),('totaly','totally'),('tought','taught'),('tourmanent','tournament'),('tracklisting','track listing'),('tradgedy','tragedy'),('tradicional','traditional'),('traditon','tradition'),('traditonal','traditional'),('traing','training'),('traingle','triangle'),('trancend','transcend'),('trancended','transcended'),('trancendental','transcendental'),('traning','training'),('transcept','transept'),('transend','transcend'),('transferr','transfer'),('transister','transistor'),('transitionary','transitional'),('transluscent','translucent'),('transmision','transmission'),('transmited','transmitted'),('transmiter','transmitter'),('transmiting','transmitting'),('transmitt','transmit'),('trate','trait'),('treaded','trod'),('treshold','threshold'),('tresle','trestle'),('tresspass','trespass'),('triathalon','triathlon'),('tripple','triple'),('truley','truly'),('tryed','tried'),('trys','tries'),('tumultous','tumultuous'),('turkies','turkeys'),('Turky','Turkey'),('turnes','turns'),('turnstyle','turnstile'),('twelveth','twelfth'),('twentys','twenties'),('twitter','Twitter'),('type-casted','type-cast'),('u','you'),('ukelele','ukulele'),('Ukrane','Ukraine'),('ulitmate','ultimate'),('ultimatim','ultimatum'),('ultimatium','ultimatum'),('ultimatley','ultimately'),('ultimatly','ultimately'),('ultimitely','ultimately'),('un-official','unofficial'),('unabatted','unabated'),('unactive','inactive'),('unadvisable','inadvisable'),('unaminous','unanimous'),('unavalable','unavailable'),('unavaliable','unavailable'),('unavalible','unavailable'),('uncertian','uncertain'),('uncomplete','incomplete'),('unconclusive','inconclusive'),('unconscience','unconscious'),('unconspicuous','inconspicuous'),('unconventionaly','unconventionally'),('undecisive','indecisive'),('underated','underrated'),('undergound','underground'),('underlayed','underlaid'),('underlaying','underlying'),('underlied','underlay'),('underly','underlie'),('undermind','undermine'),('underming','undermining'),('underware','underwear'),('undestructible','indestructible'),('undevelopment','underdevelopment'),('undisputable','indisputable'),('undoubtably','undoubtedly'),('uneeded','unneeded'),('uneffected','unaffected'),('uneffective','ineffective'),('unequaly','unequally'),('unescapable','inescapable'),('unexpect
antly','unexpectedly'),('unexpensive','inexpensive'),('unexplicable','inexplicable'),('unexplicit','inexplicit'),('unfamilar','unfamiliar'),('unfinnished','unfinished'),('unforseen','unforeseen'),('unfrequent','infrequent'),('unfrequently','infrequently'),('unhumane','inhumane'),('unifed','unified'),('unifrom','uniform'),('unindentified','unidentified'),('United Kingdon','United Kingdom'),('univerities','universities'),('universaly','universally'),('universites','universities'),('universties','universities'),('universty','university'),('unknow','unknown'),('unlikley','unlikely'),('unnemployment','unemployment'),('unofficialy','unofficially'),('unorthadox','unorthodox'),('unperiodic','aperiodic'),('unphased','unfazed'),('unplausible','implausible'),('unpolite','impolite'),('unpresidented','unprecedented'),('unprobable','improbable'),('unregular','irregular'),('unsatiable','insatiable'),('unsinged','unsigned'),('unstability','instability'),('unsual','unusual'),('unsued','ensued'),('unsufficient','insufficient'),('unti','unit'),('untied','united'),('untracable','untraceable'),('unusualy','unusually'),('unviable','inviable'),('unviel','unveil'),('unviersities','universities'),('unvisible','invisible'),('unware','unaware'),('upgarde','upgrade'),('upholded','upheld'),('upsetted','upset'),('upthe','up the'),('ur','your'),('useally','usually'),('usefullness','usefulness'),('usuall','usual'),('usully','usually'),('usurpate','usurp'),('ususal','usual'),('utalise','use'),('utalize','use'),('utilisied','used'),('utilites','utilities'),('utilizied','used'),('vacency','vacancy'),('vacent','vacant'),('vacume','vacuum'),('vairous','various'),('valient','valiant'),('vannish','vanish'),('varable','variable'),('vareity','variety'),('variaty','variety'),('varible','variable'),('varience','variance'),('varietes','varieties'),('varifiable','verifiable'),('varify','verify'),('varios','various'),('variosly','variously'),('varisty','varsity'),('varius','various'),('varous','various'),('varys','varies'),('vasodialation','vasodilation'),('vasodialator','vasodilator'),('vechicle','vehicle'),('vector-born','vector-borne'),('vegeterian','vegetarian'),('vegitarian','vegetarian'),('vehichle','vehicle'),('veign','vein'),('veiw','view'),('venacular','vernacular'),('Venezeula','Venezuela'),('Venezuala','Venezuela'),('Venezulea','Venezuela'),('verbage','verbiage'),('verbaly','verbally'),('verifed','verified'),('verifing','verifying'),('verion','version'),('versimilitude','verisimilitude'),('versitile','versatile'),('vertabra','vertebra'),('vertabrate','vertebrate'),('verticle','vertical'),('veted','vetted'),('veteren','veteran'),('vetinarian','veterinarian'),('vetinary','veterinary'),('vetran','veteran'),('vice-versa','vice versa'),('viceversa','vice versa'),('vicintity','vicinity'),('vicinty','vicinity'),('vigilent','vigilant'),('vilage','village'),('vilager','villager'),('villans','villains'),('villany','villainy'),('villianous','villainous'),('villified','vilified'),('vinegarette','vinaigrette'),('vinegrette','vinaigrette'),('vioce','voice'),('violance','violence'),('Virgina','Virgin'),('viri','viri'),('virii','viruses'),('virtural','virtual'),('visability','visibility'),('visist','visit'),('vison','vision'),('visonary','visionary'),('vist','visit'),('vistation','visitation'),('visualy','visually'),('vittel','vittle'),('voilation','violation'),('voilence','violence'),('voilent','violent'),('volation','violation'),('Volkswagon','Volkswagen'),('vulcanic','volcanic'),('want\'s','wants'),('was\'nt','was not'),('was 
was','was'),('watchs','watches'),('weakend','weakened'),('weared','wore'),('webcasted','webcast'),('webstie','website'),('weekened','weakened'),('weere','we\'re'),('wel','well'),('welcame','welcomed'),('wendsday','Wednesday'),('were\'nt','were not'),('werent','were not'),('wesbite','website'),('wether','weather'),('wheather','weather'),('wheelbarrel','wheelbarrow'),('whent','went'),('wherabouts','whereabouts'),('wherby','whereby'),('wherin','wherein'),('whilest','while'),('whinning','whining'),('whitch','which'),('wholely','wholly'),('wholistic','holistic'),('wholley','wholly'),('whon','whom'),('whos','who\'s'),('wifes','wife\'s'),('wifes\'','wife\'s'),('wihtin','within'),('wihtout','without'),('willingess','willingness'),('Wisconson','Wisconsin'),('wistle','whistle'),('witchs','witches'),('with with','with'),('withought','without'),('withthe','with the'),('witout','without'),('wittness','witness'),('womens\'','women\'s'),('wont','won\'t'),('worlwide','worldwide'),('worsten','worsen'),('worstening','worsening'),('would\'nt','would not'),('wouldnt','would not'),('wreckless','reckless'),('wrecklessness','recklessness'),('writed','writhed'),('writter','writer'),('writting','writing'),('wroten','written'),('ws','was'),('X-Box','Xbox'),('XBox','Xbox'),('ya\'ll','y\'all'),('yaht','yacht'),('yelow','yellow'),('yhe','the'),('younge','young'),('yuonger','younger'),('Ziegfield Follies','Ziegfeld Follies'),('a 100','100'),('a 1000','1000'),('a 11','an 11'),('a 11th','an 11th'),('a 18','an 18'),('a 1800','an 1800'),('a 18th','an 18th'),('a 18th century','an 18th-century'),('a 8','an 8'),('a 80','an 80'),('a 800','an 800'),('a 8000','an 8000'),('a 80th','an 80th'),('a 81','an 81'),('a 82','an 82'),('a 83','an 83'),('a 88','an 88'),('a 89','an 89'),('a 8th','an 8th'),('a abbreviation','an abbreviation'),('a ABC','an ABC'),('a absolute','an absolute'),('a abstract','an abstract'),('a accident','an accident'),('a accidental','an accidental'),('a account','an account'),('a acoustic','an acoustic'),('a acronym','an acronym'),('a acting','an acting'),('a action','an action'),('a active','an active'),('a actor','an actor'),('a actress','an actress'),('a actual','an actual'),('a additional','an additional'),('a advanced','an advanced'),('a affair','an affair'),('a affiliate','an affiliate'),('a African','an African'),('a album','an album'),('a Algerian','an Algerian'),('a alias','an alias'),('a alien','an alien'),('a alternative','an alternative'),('a AM','an AM'),('a amateur','an amateur'),('a American','an American'),('a amount','an amount'),('a an','a'),('a analog','an analog'),('a analogue','an analogue'),('a ancient','an ancient'),('a angle','an angle'),('a Angolan','an Angolan'),('a annual','an annual'),('a another','another'),('a answer','an answer'),('a antenna','an antenna'),('a anti','an anti'),('a Arabian','an Arabian'),('a Arabic','an Arabic'),('a Argentine','an Argentine'),('a Armenian','an Armenian'),('a Asian','an Asian'),('a assistant','an assistant'),('a associate','an associate'),('a ATM','an ATM'),('a Australian','an Australian'),('a Austrian','an Austrian'),('a average','an average'),('a back up','a back-up'),('a businesswomen','a businesswoman'),('a dominate','a dominant'),('a early','an early'),('a effective','an effective'),('a Egyptian','an Egyptian'),('a eight','an eight'),('a eighth','an eighth'),('a eighteen','an eighteen'),('a eighteenth','an eighteenth'),('a eighty','an eighty'),('a electric','an electric'),('a electronic','an electronic'),('a eleven','an eleven'),('a eleventh','an 
eleventh'),('a elite','an elite'),('a embedded','an embedded'),('a English','an English'),('a entire','an entire'),('a EP','an EP'),('a epic','an epic'),('a episode','an episode'),('a equal','an equal'),('a estimate','an estimate'),('a Ethiopian','an Ethiopian'),('a ethnic','an ethnic'),('a example','an example'),('a extra','an extra'),('a FM','an FM'),('a impact','an impact'),('a independent','an independent'),('a Indian','an Indian'),('a individual','an individual'),('a Indonesian','an Indonesian'),('a indoor','an indoor'),('a information','an information'),('a initiative','an initiative'),('a intelligent','an intelligent'),('a interesting','an interesting'),('a interim','an interim'),('a interior','an interior'),('a intermediate','an intermediate'),('a international','an international'),('a Internet','an Internet'),('a intersection','an intersection'),('a interview','an interview'),('a introduction','an introduction'),('a Iranian','an Iranian'),('a Iraqi','an Iraqi'),('a Irish','an Irish'),('a iron','an iron'),('a island','an island'),('a Israeli','an Israeli'),('a issue','an issue'),('a Italian','an Italian'),('a line up','a line-up'),('a lock out','a lockout'),('a lose','a loss'),('a manufacture','a manufacturer'),('a match up','a match-up'),('a MLB','an MLB'),('a MRI','an MRI'),('a NBA','an NBA'),('a NBC','an NBC'),('a NFL','an NFL'),('a ocean','an ocean'),('a offensive','an offensive'),('a official','an official'),('a Ohio','an Ohio'),('a oil','an oil'),('a old','an old'),('a Olympic','an Olympic'),('a online','an online'),('a only','an only'),('a only a','only a'),('a open','an open'),('a opinion','an opinion'),('a opposite','an opposite'),('a organization','an organization'),('a original','an original'),('a other','an other'),('a outbreak','an outbreak'),('a outdoor','an outdoor'),('a outside','an outside'),('a overtime','an overtime'),('a owner','an owner'),('a run in','a run-in'),('a set back','a set-back'),('a set up','a setup'),('a several','several'),('a toss up','a toss-up'),('a two months','a two-month'),('a ultimate','an ultimate'),('a undercover','an undercover'),('a underground','an underground'),('a unfortunate','an unfortunate'),('a unusual','an unusual'),('a upgrade','an upgrade'),('a upper','an upper'),('a urban','an urban'),('a work out','a workout'),('Aboriginal decent','Aboriginal descent'),('AC current','AC'),('ACL ligament','ACL'),('affect on','effect on'),('affect upon','effect upon'),('affects of','effects of'),('African decent','African descent'),('after been','after being'),('against who','against whom'),('ago since','since'),('airplane hanger','airplane hangar'),('Albanian decent','Albanian descent'),('all it\'s','all its'),('all though','although'),('all tolled','all told'),('alma matter','alma mater'),('along side','alongside'),('alongside with','along with'),('alter boy','altar boy'),('alter server','altar server'),('AM in the morning','AM'),('American decent','American descent'),('an affect','an effect'),('an another','another'),('an British','a British'),('an Canadian','a Canadian'),('an European','a European'),('an half','a half'),('an halt','a halt'),('an hand','a hand'),('an head','a head'),('an heart','a heart'),('an hero','a hero'),('an high','a high'),('an new','a new'),('an nine','a nine'),('an number','a number'),('an other','another'),('an Scottish','a Scottish'),('an seven','a seven'),('an six','a six'),('an statement','a statement'),('an ten','a ten'),('an unit','a unit'),('an Unix','a Unix'),('an USB','a USB'),('and etc','etc'),('another 
wise','an otherwise'),('any another','another'),('any where','anywhere'),('apart for','apart from'),('apart form','apart from'),('are been','are being'),('are build','are built'),('are can','can'),('are drew','are drawn'),('are have','have'),('are lain','are laid'),('are meet','are met'),('are renown','are renowned'),('are were','are'),('Armenian decent','Armenian descent'),('as back up','as backup'),('as been','as being'),('as followed','as follows'),('Asian decent','Asian descent'),('aside form','aside from'),('ATM machine','ATM'),('away form','away from'),('back drop','backdrop'),('back fire','backfire'),('back peddle','backpedal'),('back round','background'),('badly effected','badly affected'),('baited breath','bated breath'),('barb wire','barbed wire'),('based off','based on'),('based out of','based in'),('basic principal','basic principle'),('be build','be built'),('be cause','because'),('be drew','be drawn'),('be it\'s','be its'),('be lain','be laid'),('be ran','be run'),('be rode','be ridden'),('be ware','beware'),('became know','became known'),('became to be','became'),('because of the fact that','because'),('been a while','been awhile'),('been build','been built'),('been it\'s','been its'),('been know','been known'),('been lain','been laid'),('been ran','been run'),('been rode','been ridden'),('before hand','beforehand'),('before it\'s','before its'),('behind it\'s','behind its'),('being build','being built'),('being giving','been giving'),('being it\'s','being its'),('being lain','being laid'),('being ran','being run'),('being rode','being ridden'),('being taking','been taking'),('below it\'s','below its'),('beneath it\'s','beneath its'),('beside it\'s','beside its'),('besides it\'s','besides its'),('better that','better than'),('better then','better than'),('between it\'s','between its'),('beyond it\'s','beyond its'),('Bogota, Columbia','Bogotá'),('both it\'s','both its'),('brake away','break away'),('breath fire','breathe fire'),('brew haha','brouhaha'),('Brinsley Schwartz','Brinsley Schwarz'),('by fisherman','by fishermen'),('by it\'s','by its'),('by who\'s','by whose'),('can been','can be'),('Canadian decent','Canadian descent'),('card shark','card sharp'),('Caribbean decent','Caribbean descent'),('certain extend','certain extent'),('chalk full','chock-full'),('Chinese decent','Chinese descent'),('chocked full','chock-full'),('close proximity','closeness'),('comprise of','comprise'),('comprised of','composed of'),('comprises entirely of','comprises entirely'),('comprises exclusively of','comprises exclusively'),('comprises mainly of','comprises mainly'),('comprises mostly of','comprises mostly'),('comprises of','comprises'),('comprises only of','comprises only'),('comprising chiefly of','comprising chiefly'),('comprising entirely of','comprising entirely'),('comprising exclusively of','comprising exclusively'),('comprising generally of','comprising generally'),('comprising largely of','comprising largely'),('comprising mainly of','comprising mainly'),('comprising mostly of','comprising mostly'),('comprising of','comprising'),('comprising only of','comprising only'),('comprising primarily of','comprising primarily'),('comprising principally of','comprising principally'),('comprising solely of','comprising solely'),('comprising totally of','comprising totally'),('comprising wholly of','comprising wholly'),('compromised of','comprised'),('consists a','comprises a'),('consists an','comprises an'),('constellation prize','consolation prize'),('constituted of','consisted 
of'),('constitutes of','consists of'),('constituting of','consisting of'),('construction sight','construction site'),('contains of','consists of'),('could been','could be'),('could spent','could spend'),('currently at this time','currently'),('daily regiment','daily regimen'),('DC current','DC'),('de factor','de facto'),('de rigor','de rigueur'),('death nail','death knell'),('deciding on how','deciding how'),('despite of','despite'),('did gave','did give'),('didn\'t fair','didn\'t fare'),('didn\'t had','didn\'t have'),('different tact','different tack'),('different to','different from'),('direct affect','direct effect'),('disc break','disc brake'),('do to','due to'),('doe snot','does not'),('dominate player','dominant player'),('dominate role','dominant role'),('door jam','door jamb'),('double header','doubleheader'),('down side','downside'),('due to the fact','because'),('during from','during'),('during in','during'),('during of','during'),('during to','during'),('each are','each is'),('easier then','easier than'),('egg yoke','egg yolk'),('either criteria','either criterion'),('either phenomena','either phenomenon'),('electrical current','electric current'),('eluded to','alluded to'),('en mass','en masse'),('even thought','even though'),('even tough','even though'),('eye brow','eyebrow'),('eye lash','eyelash'),('eye lid','eyelid'),('eye sight','eyesight'),('eye sore','eyesore'),('faired badly','fared badly'),('faired better','fared better'),('faired far','fared far'),('faired less','fared less'),('faired little','fared little'),('faired much','fared much'),('faired poorly','fared poorly'),('faired quite','fared quite'),('faired rather','fared rather'),('faired slightly','fared slightly'),('faired somewhat','fared somewhat'),('faired well','fared well'),('faired worse','fared worse'),('farther then','farther than'),('faster then','faster than'),('figure head','figurehead'),('first and foremost','first'),('flag ship','flagship'),('flow of current','current'),('flow of electric current','electric current'),('follow suite','follow suit'),('for it\'s','for its'),('forgone conclusion','foregone conclusion'),('for who','for whom'),('forth place','fourth place'),('free reign','free rein'),('freshman are','freshman is'),('from who','from whom'),('get pass','get past'),('going threw','going through'),('got ran','got run'),('ground work','groundwork'),('guest stared','guest-starred'),('had arose','had arisen'),('had awoke','had awoken'),('had be','had been'),('had became','had become'),('had began','had begun'),('had being','had been'),('had ben','had been'),('had bore','had borne'),('had broke','had broken'),('had brung','had brought'),('had came','had come'),('had chose','had chosen'),('had did','had done'),('had drank','had drunk'),('had drew','had drawn'),('had drove','had driven'),('had fell','had fallen'),('had flew','had flown'),('had forbad','had forbidden'),('had forbade','had forbidden'),('had gave','had given'),('had grew','had grown'),('had it\'s','had its'),('had knew','had known'),('had know','had known'),('had meet','had met}'),('had mislead','had misled'),('had overcame','had overcome'),('had overran','had overrun'),('had overtook','had overtaken'),('had plead','had pleaded'),('had ran','had run'),('had rang','had rung'),('had rode','had ridden'),('had rose','had risen'),('had sang','had sung'),('had saw','had seen'),('had send','had sent'),('had shook','had shaken'),('had sit','had sat'),('had sowed','had sown'),('had spend','had spent'),('had spoke','had spoken'),('had sprang','had 
sprung'),('had swam','had swum'),('had threw','had thrown'),('had throve','had thriven'),('had thunk','had thought'),('had took','had taken'),('had tore','had torn'),('had trod','had trodden'),('had undertook','had undertaken'),('had underwent','had undergone'),('had went','had gone'),('had woke','had woken'),('had wore','had worn'),('had wrote','had written'),('hadn\'t went','hadn\'t gone'),('has arose','has arisen'),('has awoke','has awoken'),('has be','has been'),('has became','became'),('has began','began'),('has being','as being'),('has ben','has been'),('has bore','has borne'),('has broke','has broken'),('has brung','has brought'),('has build','has built'),('has came','has come'),('has chose','has chosen'),('has did','has done'),('has drank','has drunk'),('has drew','has drawn'),('has drove','has driven'),('has fell','has fallen'),('has flew','has flown'),('has forbad','has forbidden'),('has forbade','has forbidden'),('has gave','has given'),('has gives','gives'),('has it\'s','has its'),('has knew','has known'),('has know','has known'),('has meet','has met}'),('has mislead','has misled'),('has overcame','has overcome'),('has plays','has played'),('has plead','has pleaded'),('has ran','has run'),('has rang','has rung'),('has sang','has sung'),('has shook','has shaken'),('has spoke','has spoken'),('has sprang','has sprung'),('has swam','has swum'),('has threw','has thrown'),('has throve','has thrived'),('has thunk','has thought'),('has took','has taken'),('has trod','has trodden'),('has undertook','has undertaken'),('has underwent','has undergone'),('has was','he was'),('has went','has gone'),('has woke','has woken'),('has wrote','has written'),('have drank','have drunken'),('have it\'s','have its'),('have mislead','have misled}'),('have ran','have run'),('have rang','have rung'),('have sang','have sung'),('have sprang','have sprung'),('have swam','have swum'),('have took','have taken'),('have underwent','have undergone'),('have went','have gone'),('having became','having become'),('having began','having begun'),('having being','having been'),('having it\'s','having its'),('having ran','having run'),('having sang','having sung'),('having swam','having swum'),('having took','having taken'),('having underwent','having undergone'),('having went','having gone'),('hay day','heyday'),('he begun','he began'),('he garnished','he garnered'),('he let\'s','he lets'),('he plead','he pleaded'),('he seen','he saw'),('he\'s drank','he\'s drunk'),('head gear','headgear'),('head quarters','headquarters'),('head stone','headstone'),('head wear','headwear'),('house hold','household'),('how ever','however'),('imminent domain','eminent domain'),('in affect','in effect'),('in close proximity to','close to'),('in masse','en masse'),('in parenthesis','in parentheses'),('in placed','in place'),('in principal','in principle'),('in stead of','instead of'),('in tact','intact'),('in titled','entitled'),('in vein','in vain'),('into affect','into effect'),('into to','into'),('is also is','is also'),('is are','are'),('is be','is'),('is been','has been'),('is comprised from','comprises'),('is comprised with','comprises'),('is compromised of','comprises'),('is contained of','contains'),('is does','does'),('is has','has'),('is know','is known'),('is the fact that','is that'),('is wants','wants'),('is was','it was'),('is were','is where'),('it begun','it began'),('it comprises of','it comprises'),('it effect','its effect'),('it lied','it lay'),('it self','itself'),('it spend','it spent'),('it weighted','it weighed'),('it 
weights','it weighs'),('it\'s 100th','its 100th'),('it\'s 10th','its 10th'),('it\'s 11th','its 11th'),('it\'s 125th','its 125th'),('it\'s 12th','its 12th'),('it\'s 13th','its 13th'),('it\'s 14th','its 14th'),('it\'s 150th','its 150th'),('it\'s 15th','its 15th'),('it\'s 16th','its 16th'),('it\'s 17th','its 17th'),('it\'s 18th','its 18th'),('it\'s 19th','its 19th'),('it\'s 1st','its 1st'),('it\'s 2','its two'),('it\'s 2000th','its 2000th'),('it\'s 200th','its 200th'),('it\'s 20th','its 20th'),('it\'s 21st','its 21st'),('it\'s 25th','its 25th'),('it\'s 2nd','its 2nd'),('it\'s 300th','its 300th'),('it\'s 30th','its 30th'),('it\'s 3rd','its 3rd'),('it\'s 400th','its 400th'),('it\'s 40th','its 40th'),('it\'s 4th','its 4th'),('it\'s 500th','its 500th'),('it\'s 50th','its 50th'),('it\'s 5th','its 5th'),('it\'s 6','its six'),('it\'s 60th','its 60th'),('it\'s 6th','its 6th'),('it\'s 70th','its 70th'),('it\'s 7th','its 7th'),('it\'s 80th','its 80th'),('it\'s 8th','its 8th'),('it\'s 90th','its 90th'),('it\'s 9th','its 9th'),('it\'s A-side','its A-side'),('it\'s ability','its ability'),('it\'s advantage','its advantage'),('it\'s aim','its aim'),('it\'s anniversary','its anniversary'),('it\'s annual','its annual'),('it\'s appearance','its appearance'),('it\'s B-side','its B-side'),('it\'s best','its best'),('it\'s capital','its capital'),('it\'s class','its class'),('it\'s closure','its closure'),('it\'s construction','its construction'),('it\'s contents','its contents'),('it\'s course','its course'),('it\'s current','its current'),('it\'s debut','its debut'),('it\'s doors','its doors'),('it\'s eastern','its eastern'),('it\'s end','its end'),('it\'s entire','its entire'),('it\'s entirety','its entirety'),('it\'s final','its final'),('it\'s first','its first'),('it\'s former','its former'),('it\'s fourth','its fourth'),('it\'s goal','its goal'),('it\'s highest','its highest'),('it\'s history','its history'),('it\'s home','its home'),('it\'s identity','its identity'),('it\'s inability','its inability'),('it\'s inception','its inception'),('it\'s initial','its initial'),('it\'s international','its international'),('it\'s junction','its junction'),('it\'s kind','its kind'),('it\'s lack','its lack'),('it\'s last','its last'),('it\'s latest','its latest'),('it\'s lead','its lead'),('it\'s leader','its leader'),('it\'s length','its length'),('it\'s lowest','its lowest'),('it\'s main','its main'),('it\'s major','its major'),('it\'s maximum','its maximum'),('it\'s minimum','its minimum'),('it\'s money','its money'),('it\'s name','its name'),('it\'s national','its national'),('it\'s north','its north'),('it\'s northern','its northern'),('it\'s original','its original'),('it\'s own','its own'),('it\'s peak','its peak'),('it\'s performance','its performance'),('it\'s period','its period'),('it\'s popularity','its popularity'),('it\'s population','its population'),('it\'s previous','its previous'),('it\'s price','its price'),('it\'s primary','its primary'),('it\'s prime','its prime'),('it\'s purpose','its purpose'),('it\'s release','its release'),('it\'s residents','its residents'),('it\'s rival','its rival'),('it\'s second','its second'),('it\'s sister','its sister'),('it\'s size','its size'),('it\'s source','its source'),('it\'s south','its south'),('it\'s southern','its southern'),('it\'s status','its status'),('it\'s style','its style'),('it\'s subsidiary','its subsidiary'),('it\'s successor','its successor'),('it\'s tail','its tail'),('it\'s target','its target'),('it\'s team','its team'),('it\'s tenth','its 
tenth'),('it\'s test','its test'),('it\'s theme','its theme'),('it\'s third','its third'),('it\'s timeslot','its timeslot'),('it\'s toll','its toll'),('it\'s total','its total'),('it\'s type','its type'),('it\'s usage','its usage'),('it\'s user','its user'),('it\'s value','its value'),('it\'s way','its way'),('it\'s website','its website'),('it\'s weight','its weight'),('it\'s western','its western'),('it\'s width','its width'),('it\'s worst','its worst'),('it\'s yearly','its yearly'),('its is','it is'),('Jimmy Buffet','Jimmy Buffett'),('Jimmy Hendrix','Jimi Hendrix'),('jive with','jibe with'),('key note','keynote'),('lack there of','lack thereof'),('laid ahead','lay ahead'),('laid dormant','lay dormant'),('laid empty','lay empty'),('laid fallow','lay fallow'),('larger that','larger than'),('larger then','larger than'),('laughing stock','laughingstock'),('law suite','lawsuit'),('lay around','lie around'),('lay low','lie low'),('laying around','lying around'),('laying awake','lying awake'),('laying low','lying low'),('lays atop','lies atop'),('lays beside','lies beside'),('lays in','lies in'),('lays low','lies low'),('lays near','lies near'),('lays on','lies on'),('lead by','led by'),('lead roll','lead role'),('leading roll','leading role'),('less dominate','less dominant'),('less that','less than'),('less then','less than'),('lesser then','less than'),('life time','lifetime'),('lighter then','lighter than'),('lions share','lion\'s share'),('loose to','lose to'),('loosing effort','losing effort'),('loosing record','losing record'),('loosing season','losing season'),('loosing streak','losing streak'),('loosing team','losing team'),('loosing the','losing the'),('loosing to','losing to'),('lot\'s of','lots of'),('lower that','lower than'),('lower then','lower than'),('made it\'s','made its'),('major roll','major role'),('make it\'s','make its'),('managerial reigns','managerial reins'),('mash potatoes','mashed potatoes'),('may been','may be'),('maybe be','may be'),('MCL ligament','MCL'),('mean while','meanwhile'),('might of','might have'),('Milwaukee, Oregon','Milwaukie'),('minor roll','minor role'),('more dominate','more dominant'),('more optimal','better'),('more that','more than'),('most dominate','most dominant'),('most optimal','best'),('most populace','most populous'),('most well-known','best-known'),('must of','must have'),('mute point','moot point'),('nation wide','nationwide'),('near by','nearby'),('neither criteria','neither criterion'),('neither phenomena','neither phenomenon'),('Netherland Antilles','Netherlands Antilles'),('new comer','newcomer'),('next store','next-door'),('no where to','nowhere to'),('note worthy','noteworthy'),('now a days','nowadays'),('of been','have been'),('oil barron','oil baron'),('on going','ongoing'),('on-going','ongoing'),('one criteria','one criterion'),('one phenomena','one phenomenon'),('originally born in','born in'),('other then','other than'),('out grow','outgrow'),('out side','outside'),('over a 100','over 100'),('over a 1000','over 1000'),('over hear','overhear'),('over heard','overheard'),('over look','overlook'),('over looked','overlooked'),('over looking','overlooking'),('over rated','overrated'),('over seas','overseas'),('over see','oversee'),('parent\'s house','parents\' house'),('past away','passed away'),('past down','passed down'),('per say','per se'),('Phillips Arena','Philips Arena'),('player\'s union','players\' union'),('playoff birth','playoff berth'),('premier episode','premiere episode'),('principle action','principal 
action'),('principle activity','principal activity'),('principle actor','principal actor'),('principle advantage','principal advantage'),('principle advocate','principal advocate'),('principle agent','principal agent'),('principle aim','principal aim'),('principle area','principal area'),('principle artist','principal artist'),('principle assistant','principal assistant'),('principle attraction','principal attraction'),('principle author','principal author'),('principle branch','principal branch'),('principle cast','principal cast'),('principle caste','principal caste'),('principle cause','principal cause'),('principle character','principal character'),('principle church','principal church'),('principle city','principal city'),('principle component','principal component'),('principle composer','principal composer'),('principle goal','principal goal'),('principle group','principal group'),('principle method','principal method'),('principle office','principal office'),('principle officer','principal officer'),('principle owner','principal owner'),('principle photography','principal photography'),('principle source','principal source'),('principle student','principal student'),('principle town','principal town'),('put fourth','put forth'),('rather then','rather than'),('reek havoc','wreak havoc'),('reign in','rein in'),('\"reigned in\"','reined in'),('reticence to','reluctance to'),('reticent to','reluctant to'),('role call','roll call'),('roll player','role player'),('runner up','runner-up'),('set backs','setbacks'),('she begun','she began'),('she let\'s','she lets'),('she seen','she saw'),('she weighted','she waited'),('short coming','shortcoming'),('shorter then','shorter than'),('shortly there after','shortly thereafter'),('should been','should be'),('should of','should have'),('side affect','side effect'),('side kick','sidekick'),('since it\'s','since its'),('since years','for years'),('single handily','single-handedly'),('site lines','sight lines'),('slue of','slew of'),('smaller then','smaller than'),('smarter then','smarter than'),('sneak peak','sneak peek'),('some how','somehow'),('some one','someone'),('some what','somewhat'),('some where','somewhere'),('soon there after','soon thereafter'),('sooner then','sooner than'),('sophomore album','second album'),('sophomore single','second single'),('spilt among','split among'),('spilt between','split between'),('spilt into','split into'),('spilt up','split up'),('spinal chord','spinal cord'),('split in to','split into'),('sq ft','ft²'),('sq in','in²'),('sq km','km²'),('sq mi','mi²'),('squared feet','square feet'),('squared inches','square inches'),('squared kilometers','square kilometers'),('squared meters','square meters'),('squared miles','square miles'),('stale mate','stalemate'),('staring role','starring role'),('starring roll','starring role'),('stay a while','stay awhile'),('strike outs','strikeouts'),('stronger then','stronger than'),('suppose to','supposed to'),('take affect','take effect'),('teacher\'s union','teachers\' union'),('that\'s is','that is'),('that\'s was','that was'),('the 1st of January','1 January'),('the are','that are'),('the began','that began'),('the both the','both the'),('the had','that had'),('the have','that have'),('the he was','he was'),('the one\'s','one\'s'),('the only the','only the'),('the their','their'),('the them','them'),('the these','these'),('the this','this'),('the those','those'),('the was','that was'),('the went','then went'),('the were','they were'),('the where the','where the'),('their 
are','there are'),('their had','there had'),('their has','there has'),('their have','there have'),('their is','there is'),('their was','there was'),('their were','there were'),('their would','there would'),('them selves','themselves'),('there after','thereafter'),('there best','their best'),('there by','thereby'),('there fifth','their fifth'),('there final','their final'),('there first','their first'),('there fore','therefore'),('there fourth','their fourth'),('there last','their last'),('there new','their new'),('there next','their next'),('there of','thereof'),('there only','their only'),('there own','their own'),('there second','their second'),('there third','their third'),('there where','there were'),('there\'s are','theirs are'),('there\'s is','theirs is'),('there\'s was','theirs was'),('these where','these were'),('they begun','they began'),('they garnished','they garnered'),('they includes','they include'),('they is','there is'),('they past','the past'),('they player','the player'),('they way','the way'),('they weight','they weigh'),('they where','they wear'),('they\'re are','there are'),('through out','threw out'),('time outs','timeouts'),('to back fire','to backfire'),('to bath','to bathe'),('to breath','to breathe'),('to built','to build'),('to chose','to choose'),('to forego','to forgo'),('to grown','to grow'),('to lit','to light'),('to they\'re','to their'),('to together','together'),('too also','also'),('too be','to be'),('took a while','took awhile'),('took affect','took effect'),('tot he','to the'),('under go','undergo'),('under going','undergoing'),('under gone','undergone'),('under it\'s','under its'),('under rated','underrated'),('under take','undertake'),('under wear','underwear'),('under went','underwent'),('underneath it\'s','underneath its'),('United State\'s','United States\''),('United Stats','United States'),('Unites States','United States'),('unlike it\'s','unlike its'),('until it\'s','until its'),('up field','upfield'),('up it\'s','up its'),('up side','upside'),('up until','until'),('upon it\'s','upon its'),('USD dollars','US dollars'),('USD$','US dollars'),('very minimal','minimal'),('very optimal','optimal'),('very unique','unique'),('via it\'s','via its'),('vise versa','vice versa'),('vocal chords','vocal cords'),('waived off','waved off'),('warn away','worn away'),('warn down','worn down'),('warn out','worn out'),('was aloud','was allowed'),('was be','was'),('was been','has been'),('was began','was begun'),('was build','was built'),('was comprised by','comprised'),('was comprised from','comprised'),('was comprised of','comprised'),('was comprised with','comprised'),('was drank','was drunk'),('was drew','drew'),('was flew','flew'),('was grew','grew'),('was had','had'),('was has','has'),('was is','is'),('was it\'s','was its'),('was knew','was known'),('was know','was known'),('was lain','was laid'),('was mislead','was misled'),('was ran','was run'),('was rebuild','was rebuilt'),('was release','was released'),('was reran','was rerun'),('was rode','was ridden'),('was sang','was sung'),('was send','was sent'),('was shook','was shaken'),('was shoot','was shot'),('was showed','was shown'),('was spend','was spent'),('was took','was taken'),('was tore','was torn'),('was wrote','was written'),('way side','wayside'),('well know','well known'),('went rouge','went rogue'),('went threw','went through'),('were aloud','were allowed'),('were be','were'),('were been','had been'),('were began','were begun'),('were build','were built'),('were comprised from','comprised'),('were 
comprised of','comprised'),('were comprised with','comprised'),('were drew','were drawn'),('were it\'s','were its'),('were knew','were known'),('were know','were known'),('were lain','were laid'),('were overran','were overrun'),('were ran','were run'),('were rebuild','were rebuilt'),('were reran','were rerun'),('were rode','were ridden'),('were sang','were sung'),('were showed','were shown'),('were spend','were spent'),('were took','were taken'),('were tore','were torn'),('were wrote','were written'),('what lied','what lay'),('when ever','whenever'),('when into','went into'),('when off','went off'),('where abouts','whereabouts'),('where as','whereas'),('where being','were being'),('where by','whereby'),('where him','where he'),('where made','were made'),('where taken','were taken'),('where upon','whereupon'),('where won','were won'),('whereas as','whereas'),('which comprised of','which comprised'),('which in which','in which'),('which where','which were'),('while him','while he'),('who lead','who led'),('who where','who were'),('who\'s brother','whose brother'),('who\'s father','whose father'),('who\'s first','whose first'),('who\'s last','whose last'),('who\'s mother','whose mother'),('who\'s name','whose name'),('who\'s opinion','whose opinion'),('who\'s own','whose own'),('who\'s parents','whose parents'),('who\'s previous','whose previous'),('who\'s team','whose team'),('who\'s title','whose title'),('who\'s was','who was'),('whom also','who also'),('whom is','who is'),('whom was','who was'),('whose its','whose'),('whose was','which was'),('wildcard birth','wildcard berth'),('will been','will be'),('will being','will begin'),('will likely','will probably'),('will took part','took part'),('will took place','took place'),('with a the','with a'),('with along with','along with'),('with be','will be'),('with he','when he'),('with in','within'),('with out','without'),('with she','when she'),('with who','with whom'),('within close proximity to','close to'),('within in','in'),('Wold War','World War'),('world wide','worldwide'),('worse-case scenario','worst-case scenario'),('worth while','worthwhile'),('would been','would be'),('would fair','would fare'),('would forego','would forgo'),('would won','won'),('would\'ve have','would have'),('wreck havoc','wreak havoc'),('younger that','younger than'),('younger then','younger than'),('April 1st','April 1'),('April 2nd','April 2'),('April 30th','April 30'),('April 31st','April 31'),('April of','April'),('August 1st','August 1'),('August 31st','August 31'),('December 1st','December 1'),('December 31st','December 31'),('February 1st','February 1'),('February 28th','February 28'),('February 29th','February 29'),('February of','February'),('January 1st','January 1'),('January 31st','January 31'),('July 1st','July 1'),('July 31st','July 31'),('June 1st','June 1'),('June 30th','June 30'),('June 31st','June 31'),('March 1st','March 1'),('March 31st','March 31'),('May 1st','May 1'),('May 31st','May 31'),('May of','May'),('November 1st','November 1'),('November 30th','November 30'),('November 31st','November 31'),('September 1st','September 1'),('September 30th','September 30'),('September 31st','September 31'),('the 31st of December','31 December'),('a a','a'),('about about','about'),('above above','above'),('across across','across'),('after after','after'),('against against','against'),('along along','along'),('also also','also'),('an an','an'),('and and','and'),('any any','and any'),('are are','are'),('around around','around'),('as as','as'),('at 
at','at'),('back back','back'),('be be','be'),('became became','became'),('because because','because'),('become become','become'),('been been','been'),('before before','before'),('behind behind','behind'),('being being','being'),('below below','below'),('between between','between'),('both both','both'),('but but','but'),('by by','by'),('could could','could'),('during during','during'),('each each','each'),('every every','every'),('first first','first'),('for for','for'),('from from','from'),('has has','has'),('have have','have'),('having having','having'),('he he','he'),('held held','held'),('her her','her'),('his his','his'),('if if','if'),('include include','include'),('included included','included'),('including including','including'),('includes includes','includes'),('into into','into'),('is is','is'),('its its','is its'),('last last','last'),('most most','most'),('of of','of'),('on on','on'),('only only','only'),('over over','of over'),('played played','played'),('she she','she'),('should should','should'),('since since','since'),('some some','some'),('still still','still'),('such such','such'),('than than','than'),('the the','the'),('their their','their'),('them them','them'),('then then','then'),('there there','there'),('these these','these'),('they they','they'),('this this','this'),('those those','those'),('through through','through'),('to to','to'),('under under','under'),('until until','until'),('up to up to','up to'),('we we','we'),('were were','were'),('when when','when'),('where where','where'),('whether whether','whether'),('which which','which'),('while while','while'),('who who','who'),('whom whom','whom'),('whose whose','whose'),('will will','will'),('would would','would'),('a the','a'),('an a','a'),('an and','and'),('twentyone','twenty-one'),('twenty one','twenty-one'),('twentytwo','twenty-two'),('twenty two','twenty-two'),('twentythree','twenty-three'),('twenty three','twenty-three'),('twentyfour','twenty-four'),('twenty four','twenty-four'),('twentyfive','twenty-five'),('twenty five','twenty-five'),('twentysix','twenty-six'),('twenty six','twenty-six'),('twentyseven','twenty-seven'),('twenty seven','twenty-seven'),('twentyeight','twenty-eight'),('twenty eight','twenty-eight'),('twentynine','twenty-nine'),('twenty nine','twenty-nine'),('thirtyone','thirty-one'),('thirty one','thirty-one'),('thirtytwo','thirty-two'),('thirty two','thirty-two'),('thirtythree','thirty-three'),('thirty three','thirty-three'),('thirtyfour','thirty-four'),('thirty four','thirty-four'),('thirtyfive','thirty-five'),('thirty five','thirty-five'),('thirtysix','thirty-six'),('thirty six','thirty-six'),('thirtyseven','thirty-seven'),('thirty seven','thirty-seven'),('thirtyeight','thirty-eight'),('thirty eight','thirty-eight'),('thirtynine','thirty-nine'),('thirty nine','thirty-nine'),('fortyone','forty-one'),('forty one','forty-one'),('fortytwo','forty-two'),('forty two','forty-two'),('fortythree','forty-three'),('forty three','forty-three'),('fortyfour','forty-four'),('forty four','forty-four'),('fortyfive','forty-five'),('forty five','forty-five'),('fortysix','forty-six'),('forty six','forty-six'),('fortyseven','forty-seven'),('forty seven','forty-seven'),('fortyeight','forty-eight'),('forty eight','forty-eight'),('fortynine','forty-nine'),('forty nine','forty-nine'),('fiftyone','fifty-one'),('fifty one','fifty-one'),('fiftytwo','fifty-two'),('fifty two','fifty-two'),('fiftythree','fifty-three'),('fifty three','fifty-three'),('fiftyfour','fifty-four'),('fifty 
four','fifty-four'),('fiftyfive','fifty-five'),('fifty five','fifty-five'),('fiftysix','fifty-six'),('fifty six','fifty-six'),('fiftyseven','fifty-seven'),('fifty seven','fifty-seven'),('fiftyeight','fifty-eight'),('fifty eight','fifty-eight'),('fiftynine','fifty-nine'),('fifty nine','fifty-nine'),('sixtyone','sixty-one'),('sixty one','sixty-one'),('sixtytwo','sixty-two'),('sixty two','sixty-two'),('sixtythree','sixty-three'),('sixty three','sixty-three'),('sixtyfour','sixty-four'),('sixty four','sixty-four'),('sixtyfive','sixty-five'),('sixty five','sixty-five'),('sixtysix','sixty-six'),('sixty six','sixty-six'),('sixtyseven','sixty-seven'),('sixty seven','sixty-seven'),('sixtyeight','sixty-eight'),('sixty eight','sixty-eight'),('sixtynine','sixty-nine'),('sixty nine','sixty-nine'),('seventyone','seventy-one'),('seventy one','seventy-one'),('seventytwo','seventy-two'),('seventy two','seventy-two'),('seventythree','seventy-three'),('seventy three','seventy-three'),('seventyfour','seventy-four'),('seventy four','seventy-four'),('seventyfive','seventy-five'),('seventy five','seventy-five'),('seventysix','seventy-six'),('seventy six','seventy-six'),('seventyseven','seventy-seven'),('seventy seven','seventy-seven'),('seventyeight','seventy-eight'),('seventy eight','seventy-eight'),('seventynine','seventy-nine'),('seventy nine','seventy-nine'),('eightyone','eighty-one'),('eighty one','eighty-one'),('eightytwo','eighty-two'),('eighty two','eighty-two'),('eightythree','eighty-three'),('eighty three','eighty-three'),('eightyfour','eighty-four'),('eighty four','eighty-four'),('eightyfive','eighty-five'),('eighty five','eighty-five'),('eightysix','eighty-six'),('eighty six','eighty-six'),('eightyseven','eighty-seven'),('eighty seven','eighty-seven'),('eightyeight','eighty-eight'),('eighty eight','eighty-eight'),('eightynine','eighty-nine'),('eighty nine','eighty-nine'),('ninetyone','ninety-one'),('ninety one','ninety-one'),('ninetytwo','ninety-two'),('ninety two','ninety-two'),('ninetythree','ninety-three'),('ninety three','ninety-three'),('ninetyfour','ninety-four'),('ninety four','ninety-four'),('ninetyfive','ninety-five'),('ninety five','ninety-five'),('ninetysix','ninety-six'),('ninety six','ninety-six'),('ninetyseven','ninety-seven'),('ninety seven','ninety-seven'),('ninetyeight','ninety-eight'),('ninety eight','ninety-eight'),('ninetynine','ninety-nine'),('ninety nine','ninety-nine')]
data = {}
# keep only pairs where both the misspelling and the correction are a single alphabetic token
for item in raw:
    if len(re.split('[^a-zA-Z]', item[0])) == 1 and len(re.split('[^a-zA-Z]', item[1])) == 1:
        # skip pairs that differ only in capitalisation
        if item[1].lower() == item[0].lower():
            continue
        # group every misspelling under its corrected (target) form
        if item[1].lower() not in data:
            data[item[1].lower()] = []
        data[item[1].lower()].append(item[0].lower())
json_data = []
for key in data.keys():
json_data.append(
{
'input': data[key],
'target': key
}
)
print (json_data)
with open('test.json', 'w+') as f:
f.write(json.dumps(json_data))
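# For reference, each record written to test.json groups the single-word
# misspellings under their corrected form; a purely hypothetical record
# (not taken from the list above) would look like:
#   {"input": ["recieve"], "target": "receive"}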
| [
"[email protected]"
] | |
ce8b0ccddfd7609297323b335c25e3b87c13527f | 12069a0145682efe422a8aa174b850875fb3e36d | /leetcode_python/Tree/most_frequent_subtree_sum.py | bf5260e427e1228c797071b53e3abf3fc9a3fc02 | [] | no_license | ChillOrb/CS_basics | 313c8293f54e44993fa83595ff7fea21c30fa952 | 5195b032d8000a3d888e2d4068984011bebd3b84 | refs/heads/master | 2023-07-08T00:29:01.312220 | 2021-09-07T00:25:18 | 2021-09-07T00:25:18 | 403,811,622 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,469 | py | # 508. Most Frequent Subtree Sum
# Medium
#
# Given the root of a tree, you are asked to find the most frequent subtree sum. The subtree sum of a node is defined as the sum of all the node values formed by the subtree rooted at that node (including the node itself). So what is the most frequent subtree sum value? If there is a tie, return all the values with the highest frequency in any order.
#
# Examples 1
# Input:
#
# 5
# / \
# 2 -3
# return [2, -3, 4], since all the values happen only once, return all of them in any order.
# Examples 2
# Input:
#
# 5
# / \
# 2 -5
# return [2], since 2 happens twice, however -5 only occur once.
# Note: You may assume the sum of values in any subtree is in the range of 32-bit signed integer.
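# Quick usage sketch (illustrative only; it relies on the TreeNode class that is
# defined further down in this file, so run it after that definition):
#    root = TreeNode(5)
#    root.left, root.right = TreeNode(2), TreeNode(-3)
#    print(Solution().findFrequentTreeSum(root))   # -> [2, -3, 4] in any order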
# V0
class Solution(object):
def findFrequentTreeSum(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if not root: return []
vals = []
def getSum(root):
if not root:
return 0
s = getSum(root.left) + root.val + getSum(root.right)
vals.append(s)
# remember to return s
return s
getSum(root)
count = collections.Counter(vals)
frequent = max(count.values())
return [x for x, v in count.items() if v == frequent]
# V1
# https://blog.csdn.net/fuxuemingzhu/article/details/79435381
# IDEA : TREE
class Solution(object):
def findFrequentTreeSum(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if not root: return []
vals = []
def getSum(root):
if not root:
return 0
s = getSum(root.left) + root.val + getSum(root.right)
vals.append(s)
# remember to return s
return s
getSum(root)
count = collections.Counter(vals)
frequent = max(count.values())
return [x for x, v in count.items() if v == frequent]
### Test case : dev
# s=Solution()
# assert s.findFrequentTreeSum([]) == []
# assert s.findFrequentTreeSum([5,2,-3]) == [2,-3,4]
# assert s.findFrequentTreeSum([0,1,2]) == [0,1,2]
# V1'
# https://leetcode.com/problems/most-frequent-subtree-sum/discuss/98675/JavaC%2B%2BPython-DFS-Find-Subtree-Sum
# IDEA : TREE
class Solution(object):
def findFrequentTreeSum(self, root):
if root is None: return []
def dfs(node):
if node is None: return 0
s = node.val + dfs(node.left) + dfs(node.right)
count[s] += 1
return s
count = collections.Counter()
dfs(root)
maxCount = max(count.values())
return [s for s in count if count[s] == maxCount]
# V1''
# https://leetcode.com/problems/most-frequent-subtree-sum/discuss/98749/Python-clean-solution-beats-97
# IDEA : TREE
class Solution(object):
def findFrequentTreeSum(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
def helper(root, d):
if not root:
return 0
left = helper(root.left, d)
right = helper(root.right, d)
subtreeSum = left + right + root.val
d[subtreeSum] = d.get(subtreeSum, 0) + 1
return subtreeSum
d = {}
helper(root, d)
mostFreq = 0
ans = []
for key in d:
if d[key] > mostFreq:
mostFreq = d[key]
ans = [key]
elif d[key] == mostFreq:
ans.append(key)
return ans
# V1''''
# https://www.jianshu.com/p/c861361dc20f
# IDEA : TREE
import collections
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def findFrequentTreeSum(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if not root:
return []
self.counter = collections.Counter()
self.postOrderTraverse(root)
maxValue = max(self.counter.values())
return [key for key in self.counter.keys() if self.counter[key] == maxValue]
def postOrderTraverse(self, node):
if node.left:
node.val += self.postOrderTraverse(node.left)
if node.right:
node.val += self.postOrderTraverse(node.right)
self.counter[node.val] += 1
return node.val
# V2
| [
"[email protected]"
] | |
887eb9563d4fdde01071692b7a09b5f1cd7e623e | 508bb4b0877c9c19538291486eefabf8ab8e3bb9 | /ArchDaily/spiders/archdaily.py | 59776b23391c1978f6d3e1de97cb72b03cbf4d4f | [] | no_license | rtfeng/ArchDaily | 41c06c3e13b53a0d45c69927d7facffa6bc41181 | 7c763ea0b759d088b6f556d2da39da9a9329e9e9 | refs/heads/master | 2021-09-14T07:02:14.929212 | 2018-05-09T07:04:26 | 2018-05-09T07:04:26 | 131,031,578 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,634 | py | # -*- coding: utf-8 -*-
import scrapy
import re, os, datetime
from ArchDaily.items import ArchdailyItem
from scrapy import Selector, Request, log
class ArchdailySpider(scrapy.Spider):
name = 'archdaily'
allowed_domains = ['www.archdaily.com']
# start_urls = ['https://www.archdaily.com/']
start_urls = ['https://www.archdaily.com/search/projects/categories/houses']
custom_settings = {
'ITEM_PIPELINES': {
'ArchDaily.pipelines.MongoDBPipeline': 100
},
# For remote DB
# 'MONGO_URI': "mongodb://archdaily_0:[email protected]:27017,cluster0-shard-00-01-naxzz.mongodb.net:27017,cluster0-shard-00-02-naxzz.mongodb.net:27017/test?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin",
# 'MONGO_DATABASE': "archdaily",
# 'MONGO_COLLECTION': "archurl"
# For local DB
'MONGO_URI': "mongodb://127.0.0.1:27017",
'MONGO_DATABASE': "archdaily",
'MONGO_COLLECTION': "archurl"
}
def parse(self, response):
ignore_urls = ['https://www.archdaily.com',
'http://www.archdaily.com',
'//www.archdaily.cn',
'//www.plataformaarquitectura.cl',
'//www.archdaily.mx',
'http://my.archdaily.com/us/labels',
'http://account.archdaily.com/us/users/profile',
'//www.archdaily.com',
'#',
'https://chrome.google.com',
'//boty.archdaily.com']
current_url = response.url
hxs = Selector(response)
# Enter the arch list page
if current_url.startswith('https://www.archdaily.com/search/projects/categories/houses'):
item = ArchdailyItem()
            # Read and parse the current list page
item_anchors = hxs.xpath('//li[@class="afd-search-list__item nrd-search-list__item"]/a')
# log.msg('\n'.join(item_anchors))
for item_anchor in item_anchors:
item['title'] = item_anchor.xpath('h2[@class="afd-search-list__title"]/text()').extract_first()
item['url'] = 'https://www.archdaily.com' + item_anchor.xpath('@href').extract_first()
item['pic'] = item_anchor.xpath('figure/img[@class="afd-search-list__img "]/@src').extract_first().replace('small_jpg', 'large_jpg')
item['id'] = int(re.findall(r"\d{1,}", item['url'])[0])
item['date'] = int(datetime.datetime.now().strftime("%Y%m%d"))
# log.msg(item)
yield item
# Get next page url
next_url = hxs.xpath('//a[@rel="next" and @class="next" and text()="NEXT ›"]/@href').extract_first()
# Check if current page is the last one
# If it is, go back to the first page
if next_url is None:
next_url = hxs.xpath('//a[@class="next" and text()="First"]/@href').extract_first()
# Add .pop() to pop url out of the list
next_url = 'https://www.archdaily.com' + next_url
yield Request(next_url, callback=self.parse)
# all_urls = hxs.xpath('//a/@href').extract()
# for url in all_urls:
# # log.msg(url, level=log.CRITICAL)
# # Reduce regx match heuristic,
# if not url.startswith(tuple(ignore_urls)):
# # log.msg(url, level=log.CRITICAL)
# # In search result page
# if current_url.startswith('https://www.archdaily.com/search/projects/categories/houses'):
# # Get project url
# if re.match('/\d{6}/.*', url):
# # yield Request('https://www.archdaily.com/' + url, callback=self.parse)
# log.msg(url, level=log.CRITICAL)
# # log.msg('--Details--' + url, level=log.CRITICAL)
# # Get next result page
# elif re.match('/search/projects/categories/houses\?page=\d*', url):
# url = 'https://www.archdaily.com' + url
# yield Request(url, callback=self.parse)
# # log.msg('--NextPage--' + url, level=log.CRITICAL)
# # elif re.match('https://www.archdaily.com/\d{6}/.*', current_url):
# # # Select title
# # items = hxs.select('//div[@class="afd-title-big afd-title-big--left afd-title-big--full afd-title-big--bmargin-small afd-relativeposition"]/div')
#
| [
"[email protected]"
] | |
6331adc8b57af06bf4c3d8ec2aee6b208e54a461 | e582e61f60434f4f07ed7f0dcfe0a49bfa151a1a | /model_tm.py | fedb983e37b60e01cd49d35efdc5ae9b3a0d217d | [] | no_license | Rosaniline/CarND-BehavioralCloning | 7746fc49f0df82e0a06560280cc00f56d62a093f | 34232d356c7edd93a64deed073a50cf7cf9ab3a4 | refs/heads/master | 2020-03-27T22:23:30.049793 | 2018-09-04T14:20:47 | 2018-09-04T14:20:47 | 147,226,748 | 0 | 0 | null | 2018-09-03T16:02:45 | 2018-09-03T16:02:45 | null | UTF-8 | Python | false | false | 7,264 | py |
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import os
import sys
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
import cv2
from keras.models import load_model
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout,Convolution2D,MaxPooling2D,Flatten,Lambda
from keras.optimizers import Adam
from keras.models import model_from_json
import json
import tensorflow as tf
matplotlib.style.use('ggplot')
# In[2]:
data_dir = './data/mydata'
data_csv = '/driving_log.csv'
model_json = 'model.json'
model_weights = 'model.h5'
#col_names = ['center', 'left','right','steering','throttle','brake','speed']
#col_names = ['center','steering','throttle','brake','speed','time','lap']
training_dat = pd.read_csv(data_dir+data_csv,names=None)
training_dat.head()
# In[3]:
training_dat[['center']]
X_train = training_dat[['center']]
Y_train = training_dat['steering']
X_train, X_val, Y_train, Y_val = train_test_split(X_train, Y_train, test_size=0.1, random_state=42)
X_train = X_train['center'].as_matrix()
X_val = X_val['center'].as_matrix()
Y_val = Y_val.as_matrix()
Y_train = Y_train.as_matrix()
Y_train = Y_train.astype(np.float32)
Y_val = Y_val.astype(np.float32)
# In[4]:
def read_next_image(m, X_train, Y_train):
# assume the side cameras are about 1.2 meters off the center and the offset to the left or right
    # should be corrected over the next dist meters, calculate the change in steering control
# using tan(alpha)=alpha
offset=1.0
dist=20.0
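    # A minimal sketch of how that correction would be applied (an assumption --
    # it is not used below, since only the centre-camera image is read here):
    #   dsteering = offset / dist          # tan(alpha) ~ alpha for small angles
    #   left image:  steering + dsteering,   right image: steering - dsteering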
steering = Y_train[m]
image = plt.imread(X_train[m].strip(' '))
image = cv2.resize(image, (64, 64), interpolation=cv2.INTER_CUBIC)
return image, steering
def random_crop(image,steering=0.0,tx_lower=-20,tx_upper=20,ty_lower=-2,ty_upper=2,rand=True):
# we will randomly crop subsections of the image and use them as our data set.
# also the input to the network will need to be cropped, but of course not randomly and centered.
shape = image.shape
col_start,col_end =abs(tx_lower),shape[1]-tx_upper
horizon=60;
bonnet=136
if rand:
tx= np.random.randint(tx_lower,tx_upper+1)
ty= np.random.randint(ty_lower,ty_upper+1)
else:
tx,ty=0,0
# print('tx = ',tx,'ty = ',ty)
random_crop = image[horizon+ty:bonnet+ty,col_start+tx:col_end+tx,:]
image = cv2.resize(random_crop,(64,64),cv2.INTER_AREA)
# the steering variable needs to be updated to counteract the shift
if tx_lower != tx_upper:
dsteering = -tx/(tx_upper-tx_lower)/3.0
else:
dsteering = 0
steering += dsteering
return image,steering
def random_shear(image,steering,shear_range):
rows,cols,ch = image.shape
dx = np.random.randint(-shear_range,shear_range+1)
# print('dx',dx)
random_point = [cols/2+dx,rows/2]
pts1 = np.float32([[0,rows],[cols,rows],[cols/2,rows/2]])
pts2 = np.float32([[0,rows],[cols,rows],random_point])
dsteering = dx/(rows/2) * 360/(2*np.pi*25.0) / 6.0
M = cv2.getAffineTransform(pts1,pts2)
image = cv2.warpAffine(image,M,(cols,rows),borderMode=1)
steering +=dsteering
return image,steering
def random_brightness(image):
image1 = cv2.cvtColor(image,cv2.COLOR_RGB2HSV)
random_bright = 0.8 + 0.4*(2*np.random.uniform()-1.0)
image1[:,:,2] = image1[:,:,2]*random_bright
image1 = cv2.cvtColor(image1,cv2.COLOR_HSV2RGB)
return image1
def random_flip(image,steering):
coin=np.random.randint(0,2)
if coin==0:
image,steering=cv2.flip(image,1),-steering
return image,steering
def generate_training_example(X_train,Y_train):
m = np.random.randint(0,len(Y_train))
# print('training example m :',m)
# lcr = np.random.randint(0,3)
#lcr = 1
# print('left_center_right :',lcr)
image,steering = read_next_image(m, X_train, Y_train)
# print('steering :',steering)
# plt.imshow(image)
# image,steering = random_shear(image,steering,shear_range=100)
# print('steering :',steering)
# plt.figure()
# plt.imshow(image)
# image,steering = random_crop(image,steering,tx_lower=-20,tx_upper=20,ty_lower=-10,ty_upper=10)
# print('steering :',steering)
# plt.figure()
# plt.imshow(image)
# image,steering = random_flip(image,steering)
# print('steering :',steering)
# plt.figure()
# plt.imshow(image)
# image = random_brightness(image)
# plt.figure()
# plt.imshow(image)
return image,steering
def get_validation_set(X_val,Y_val):
X = np.zeros((len(X_val),64,64,3))
Y = np.zeros(len(X_val))
for i in range(len(X_val)):
x,y = read_next_image(i, X_val, Y_val)
X[i],Y[i] = random_crop(x,y,tx_lower=0,tx_upper=0,ty_lower=0,ty_upper=0)
return X,Y
def generate_train_batch(X_train,Y_train,batch_size = 32):
batch_images = np.zeros((batch_size, 64, 64, 3))
batch_steering = np.zeros(batch_size)
while 1:
for i_batch in range(batch_size):
x,y = generate_training_example(X_train,Y_train)
batch_images[i_batch] = x
batch_steering[i_batch] = y
yield batch_images, batch_steering
# In[5]:
batch_size=200
train_generator = generate_train_batch(X_train,Y_train,batch_size)
X_val,Y_val = get_validation_set(X_val,Y_val)
print('X_train data type :',X_train.dtype)
print('Y_train data type :',Y_train.dtype)
print('X_val data type :',X_val.dtype)
print('Y_val data type :',Y_val.dtype)
# In[6]:
model = Sequential()
model.add(Lambda(lambda x: x/127.5 - 1.0,input_shape=(64,64,3)))
model.add(Convolution2D(32, 8,8 ,border_mode='same', subsample=(4,4)))
model.add(Activation('relu'))
model.add(Convolution2D(64, 8,8 ,border_mode='same',subsample=(4,4)))
model.add(Activation('relu',name='relu2'))
model.add(Convolution2D(128, 4,4,border_mode='same',subsample=(2,2)))
model.add(Activation('relu'))
model.add(Convolution2D(128, 2,2,border_mode='same',subsample=(1,1)))
model.add(Activation('relu'))
model.add(Flatten())
model.add(Dropout(0.5))
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(128))
model.add(Dense(1))
model.summary()
# In[7]:
adam = Adam(lr=1e-4, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)
restart=True
# if os.path.isfile(model_json) and restart:
# try:
# with open(model_json) as jfile:
# model = model_from_json(json.load(jfile))
# model.load_weights(model_weights)
# print('loading trained model ...')
# except Exception as e:
# print('Unable to load model', 'model_name', ':', e)
# raise
model.compile(optimizer=adam, loss='mse')
nb_epoch=10
history = model.fit_generator(train_generator,
samples_per_epoch=20000, nb_epoch=nb_epoch,
validation_data=(X_val,Y_val),verbose=1)
json_string = model.to_json()
print('Save the model')
try:
os.remove(model_json)
os.remove(model_weights)
except OSError:
pass
with open(model_json, 'w') as outfile:
json.dump(json_string, outfile)
model.save_weights(model_weights)
print('Done')
| [
"[email protected]"
] | |
6a4b2e145f40c959e23399889cfea28fad1ad7ba | f8032055a1d0bfcc328ab52409d8451c1f5cee64 | /src/blog/migrations/0001_initial.py | bdab009279ee9ac34329d34bdac97e39ed1dcb72 | [] | no_license | SpeedSick/onekbtu-django | f1db13398959b51c7ce3675fe90e845cef8ab064 | 1a5198af36980964d65b7d7cbe1b39dfc6dc305d | refs/heads/master | 2020-04-20T13:42:05.363750 | 2019-02-02T20:58:16 | 2019-02-02T20:58:16 | 168,875,788 | 0 | 0 | null | 2019-02-02T20:42:18 | 2019-02-02T20:42:17 | null | UTF-8 | Python | false | false | 544 | py | # Generated by Django 2.1.4 on 2019-01-02 17:21
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=32)),
('text', models.TextField(max_length=128)),
],
),
]
| [
"[email protected]"
] | |
1e56f13587ecadb0d8cf92d0896eb30fa2dd6514 | 2b8bb3cf9c140e363592e19398fb639a40d321e7 | /WarehouseProfiles.py | 897c2e7cc28cbfd4556719ff94fa9769c7665b77 | [] | no_license | VinceBa/Warehouse-Profiles | c68370284f7a8c789f7449bf9dc99272fb2eaaed | e2fae223408bd6f5915c6e7e7a0bae4252edcb9b | refs/heads/main | 2023-03-20T23:25:45.720234 | 2023-03-09T17:52:13 | 2023-03-09T17:52:13 | 328,413,442 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48,116 | py | #
#
# A python macro for Freecad
# to easily and quickly build profiles
# written by Vincent Ballu
# any remarks: [email protected]
# released as freeware: use at your own risk.
# check all dimensions before any professional use!
# suitable use:
# automatic: select an edge in the 3D display (sketch, line...) then launch the macro,
# choose the requested profile, adjust the options and click OK
# ->length and placement follow the edge
# manual: run the macro, adjust the options and click OK
# ->attachment and placement can be changed as for a Part object
# ->length can still be changed manually afterwards (Profile Length parameter)
# ->length = 0 provides a shape to sweep or extrude
# versions:
# 10/01/2021 : First release
# 17/01/2021 : Pipe and round bar added
# 23/01/2021 : Parameters for structure container changed
# Box layout changed
# Adding the size to the profile name enabled
# 23/01/2021 : Reduce code
# Negative bevels are enabled
# 27/01/2021 : Bevels for centered profiles enabled
# 31/01/2021 : Bevel rotate option almost perfect :-)
# 07/02/2021 : Inverted angle bevel
# 08/02/2021 : Separate Rotate bevels
# 11/02/2021 : Make profiles on the fly (without closing the box)
# 16/02/2021 : Allow 2nd bevel as rotation or cut
# Limit bevels to 60°
#
# To do: T profiles
# Aluminium profiles
# and More profiles!
# icons
# limit attachment to edges
from PySide import QtCore, QtGui
import FreeCAD, FreeCADGui, math
Vec = FreeCAD.Base.Vector
global path, file_len
class Box(QtGui.QDialog):
def __init__(self):
fam_init = 12
ind_init = 1
self.fams = recherche_fams()
self.fam = self.fams[fam_init]
self.dims = recherche_dims(self.fam)
self.dim = self.dims[ind_init]
self.MakeFillet = True
self.ReverseAttachement = False
self.HeightCentered = False
self.WidthCentered = False
self.SizeName = False
self.BevelsCombined = False
self.LenghtInit = 100
self.update_data()
self.o = SelObserver()
FreeCADGui.Selection.addObserver(self.o)
super(Box,self).__init__(Gui.getMainWindow(), QtCore.Qt.Tool)
self.initUI()
def initUI(self):
g_win_width = 270
g_win_height = 400
g_xLoc = 250
g_yLoc = 250
self.setGeometry(g_xLoc,g_yLoc,g_win_width,g_win_height)
self.setWindowTitle("Profile Warehouse")
# self.setWindowFlags(QtCore.Qt.WindowStaysOnTopHint)
QtCore.Qt.WA_DeleteOnClose
# self.setMouseTracking(True)
# lay = QtGui.QGridLayout(self)
#Labels
self.label_title1 = QtGui.QLabel("Family", self)
newFont=QtGui.QFont(self.label_title1.font())
newFont.setPointSize(10)
self.label_title1.setFont(newFont)
self.label_title1.move(50, 8)
self.label_title2 = QtGui.QLabel("Size", self)
newFont=QtGui.QFont(self.label_title2.font())
newFont.setPointSize(10)
self.label_title2.setFont(newFont)
self.label_title2.move(190, 8)
self.label_height = QtGui.QLabel("Height or diameter", self)
self.label_height.move (10, 65)
self.label_width = QtGui.QLabel("Width", self)
self.label_width.move (10, 90)
self.label_mainthickness = QtGui.QLabel("Main Thickness", self)
self.label_mainthickness.move (10, 115)
self.label_flangethickness = QtGui.QLabel("Flange Thickness", self)
self.label_flangethickness.move (10, 140)
self.label_length = QtGui.QLabel("Length", self)
self.label_length.move (10, 165)
self.label_length = QtGui.QLabel("Large radius", self)
self.label_length.move (10, 190)
self.label_length = QtGui.QLabel("Small radius", self)
self.label_length.move (10, 215)
self.label_attach= QtGui.QLabel("",self)
newFont=QtGui.QFont(self.label_attach.font())
newFont.setPointSize(10)
self.label_attach.setFont(newFont)
self.label_attach.move (10, 250)
self.update_selection("","")
# checkboxes
self.checkbox1 = QtGui.QCheckBox("Make Fillets", self)
self.checkbox1.setChecked(True)
self.checkbox1.clicked.connect(self.onCheckbox1)
self.checkbox1.move(10,275)
self.checkbox2 = QtGui.QCheckBox("Reverse Attachment", self)
self.checkbox2.clicked.connect(self.onCheckbox2)
self.checkbox2.move(140,275)
self.checkbox3 = QtGui.QCheckBox("Height Centered", self)
self.checkbox3.clicked.connect(self.onCheckbox3)
self.checkbox3.move(10,300)
self.checkbox4 = QtGui.QCheckBox("Width Centered", self)
self.checkbox4.clicked.connect(self.onCheckbox4)
self.checkbox4.move(140,300)
self.checkbox5 = QtGui.QCheckBox("Size in object name", self)
self.checkbox5.clicked.connect(self.onCheckbox5)
self.checkbox5.move(10,325)
self.checkbox6 = QtGui.QCheckBox("Combined Bevels", self)
self.checkbox6.clicked.connect(self.onCheckbox6)
self.checkbox6.move(140,325)
# Combo boxes
# familly
self.ComboFamily = QtGui.QComboBox(self)
self.ComboFamily.setToolTip("Choose kind of profile")
self.ComboFamily.addItems(self.fams)
self.ComboFamily.setCurrentIndex(self.fams.index(self.fam))
self.ComboFamily.activated[str].connect(self.onComboFamily_Changed)
self.ComboFamily.move (10,30)
# Size
self.ComboSize = QtGui.QComboBox(self)
self.ComboSize.setToolTip("Choose size")
self.ComboSize.addItems(self.dims)
self.ComboSize.setCurrentIndex(self.dims.index(self.dim))
self.ComboSize.activated[str].connect(self.onComboSize_Changed)
self.ComboSize.move (160,30)
# Spin Boxes
self.SB_height = QtGui.QDoubleSpinBox(self)
self.SB_height.setToolTip ("Adjust height")
self.SB_height.setDecimals(1)
self.SB_height.setMinimum(0.1)
self.SB_height.setMaximum(1000.0)
self.SB_height.setSingleStep(0.1)
self.SB_height.setProperty("value",self.P_height)
self.SB_height.setObjectName("height")
self.SB_height.move(160,60)
self.SB_width = QtGui.QDoubleSpinBox(self)
self.SB_width.setToolTip ("Adjust width")
self.SB_width.setDecimals(1)
self.SB_width.setMinimum(0.0)
self.SB_width.setMaximum(1000.0)
self.SB_width.setSingleStep(0.1)
self.SB_width.setProperty("value",self.P_width)
self.SB_width.setObjectName("width")
self.SB_width.move(160,85)
self.SB_mainthickness = QtGui.QDoubleSpinBox(self)
self.SB_mainthickness.setToolTip ("Adjust main or web thickness")
self.SB_mainthickness.setDecimals(2)
self.SB_mainthickness.setMinimum(0)
self.SB_mainthickness.setMaximum(100.0)
self.SB_mainthickness.setSingleStep(0.01)
self.SB_mainthickness.setProperty("value",self.P_mainthickness)
self.SB_mainthickness.setObjectName("mainthickness")
self.SB_mainthickness.move(160,110)
self.SB_flangethickness = QtGui.QDoubleSpinBox(self)
self.SB_flangethickness.setToolTip ("Adjust flange thickness")
self.SB_flangethickness.setDecimals(1)
self.SB_flangethickness.setMinimum(0)
self.SB_flangethickness.setMaximum(100.0)
self.SB_flangethickness.setSingleStep(0.1)
self.SB_flangethickness.setProperty("value",self.P_flangethickness)
self.SB_flangethickness.setObjectName("flangethickness")
self.SB_flangethickness.move(160,135)
self.SB_length = QtGui.QDoubleSpinBox(self)
self.SB_length.setToolTip ("Set length if not attached")
self.SB_length.setDecimals(1)
self.SB_length.setMinimum(0)
self.SB_length.setMaximum(24000.0)
self.SB_length.setSingleStep(1)
self.SB_length.setProperty("value",self.LenghtInit)
self.SB_length.setObjectName("length")
self.SB_length.move(160,160)
self.SB_Radius1 = QtGui.QDoubleSpinBox(self)
self.SB_Radius1.setToolTip ("Adjust Radius 1")
self.SB_Radius1.setDecimals(1)
self.SB_Radius1.setMinimum(0)
self.SB_Radius1.setMaximum(50)
self.SB_Radius1.setSingleStep(0.1)
self.SB_Radius1.setProperty("value",self.P_radius1)
self.SB_Radius1.setObjectName("radius1")
self.SB_Radius1.move(160,185)
self.SB_Radius2 = QtGui.QDoubleSpinBox(self)
self.SB_Radius2.setToolTip ("Adjust Radius 2")
self.SB_Radius2.setDecimals(1)
self.SB_Radius2.setMinimum(0)
self.SB_Radius2.setMaximum(50)
self.SB_Radius2.setSingleStep(0.1)
self.SB_Radius2.setProperty("value",self.P_radius2)
self.SB_Radius2.setObjectName("radius2")
self.SB_Radius2.move(160,210)
# cancel button
cancelButton = QtGui.QPushButton('Close', self)
cancelButton.clicked.connect(self.onCancel)
cancelButton.setAutoDefault(True)
cancelButton.move(50, 350)
# OK button
okButton = QtGui.QPushButton('Create', self)
okButton.clicked.connect(self.onOk)
okButton.move(150, 350)
self.show()
def onCancel(self):
FreeCADGui.Selection.removeObserver(self.o)
self.close()
def onOk(self):
if self.SizeName: name = self.fam + "_" + self.dim + "_"
else: name = self.fam
obj=doc.addObject("Part::FeaturePython",name)
obj.addExtension("Part::AttachExtensionPython")
obj.ViewObject.Proxy=0
viewObject = FreeCADGui.ActiveDocument.getObject(obj.Name)
viewObject.DisplayMode = "Flat Lines"
linksub = ""
try:
selobj = FreeCADGui.Selection.getSelectionEx()[0]
linksub = (selobj.Object, (selobj.SubElementNames[0]))
selsubobj = selobj.SubObjects[0]
feature = selobj.Object
edgeName = selobj.SubElementNames[0]
l = selsubobj.Length
obj.MapMode = "NormalToEdge"
obj.Support = (feature, edgeName)
if self.ReverseAttachement == False:
obj.MapPathParameter = 1
else:
obj.MapPathParameter = 0
obj.MapReversed = True
except: print ("no edge selected")
w = self.SB_width.value()
h = self.SB_height.value()
ft = self.SB_flangethickness.value()
mt = self.SB_mainthickness.value()
r1 = self.SB_Radius1.value()
r2 = self.SB_Radius2.value()
if linksub=="": l = self.SB_length.value()
p = float(self.Weight)
        if self.fam == "Flat Sections" or self.fam == "Square" : self.MakeFillet = False
Profile(obj,linksub,w,h,mt,ft,r1,r2,l,p,self.MakeFillet,self.HeightCentered,self.WidthCentered,self.fam,self.BevelsCombined)
try: d = selobj.Document
except: d = FreeCAD.activeDocument()
d.recompute()
FreeCADGui.Selection.removeObserver(self.o)
def onCheckbox1(self,state):
self.MakeFillet = state
def onCheckbox2(self,state):
self.ReverseAttachement = state
def onCheckbox3(self,state):
self.HeightCentered = state
def onCheckbox4(self,state):
self.WidthCentered = state
def onCheckbox5(self,state):
self.SizeName = state
def onCheckbox6(self,state):
self.BevelsCombined = state
def onComboFamily_Changed(self,texte):
self.fam = texte
self.dims = recherche_dims(self.fam)
self.dim = self.dims[0]
self.ComboSize.clear()
self.ComboSize.addItems(self.dims)
self.ComboSize.setCurrentIndex(self.dims.index(self.dim))
self.update_data()
self.update_box()
def onComboSize_Changed(self,texte):
self.dim = texte
self.update_data()
self.update_box()
def update_data(self):
self.data = extrait_data(self.fam,self.dim)
try: self.P_height = self.data[recherche_ind(self.fam,"Height")]
except:self.P_height = 0
try: self.P_width = self.data[recherche_ind(self.fam,"Width")]
except:self.P_width = 0
try: self.P_mainthickness = self.data[recherche_ind(self.fam,"Thickness")]
except:self.P_mainthickness = 0
try: self.P_flangethickness = self.data[recherche_ind(self.fam,"Flange Thickness")]
except:self.P_flangethickness = 0
try: self.P_radius1 = self.data[recherche_ind(self.fam,"Radius1")]
except:self.P_radius1 = 0
try: self.P_radius2 = self.data[recherche_ind(self.fam,"Radius2")]
except:self.P_radius2 = 0
try: self.Weight = self.data[recherche_ind(self.fam,"Weight")]
except:self.Weight = 0
def update_box(self):
self.SB_height.setProperty ("value",self.P_height)
self.SB_width.setProperty ("value",self.P_width)
self.SB_mainthickness.setProperty ("value",self.P_mainthickness)
self.SB_flangethickness.setProperty ("value",self.P_flangethickness)
self.SB_length.setProperty ("value",self.LenghtInit)
self.SB_Radius1.setProperty ("value",self.P_radius1)
self.SB_Radius2.setProperty ("value",self.P_radius2)
def update_selection(self,new_obj,new_sub):
try: # first run
selobj = FreeCADGui.Selection.getSelectionEx()[0]
edgeName = selobj.SubElementNames[0]
sel = FreeCADGui.Selection.getSelectionEx()
objname = sel[0].ObjectName
nom = "Attachment: "+ objname + " / " + edgeName
except:
nom = "Attachment: None "
if new_obj and new_sub: nom = "Attachment: " + new_obj + " / " + new_sub
self.label_attach.setText(nom)
        print("updated attachment :",nom)
class SelObserver():
def addSelection(self,doc,obj,sub,other):
form.update_selection(obj,sub)
def clearSelection(self,other):
form.update_selection("","")
class Profile:
def __init__(self,obj,linksub,init_w,init_h,init_mt,init_ft,init_r1,init_r2,init_lenobj,init_wg,init_mf,init_hc,init_wc,type,bevels_combined):
obj.addProperty("App::PropertyFloat","ProfileHeight","Profile","",).ProfileHeight = init_h
obj.addProperty("App::PropertyFloat","ProfileWidth","Profile","").ProfileWidth = init_w
obj.addProperty("App::PropertyFloat","ProfileLength","Profile","").ProfileLength = init_lenobj
        obj.addProperty("App::PropertyFloat","Thickness","Profile","Thickness of the whole profile or of the web").Thickness = init_mt
obj.addProperty("App::PropertyFloat","ThicknessFlange","Profile","Thickness of the flanges").ThicknessFlange = init_ft
obj.addProperty("App::PropertyFloat","RadiusLarge","Profile","Large radius").RadiusLarge = init_r1
obj.addProperty("App::PropertyFloat","RadiusSmall","Profile","Small radius").RadiusSmall = init_r2
        obj.addProperty("App::PropertyBool","MakeFillet","Profile","Whether to draw the fillets or not").MakeFillet = init_mf
if bevels_combined == False:
obj.addProperty("App::PropertyFloat","BevelStartCut1","Profile","Bevel on First axle at the start of the profile").BevelStartCut1 = 0
obj.addProperty("App::PropertyFloat","BevelStartCut2","Profile","Rotate the cut on Second axle at the start of the profile").BevelStartCut2 = 0
obj.addProperty("App::PropertyFloat","BevelEndCut1","Profile","Bevel on First axle at the end of the profile").BevelEndCut1 = 0
obj.addProperty("App::PropertyFloat","BevelEndCut2","Profile","Rotate the cut on Second axle at the end of the profile").BevelEndCut2 = 0
if bevels_combined == True:
obj.addProperty("App::PropertyFloat","BevelStartCut","Profile","Bevel at the start of the profile").BevelStartCut = 0
obj.addProperty("App::PropertyFloat","BevelStartRotate","Profile","Rotate the second cut on Profile axle").BevelStartRotate = 0
obj.addProperty("App::PropertyFloat","BevelEndCut","Profile","Bevel on First axle at the end of the profile").BevelEndCut = 0
obj.addProperty("App::PropertyFloat","BevelEndRotate","Profile","Rotate the second cut on Profile axle").BevelEndRotate = 0
obj.addProperty("App::PropertyFloat","ApproxWeight","Base","Approximate weight in Kilogram").ApproxWeight= init_wg*init_lenobj/1000
obj.addProperty("App::PropertyBool","CenteredOnHeight","Profile","Choose corner or profile centre as origin").CenteredOnHeight = init_hc
obj.addProperty("App::PropertyBool","CenteredOnWidth","Profile","Choose corner or profile centre as origin").CenteredOnWidth = init_wc
if type == "UPE":
obj.addProperty("App::PropertyBool","UPN","Profile","UPE style or UPN style").UPN = False
obj.addProperty("App::PropertyFloat","FlangeAngle","Profile").FlangeAngle = 4.57
if type == "UPN":
obj.addProperty("App::PropertyBool","UPN","Profile","UPE style or UPN style").UPN = True
obj.addProperty("App::PropertyFloat","FlangeAngle","Profile").FlangeAngle = 4.57
if type == "IPE" or type == "HEA" or type == "HEB" or type == "HEM":
obj.addProperty("App::PropertyBool","IPN","Profile","IPE/HEA style or IPN style").IPN = False
obj.addProperty("App::PropertyFloat","FlangeAngle","Profile").FlangeAngle = 8
if type == "IPN":
obj.addProperty("App::PropertyBool","IPN","Profile","IPE/HEA style or IPN style").IPN = True
obj.addProperty("App::PropertyFloat","FlangeAngle","Profile").FlangeAngle = 8
obj.addProperty("App::PropertyLength","Width","Structure","Parameter for structure").Width = obj.ProfileWidth # Property for structure
obj.addProperty("App::PropertyLength","Height","Structure","Parameter for structure").Height = obj.ProfileLength # Property for structure
obj.addProperty("App::PropertyLength","Length","Structure","Parameter for structure",).Length = obj.ProfileHeight # Property for structure
obj.setEditorMode("Width", 1) # user doesn't change !
obj.setEditorMode("Height", 1)
obj.setEditorMode("Length", 1)
if linksub: obj.addProperty("App::PropertyLinkSub","Target","Base","Target face").Target = linksub
self.WM = init_wg
self.type = type
self.BevelCombined = bevels_combined
obj.Proxy = self
def onChanged(self, obj, p):
if p == "ProfileWidth" or p == "ProfileHeight" or p == "Thickness" \
or p == "FilletRadius" or p == "Centered" or p == "Length"\
or p == "BevelStartCut1" or p == "BevelEndCut1" \
or p == "BevelStartCut2" or p == "BevelEndCut2" \
or p == "BevelStartCut" or p == "BevelEndCut" \
or p == "BevelStartRotate" or p == "BevelEndRotate" :
self.execute(obj)
def execute(self, obj):
try:
L = obj.Target[0].getSubObject(obj.Target[1][0]).Length
obj.ProfileLength = L
except:
L = obj.ProfileLength
obj.ApproxWeight = self.WM*L/1000
W = obj.ProfileWidth
H = obj.ProfileHeight
obj.Height = L
pl = obj.Placement
TW = obj.Thickness
TF = obj.ThicknessFlange
R = obj.RadiusLarge
r = obj.RadiusSmall
d = Vec(0,0,1)
if W == 0 : W = H
w = h = 0
if self.BevelCombined == False:
if obj.BevelStartCut1>60 : obj.BevelStartCut1 = 60
if obj.BevelStartCut1<-60 : obj.BevelStartCut1 = -60
if obj.BevelStartCut2>60 : obj.BevelStartCut2 = 60
if obj.BevelStartCut2<-60 : obj.BevelStartCut2 = -60
if obj.BevelEndCut1>60 : obj.BevelEndCut1 = 60
if obj.BevelEndCut1<-60 : obj.BevelEndCut1 = -60
if obj.BevelEndCut2>60 : obj.BevelEndCut2 = 60
if obj.BevelEndCut2<-60 : obj.BevelEndCut2 = -60
B1Y = obj.BevelStartCut1
B2Y = -obj.BevelEndCut1
B1X = -obj.BevelStartCut2
B2X = obj.BevelEndCut2
B1Z = 0
B2Z = 0
if self.BevelCombined == True:
if obj.BevelStartCut>60 : obj.BevelStartCut = 60
if obj.BevelStartCut<-60 : obj.BevelStartCut = -60
if obj.BevelStartRotate>60 : obj.BevelStartRotate = 60
if obj.BevelStartRotate<-60 : obj.BevelStartRotate = -60
if obj.BevelEndCut>60 : obj.BevelEndCut = 60
if obj.BevelEndCut<-60 : obj.BevelEndCut = -60
if obj.BevelEndRotate>60 : obj.BevelEndRotate = 60
if obj.BevelEndRotate<-60 : obj.BevelEndRotate = -60
B1Y = obj.BevelStartCut
B1Z = -obj.BevelStartRotate
B2Y = -obj.BevelEndCut
B2Z = -obj.BevelEndRotate
B1X = 0
B2X = 0
if obj.CenteredOnWidth == True: w = -W/2
if obj.CenteredOnHeight == True: h = -H/2
if self.type == "Equal Leg Angles" or self.type == "Unequal Leg Angles":
if obj.MakeFillet == False:
p1 = Vec(0+w,0+h,0)
p2 = Vec(0+w,H+h,0)
p3 = Vec(TW+w,H+h,0)
p4 = Vec(TW+w,TW+h,0)
p5 = Vec(W+w,TW+h,0)
p6 = Vec(W+w,0+h,0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p2, p3)
L3 = Part.makeLine(p3, p4)
L4 = Part.makeLine(p4, p5)
L5 = Part.makeLine(p5, p6)
L6 = Part.makeLine(p6, p1)
wire1 = Part.Wire([L1,L2,L3,L4,L5,L6])
if obj.MakeFillet == True:
p1 = Vec(0+w,0+h,0)
p2 = Vec(0+w,H+h,0)
p3 = Vec(TW-r+w,H+h,0)
p4 = Vec(TW+w,H-r+h,0)
p5 = Vec(TW+w,TW+R+h,0)
p6 = Vec(TW+R+w,TW+h,0)
p7 = Vec(W-r+w,TW+h,0)
p8 = Vec(W+w,TW-r+h,0)
p9 = Vec(W+w,0+h,0)
c1 = Vec(TW-r+w,H-r+h,0)
c2 = Vec(TW+R+w,TW+R+h,0)
c3 = Vec(W-r+w,TW-r+h,0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p2, p3)
L3 = Part.makeLine(p4, p5)
L4 = Part.makeLine(p6, p7)
L5 = Part.makeLine(p8, p9)
L6 = Part.makeLine(p9, p1)
A1 = Part.makeCircle(r,c1,d,0,90)
A2 = Part.makeCircle(R,c2,d,180,270)
A3 = Part.makeCircle(r,c3,d,0,90)
wire1 = Part.Wire([L1,L2,A1,L3,A2,L4,A3,L5,L6])
p = Part.Face(wire1)
if self.type == "Flat Sections" or self.type == "Square" or self.type == "Square Hollow" or self.type == "Rectangular Hollow":
wire1=wire2=0
if self.type == "Square" or self.type == "Flat Sections":
p1 = Vec(0+w,0+h,0)
p2 = Vec(0+w,H+h,0)
p3 = Vec(W+w,H+h,0)
p4 = Vec(W+w,0+h,0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p2, p3)
L3 = Part.makeLine(p3, p4)
L4 = Part.makeLine(p4, p1)
wire1 = Part.Wire([L1,L2,L3,L4])
if obj.MakeFillet == False and (self.type == "Square Hollow" or self.type == "Rectangular Hollow") :
p1 = Vec(0+w,0+h,0)
p2 = Vec(0+w,H+h,0)
p3 = Vec(W+w,H+h,0)
p4 = Vec(W+w,0+h,0)
p5 = Vec(TW+w,TW+h,0)
p6 = Vec(TW+w,H+h-TW,0)
p7 = Vec(W+w-TW,H+h-TW,0)
p8 = Vec(W+w-TW,TW+h,0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p2, p3)
L3 = Part.makeLine(p3, p4)
L4 = Part.makeLine(p4, p1)
L5 = Part.makeLine(p5, p6)
L6 = Part.makeLine(p6, p7)
L7 = Part.makeLine(p7, p8)
L8 = Part.makeLine(p8, p5)
wire1 = Part.Wire([L1,L2,L3,L4])
wire2 = Part.Wire([L5,L6,L7,L8])
if obj.MakeFillet == True and (self.type == "Square Hollow" or self.type == "Rectangular Hollow") :
p1 = Vec(0+w, 0+R+h, 0)
p2 = Vec(0+w, H-R+h, 0)
p3 = Vec(R+w, H+h, 0)
p4 = Vec(W-R+w,H+h, 0)
p5 = Vec(W+w, H-R+h, 0)
p6 = Vec(W+w, R+h, 0)
p7 = Vec(W-R+w,0+h, 0)
p8 = Vec(R+w, 0+h, 0)
c1 = Vec(R+w, R+h, 0)
c2 = Vec(R+w, H-R+h, 0)
c3 = Vec(W-R+w,H-R+h, 0)
c4 = Vec(W-R+w,R+h, 0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p3, p4)
L3 = Part.makeLine(p5, p6)
L4 = Part.makeLine(p7, p8)
A1 = Part.makeCircle(R,c1,d,180,270)
A2 = Part.makeCircle(R,c2,d,90,180)
A3 = Part.makeCircle(R,c3,d,0,90)
A4 = Part.makeCircle(R,c4,d,270,0)
wire1 = Part.Wire([L1,A2,L2,A3,L3,A4,L4,A1])
p1 = Vec(TW+w, TW+r+h, 0)
p2 = Vec(TW+w, H-TW-r+h, 0)
p3 = Vec(TW+r+w, H-TW+h, 0)
p4 = Vec(W-TW-r+w,H-TW+h, 0)
p5 = Vec(W-TW+w, H-TW-r+h, 0)
p6 = Vec(W-TW+w, TW+r+h, 0)
p7 = Vec(W-TW-r+w,TW+h, 0)
p8 = Vec(TW+r+w, TW+h, 0)
c1 = Vec(TW+r+w, TW+r+h, 0)
c2 = Vec(TW+r+w, H-TW-r+h, 0)
c3 = Vec(W-TW-r+w,H-TW-r+h, 0)
c4 = Vec(W-TW-r+w,TW+r+h, 0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p3, p4)
L3 = Part.makeLine(p5, p6)
L4 = Part.makeLine(p7, p8)
A1 = Part.makeCircle(r,c1,d,180,270)
A2 = Part.makeCircle(r,c2,d,90,180)
A3 = Part.makeCircle(r,c3,d,0,90)
A4 = Part.makeCircle(r,c4,d,270,0)
wire2 = Part.Wire([L1,A2,L2,A3,L3,A4,L4,A1])
if wire2:
p1 = Part.Face(wire1)
p2 = Part.Face(wire2)
p = p1.cut(p2)
else:
p = Part.Face(wire1)
if self.type == "UPE" or self.type == "UPN":
            if obj.MakeFillet == False: # UPE or UPN without fillets
Yd = 0
if obj.UPN == True: Yd = (W/4)*math.tan(math.pi*obj.FlangeAngle/180)
p1 = Vec(w, h,0)
p2 = Vec(w, H+h,0)
p3 = Vec(w+W, H+h,0)
p4 = Vec(W+w, h,0)
p5 = Vec(W+w+Yd-TW, h,0)
p6 = Vec(W+w-Yd-TW, H+h-TF,0)
p7 = Vec(w+TW+Yd, H+h-TF,0)
p8 = Vec(w+TW-Yd, h,0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p2, p3)
L3 = Part.makeLine(p3, p4)
L4 = Part.makeLine(p4, p5)
L5 = Part.makeLine(p5, p6)
L6 = Part.makeLine(p6, p7)
L7 = Part.makeLine(p7, p8)
L8 = Part.makeLine(p8, p1)
wire1 = Part.Wire([L1,L2,L3,L4,L5,L6,L7,L8])
            if obj.MakeFillet == True and obj.UPN == False: # UPE with fillets
p1 = Vec(w, h,0)
p2 = Vec(w, H+h,0)
p3 = Vec(w+W, H+h,0)
p4 = Vec(W+w, h,0)
p5 = Vec(W+w-TW+r, h,0)
p6 = Vec(W+w-TW, h+r,0)
p7 = Vec(W+w-TW, H+h-TF-R,0)
p8 = Vec(W+w-TW-R, H+h-TF,0)
p9 = Vec(w+TW+R, H+h-TF,0)
p10 = Vec(w+TW, H+h-TF-R,0)
p11 = Vec(w+TW, h+r,0)
p12 = Vec(w+TW-r, h,0)
C1 = Vec(w+TW-r,h+r,0)
C2 = Vec(w+TW+R,H+h-TF-R,0)
C3 = Vec(W+w-TW-R,H+h-TF-R,0)
C4 = Vec(W+w-TW+r,r+h,0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p2, p3)
L3 = Part.makeLine(p3, p4)
L4 = Part.makeLine(p4, p5)
L5 = Part.makeLine(p6, p7)
L6 = Part.makeLine(p8, p9)
L7 = Part.makeLine(p10, p11)
L8 = Part.makeLine(p12, p1)
A1 = Part.makeCircle(r,C1,d,270,0)
A2 = Part.makeCircle(R,C2,d,90,180)
A3 = Part.makeCircle(R,C3,d,0,90)
A4 = Part.makeCircle(r,C4,d,180,270)
wire1 = Part.Wire([L1,L2,L3,L4,A4,L5,A3,L6,A2,L7,A1,L8])
            if obj.MakeFillet == True and obj.UPN == True: # UPN with fillets
angarc = obj.FlangeAngle
angrad = math.pi*angarc/180
sina = math.sin(angrad)
cosa = math.cos(angrad)
tana = math.tan(angrad)
cot1 = r*sina
y11 = r-cot1
cot2 = (H/2-r)*tana
cot3 = cot1*tana
x11 = TW-cot2-cot3
xc1 = TW-cot2-cot3-r*cosa
yc1 = r
cot8 = (H/2-R-TF+R*sina)*tana
x10 = TW+cot8
y10 = H-TF-R+R*sina
xc2 = cot8+R*cosa+TW
yc2 = H-TF-R
x12 = TW-cot2-cot3-r*cosa
y12 = 0
x9 = cot8+R*cosa+TW
y9 = H-TF
xc3 = W-xc2
yc3 = yc2
xc4 = W-xc1
yc4 = yc1
x1 = 0
y1 = 0
x2 = 0
y2 = H
x3 = W
y3 = H
x4 = W
y4 = 0
x5 = W-x12
y5 = 0
x6 = W-x11
y6 = y11
x7 = W-x10
y7 = y10
x8 = W-x9
y8 = y9
c1 = Vec(xc1+w,yc1+h,0)
c2 = Vec(xc2+w,yc2+h,0)
c3 = Vec(xc3+w,yc3+h,0)
c4 = Vec(xc4+w,yc4+h,0)
p1 = Vec(x1+w,y1+h,0)
p2 = Vec(x2+w,y2+h,0)
p3 = Vec(x3+w,y3+h,0)
p4 = Vec(x4+w,y4+h,0)
p5 = Vec(x5+w,y5+h,0)
p6 = Vec(x6+w,y6+h,0)
p7 = Vec(x7+w,y7+h,0)
p8 = Vec(x8+w,y8+h,0)
p9 = Vec(x9+w,y9+h,0)
p10 = Vec(x10+w,y10+h,0)
p11 = Vec(x11+w,y11+h,0)
p12 = Vec(x12+w,y12+h,0)
A1 = Part.makeCircle(r,c1,d,270,0-angarc)
A2 = Part.makeCircle(R,c2,d,90,180-angarc)
A3 = Part.makeCircle(R,c3,d,0+angarc,90)
A4 = Part.makeCircle(r,c4,d,180+angarc,270)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p2, p3)
L3 = Part.makeLine(p3, p4)
L4 = Part.makeLine(p4, p5)
L5 = Part.makeLine(p6, p7)
L6 = Part.makeLine(p8, p9)
L7 = Part.makeLine(p10, p11)
L8 = Part.makeLine(p12, p1)
wire1 = Part.Wire([L1,L2,L3,L4,A4,L5,A3,L6,A2,L7,A1,L8])
p = Part.Face(wire1)
if self.type == "IPE" or self.type == "IPN" or self.type == "HEA" or self.type == "HEB" or self.type == "HEM":
      XA1 = W/2-TW/2 # left face of the web
      XA2 = W/2+TW/2 # right face of the web
      if obj.MakeFillet == False: # IPE or IPN without fillets
Yd = 0
if obj.IPN == True: Yd = (W/4)*math.tan(math.pi*obj.FlangeAngle/180)
p1 = Vec(0+w,0+h,0)
p2 = Vec(0+w,TF+h-Yd,0)
p3 = Vec(XA1+w,TF+h+Yd,0)
p4 = Vec(XA1+w,H-TF+h-Yd,0)
p5 = Vec(0+w,H-TF+h+Yd,0)
p6 = Vec(0+w,H+h,0)
p7 = Vec(W+w,H+h,0)
p8 = Vec(W+w,H-TF+h+Yd,0)
p9 = Vec(XA2+w,H-TF+h-Yd,0)
p10 = Vec(XA2+w,TF+h+Yd,0)
p11 = Vec(W+w,TF+h-Yd,0)
p12 = Vec(W+w,0+h,0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p2, p3)
L3 = Part.makeLine(p3, p4)
L4 = Part.makeLine(p4, p5)
L5 = Part.makeLine(p5, p6)
L6 = Part.makeLine(p6, p7)
L7 = Part.makeLine(p7, p8)
L8 = Part.makeLine(p8, p9)
L9 = Part.makeLine(p9, p10)
L10 = Part.makeLine(p10,p11)
L11 = Part.makeLine(p11,p12)
L12 = Part.makeLine(p12,p1)
wire1 = Part.Wire([L1,L2,L3,L4,L5,L6,L7,L8,L9,L10,L11,L12])
      if obj.MakeFillet == True and obj.IPN == False: # IPE with fillets
p1 = Vec(0+w,0+h,0)
p2 = Vec(0+w,TF+h,0)
p3 = Vec(XA1-R+w,TF+h,0)
p4 = Vec(XA1+w,TF+R+h,0)
p5 = Vec(XA1+w,H-TF-R+h,0)
p6 = Vec(XA1-R+w,H-TF+h,0)
p7 = Vec(0+w,H-TF+h,0)
p8 = Vec(0+w,H+h,0)
p9 = Vec(W+w,H+h,0)
p10 = Vec(W+w,H-TF+h,0)
p11 = Vec(XA2+R+w,H-TF+h,0)
p12 = Vec(XA2+w,H-TF-R+h,0)
p13 = Vec(XA2+w,TF+R+h,0)
p14 = Vec(XA2+R+w,TF+h,0)
p15 = Vec(W+w,TF+h,0)
p16 = Vec(W+w,0+h,0)
c1 = Vec(XA1-R+w,TF+R+h,0)
c2 = Vec(XA1-R+w,H-TF-R+h,0)
c3 = Vec(XA2+R+w,H-TF-R+h,0)
c4 = Vec(XA2+R+w,TF+R+h,0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p2, p3)
L3 = Part.makeLine(p4, p5)
L4 = Part.makeLine(p6, p7)
L5 = Part.makeLine(p7, p8)
L6 = Part.makeLine(p8, p9)
L7 = Part.makeLine(p9, p10)
L8 = Part.makeLine(p10, p11)
L9 = Part.makeLine(p12, p13)
L10 = Part.makeLine(p14, p15)
L11 = Part.makeLine(p15, p16)
L12 = Part.makeLine(p16, p1)
A1 = Part.makeCircle(R,c1,d,270,0)
A2 = Part.makeCircle(R,c2,d,0,90)
A3 = Part.makeCircle(R,c3,d,90,180)
A4 = Part.makeCircle(R,c4,d,180,270)
wire1 = Part.Wire([L1,L2,A1,L3,A2,L4,L5,L6,L7,L8,A3,L9,A4,L10,L11,L12])
      if obj.MakeFillet == True and obj.IPN == True: # IPN with fillets
angarc = obj.FlangeAngle
angrad = math.pi*angarc/180
sina = math.sin(angrad)
cosa = math.cos(angrad)
tana = math.tan(angrad)
cot1 = W/4*tana #1,47
cot2 = TF-cot1 #4,42
cot3 = r*cosa #1,98
cot4 = r-cot3*tana #1,72
cot5 = cot4*tana #0,24
cot5 = cot2+cot5 #4,66
cot6 = R*sina #0,55
cot7 = W/4-R-TW/2 #4,6
cot8 = cot6+cot7 #5,15
cot9 = cot7*tana #0,72
cot10 = R*cosa #3,96
xc1 = r
yc1 = cot5-cot3
c1 = Vec(xc1+w,yc1+h,0)
xc2 = W/2-TW/2-R
yc2 = cot9+TF+cot10
c2 = Vec(xc2+w,yc2+h,0)
xc3 = xc2
yc3 = H-yc2
c3 = Vec(xc3+w,yc3+h,0)
xc4 = xc1
yc4 = H-yc1
c4 = Vec(xc4+w,yc4+h,0)
xc5 = W-xc1
yc5 = yc4
c5 = Vec(xc5+w,yc5+h,0)
xc6 = W-xc2
yc6 = yc3
c6 = Vec(xc6+w,yc6+h,0)
xc7 = xc6
yc7 = yc2
c7 = Vec(xc7+w,yc7+h,0)
xc8 = xc5
yc8 = yc1
c8 = Vec(xc8+w,yc8+h,0)
A1 = Part.makeCircle(r,c1,d,90+angarc,180)
A2 = Part.makeCircle(R,c2,d,270+angarc,0)
A3 = Part.makeCircle(R,c3,d,0,90-angarc)
A4 = Part.makeCircle(r,c4,d,180,270-angarc)
A5 = Part.makeCircle(r,c5,d,270+angarc,0)
A6 = Part.makeCircle(R,c6,d,90+angarc,180)
A7 = Part.makeCircle(R,c7,d,180,270-angarc)
A8 = Part.makeCircle(r,c8,d,0,90-angarc)
xp1 = 0
yp1 = 0
p1 = Vec(xp1+w,yp1+h,0)
xp2 = 0
yp2 = cot5-cot3
p2 = Vec(xp2+w,yp2+h,0)
xp3 = cot4
yp3 = cot5
p3 = Vec(xp3+w,yp3+h,0)
xp4 = W/4+cot8
yp4 = TF+cot9
p4 = Vec(xp4+w,yp4+h,0)
xp5 = W/2-TW/2
yp5 = yc2
p5 = Vec(xp5+w,yp5+h,0)
xp6 = xp5
yp6 = H-yp5
p6 = Vec(xp6+w,yp6+h,0)
xp7 = xp4
yp7 = H-yp4
p7 = Vec(xp7+w,yp7+h,0)
xp8 = xp3
yp8 = H-yp3
p8 = Vec(xp8+w,yp8+h,0)
xp9 = xp2
yp9 = H - yp2
p9 = Vec(xp9+w,yp9+h,0)
xp10 = xp1
yp10 = H
p10 = Vec(xp10+w,yp10+h,0)
xp11 = W
yp11 = H
p11 = Vec(xp11+w,yp11+h,0)
xp12 = xp11
yp12 = yp9
p12 = Vec(xp12+w,yp12+h,0)
xp13 = W-xp8
yp13 = yp8
p13 = Vec(xp13+w,yp13+h,0)
xp14 = W-xp7
yp14 = yp7
p14 = Vec(xp14+w,yp14+h,0)
xp15 = W-xp6
yp15 = yp6
p15 = Vec(xp15+w,yp15+h,0)
xp16 = W-xp5
yp16 = yp5
p16 = Vec(xp16+w,yp16+h,0)
xp17 = W-xp4
yp17 = yp4
p17 = Vec(xp17+w,yp17+h,0)
xp18 = W-xp3
yp18 = yp3
p18 = Vec(xp18+w,yp18+h,0)
xp19 = W-xp2
yp19 = yp2
p19 = Vec(xp19+w,yp19+h,0)
xp20 = W
yp20 = 0
p20 = Vec(xp20+w,yp20+h,0)
L1 = Part.makeLine(p1, p2)
L2 = Part.makeLine(p3, p4)
L3 = Part.makeLine(p5, p6)
L4 = Part.makeLine(p7, p8)
L5 = Part.makeLine(p9, p10)
L6 = Part.makeLine(p10, p11)
L7 = Part.makeLine(p11, p12)
L8 = Part.makeLine(p13, p14)
L9 = Part.makeLine(p15, p16)
L10 = Part.makeLine(p17, p18)
L11 = Part.makeLine(p19, p20)
L12 = Part.makeLine(p20, p1)
wire1 = Part.Wire([L1,A1,L2,A2,L3,A3,L4,A4,L5,L6,L7,A5,L8,A6,L9,A7,L10,A8,L11,L12])
p = Part.Face(wire1)
if self.type == "Round bar" or self.type == "Pipe":
c = Vec(H/2+w,H/2+h,0)
A1 = Part.makeCircle(H/2,c,d,0,360)
A2 = Part.makeCircle((H-TW)/2,c,d,0,360)
wire1 = Part.Wire([A1])
wire2 = Part.Wire([A2])
if TW:
p1 = Part.Face(wire1)
p2 = Part.Face(wire2)
p = p1.cut(p2)
else:
p = Part.Face(wire1)
if L:
ProfileFull = p.extrude(Vec(0,0,L))
obj.Shape = ProfileFull
if B1Y or B2Y or B1X or B2X or B1Z or B2Z: # make the bevels:
hc = 10 * max (H,W)
ProfileExt = ProfileFull.fuse(p.extrude(Vec(0,0,L+hc/4)))
box = Part.makeBox(hc,hc,hc)
box.translate (Vec(-hc/2+w,-hc/2+h,L))
pr = Vec(0,0,L)
box.rotate(pr,Vec(0,1,0),B2Y)
if self.BevelCombined == True: box.rotate(pr,Vec(0,0,1),B2Z)
else: box.rotate(pr,Vec(1,0,0),B2X)
ProfileCut = ProfileExt.cut(box)
ProfileExt = ProfileCut.fuse(p.extrude(Vec(0,0,-hc/4)))
box = Part.makeBox(hc,hc,hc)
box.translate (Vec(-hc/2+w,-hc/2+h,-hc))
pr = Vec(0,0,0)
box.rotate(pr,Vec(0,1,0),B1Y)
if self.BevelCombined == True: box.rotate(pr,Vec(0,0,1),B1Z)
else: box.rotate(pr,Vec(1,0,0),B1X)
ProfileCut = ProfileExt.cut(box)
obj.Shape = ProfileCut.removeSplitter()
        # if wire2: obj.Shape = Part.Compound([wire1,wire2]) # OCC Sweep can't handle hollow shapes yet :-(
else:
obj.Shape = Part.Face(wire1)
obj.Placement = pl
obj.positionBySupport()
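# The helper functions below read the bundled Profiles.txt database.  Judging from
# the parsing code (an assumption, the file itself is not shown here): each family
# block is delimited by lines starting with '*', the family name follows the first
# '*', a '/'-separated header line follows the fourth '*', and the data rows are
# tab-separated with the size designation in the first column.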
def recherche_fams():
    # Scan the whole file to find the profile families
    # Return a list containing their names
tab =[]
pos = 0
with open(path, "r") as file:
while pos < file_len:
while True:
car = file.read(1)
if car == "*" or not car: break
# print (pos)
            ligne = file.readline() # family found
txt = ligne[:len(ligne)-1]
if txt: tab.append(txt)
ligne = file.readline()
ligne = file.readline()
ligne = file.readline()
pos = file.tell()
txt =""
return tab
def trouve_txt(pos,txt):
    # Find a string starting from pos
    # Return the new position where it was found
with open(path, "r") as file:
file.seek(pos)
while True:
ligne = file.readline()
if ligne.find(txt) !=-1 : break
pos_line = file.tell() - len(ligne)
pos_found = pos_line + ligne.find(txt)
return pos_found
def extrait_data(fam,size):
    # Extract all the data for one size of a family
    # Return a list:
    # Family/Size/Data1/Data2...
tab=[]
tab.append(fam)
tab.append(size)
posfam = trouve_txt(0,fam)
possize = trouve_txt(posfam,size)
car=str=""
with open(path, "r") as file:
file.seek (possize+len(size))
while True:
while True:
car = file.read(1)
if car == "\t" or car == "\n": break
str += car
if str: tab.append(str)
str=""
if car == "\n": break
# print(tab)
return tab
def recherche_ind(fam,type):
    # Find the index of the requested data field within the family
pos1 = trouve_txt(0,fam)
pos2 = trouve_txt(pos1+1,"*")
pos3 = trouve_txt(pos2+1,"*")
pos4 = trouve_txt(pos3+1,"*")
typ = []
with open(path, "r") as file:
file.seek(pos4)
ligne = file.readline().rstrip()
typ = ligne.split("/")
ind = typ.index(type)+1
return ind
def recherche_dims(fam):
    # Find all the sizes available for a family
    # and return a list containing them
pos1 = trouve_txt(0,fam)
pos2 = trouve_txt(pos1+1,"*")
pos3 = trouve_txt(pos2+1,"*")
pos4 = trouve_txt(pos3+1,"*")
tab = []
str = ""
with open(path, "r") as file:
file.seek(pos4)
ligne = file.readline()
car = file.read(1)
while car !="\n" and car !="":
while car != "\t":
str += car
car = file.read(1)
if str: tab.append(str)
str=""
ligne = file.readline()
car = file.read(1)
# tab.sort()
# print (tab)
return tab
# get the path of the current python script
file = "Profiles.txt"
macro_path = os.path.realpath(__file__)
path = os.path.realpath(__file__)
path = os.path.dirname(path)
path = os.path.join(path,file)
file_len = os.stat(path).st_size
print ("file: ",file_len)
doc = FreeCAD.activeDocument()
if doc == None: doc = FreeCAD.newDocument()
form = Box()
form.show()
form.exec_()
| [
"[email protected]"
] | |
63ec5eccfc491a2921fe75994e203d976bc35358 | f07fd563c67f6da73caaaba205c6a56242805266 | /Model_training/5_train_test_split.py | 175981c78bd4a36967690264522c643b7550e5e8 | [] | no_license | stankusnt97/StrideTech-AI-Deploy | 98df0bfe5cd55f948e495c8a8bccb00012a5d407 | 5943286698040ed0f05f5d374ee60baeda9691d9 | refs/heads/master | 2023-01-20T08:28:32.462657 | 2020-12-03T20:05:24 | 2020-12-03T20:05:24 | 318,306,385 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 847 | py |
import numpy as np
from sklearn.model_selection import train_test_split
X = np.load('framed_X.npy')
Y = np.load('framed_Y.npy')
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size = 0.2, random_state = 0, stratify = Y) # 80/20 split on train/test, typical split
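# stratify=Y keeps the label proportions of Y identical in the train and test splits,
# which matters when the classes are imbalanced.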
print("X_train shape is: " + str(X_train.shape) + "X_test shape is: " + str(X_test.shape))
# Tuple uncoupling for reference during reshape
X_train_dim1, X_train_dim2, X_train_dim3 = X_train.shape
X_test_dim1, X_test_dim2, X_test_dim3 = X_test.shape
# Add a trailing channel dimension so each sample becomes (frames, features, 1)
X_train = X_train.reshape(X_train_dim1, X_train_dim2, X_train_dim3, 1)
X_test = X_test.reshape(X_test_dim1, X_test_dim2, X_test_dim3, 1)
print(X_train.shape, X_test.shape)
np.save('X_train.npy', X_train)
np.save('X_test.npy', X_test)
np.save('y_train.npy', y_train)
np.save('y_test.npy', y_test)
| [
"[email protected]"
] | |
daa0d3a1f8f62bc66605216fe7354b5fde00a714 | a6eed190690b3883954a134017e73b6506a29a64 | /apps/cards/management/commands/import_common_english_words.py | 722fa7b570225d0141d4542ed6c809603e9a8a46 | [] | no_license | ubihinon/spelling | 6069f7079e2112495d9196e5ef42346a8207e3c5 | a28e4831b188ec410e78ff980cd7035ca0c58277 | refs/heads/master | 2023-02-27T03:02:33.459006 | 2021-01-30T14:32:31 | 2021-01-30T14:32:31 | 213,395,655 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 892 | py | import openpyxl
from django.core.files import File
from django.core.management import BaseCommand
from apps.cards.models import Card
ROOT_DICTIONARY_FOLDER = './oxford_dictionary_scraper/'
class Command(BaseCommand):
def handle(self, *args, **options):
word_dictionary = openpyxl.load_workbook(
filename=f'{ROOT_DICTIONARY_FOLDER}oxford_dictionary_3000_words.xlsx'
)
sheet = word_dictionary.worksheets[0]
for i in range(1, sheet.max_row):
text = sheet.cell(i, 1).value
sound_path = sheet.cell(i, 3).value
if sound_path is None:
continue
            with open(f'{ROOT_DICTIONARY_FOLDER}{sound_path}', 'rb') as f:  # close the file handle automatically
                sound = File(f)
                filename = sound_path.split('/')
                card = Card(text=text)
                card.sound.save(filename[1], sound)
                card.save()
| [
"[email protected]"
] | |
1cc73f66c9c69bba295a21434fc6ec62abc5c5d4 | c1da5c1530ff768d9c9ed61b70f7913eb1c4172e | /Practice/Recursion/PermutationOfArraysOfArray.py | 2361e290785e72009123f7441bc8791a9a152c4a | [] | no_license | saumyasinha023/PythonProgramming | b3773d52e1058deebeffab0315d154784c154f87 | 610474ee649df184ff24c00d869f69ffd7af52e5 | refs/heads/master | 2021-05-10T11:02:11.160595 | 2018-03-12T17:29:34 | 2018-03-12T17:29:34 | 118,398,400 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | class Solution():
def findPermutations(self, arr):
tmp, final = [], []
for each in range(len(arr[0])):
final = self.helper(arr, [arr[0][each]], final, 1)
print(final)
def helper(self, arr, tmp, final, index):
        if len(tmp) == len(arr):  # complete once one element from every sub-list has been picked
final.append(tmp)
if index >= len(arr):
return
for every in range(len(arr[index])):
self.helper(arr, tmp + [arr[index][every]], final, index + 1)
return final
S = Solution()
S.findPermutations([[1, 2, 3], [4], [5, 6]])
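# With the sample input above, the expected print-out (illustrative, not captured
# from a run) is every combination taking one element from each sub-list:
# [[1, 4, 5], [1, 4, 6], [2, 4, 5], [2, 4, 6], [3, 4, 5], [3, 4, 6]]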
| [
"[email protected]"
] | |
6d1a80dc45e42a12a16da8c0ca9b41e4e896d2a3 | 69323ef926f905f2bc73bc04139c7d568fd931b2 | /Dutch Flag Partition.py | cb611d6051a63f01863d9f62f3e379ccc46879eb | [] | no_license | charlie-ww/Date_Structure_hw1_code | 1ffe38febab970d8235f47b67a0e44652eb0c80d | f128ac6c876e56d331e3e18d3cb7f981ab22260c | refs/heads/master | 2023-08-27T06:10:05.141076 | 2021-10-28T07:06:24 | 2021-10-28T07:06:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,025 | py | # code from https://www.techiedelight.com/quicksort-using-dutch-national-flag-algorithm/
def swap(A, i, j):
temp = A[i]
A[i] = A[j]
A[j] = temp
# Partition routine using the Dutch national flag algorithm
def partition(A, start, end):
mid = start
pivot = A[end]
while mid <= end:
if A[mid] < pivot:
swap(A, start, mid)
start += 1
mid += 1
elif A[mid] > pivot:
swap(A, mid, end)
end -= 1
else:
mid += 1
# `A[start … mid-1]` contains all occurrences of a pivot
return start - 1, mid
# 3–way Quicksort routine
def quicksort(A, start, end):
# base condition for 0 or 1 elements
if start >= end:
return
# handle 2 elements separately as the Dutch national flag
# algorithm will work for 3 or more elements
    if end - start == 1:
        if A[start] > A[end]:
            swap(A, start, end)
        return
# rearrange elements across pivot using the Dutch
# national flag problem algorithm
x, y = partition(A, start, end)
# recur on sublist containing elements that are less than the pivot
quicksort(A, start, x)
# recur on sublist containing elements that are more than the pivot
quicksort(A, y, end)
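# Tiny self-check (not part of the original benchmark): the 3-way partition groups
# duplicates of the pivot together, so the list ends up fully sorted.
_demo = [4, 9, 4, 1, 4, 7]
quicksort(_demo, 0, len(_demo) - 1)
assert _demo == [1, 4, 4, 4, 7, 9]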
from openpyxl import Workbook
wb = Workbook()
ws = wb.active
ws['A1'] = '時間'
import numpy as np
import time
# arr = [12, 11, 13, 5, 6, 7]
# n = len(random_list)
# 26
for a in range(10, 26):
n = 2 ** a
ws.append(["2的", a])
for k in range(10):
random_list = np.random.randint(0, 1001, size=n)
# print("Given array is")
# for x in range(n):
# print("%d" % random_list[x]),
start = time.time()
quicksort(random_list, 0, n - 1)
end = time.time()
# print("\n\nSorted array is")
# for i in range(n):
# print("%d" % random_list[i]),
        print("sorting", n, "elements took", end - start, "seconds")
ws.append([end-start])
wb.save("dutch.xlsx") | [
"[email protected]"
] | |
70190dd7c1f27c1814b8a24685b77a3a012e84bd | 317058436d87ae947d12a41841fff5c09ee34856 | /index.py | 6dcd4b6de41dfbdcc49e14d1a7686cb93bae3828 | [] | no_license | Zattri/Fortify | fe943634877696d161356dcbb27d6aa9533ebbd5 | fc7841ed19a430e6da126ca3b1a97160147b230f | refs/heads/master | 2021-05-09T16:07:25.263532 | 2018-02-03T18:43:50 | 2018-02-03T18:43:50 | 119,107,893 | 5 | 0 | null | 2018-02-03T18:43:50 | 2018-01-26T21:59:40 | Python | UTF-8 | Python | false | false | 2,272 | py | # Make a default starting value for resource dictionary
resourceDictionary = {"stone": [50, 3], "wood": [50, 3], "clay": [50, 3], "hide": [50, 3], "gold": [50, 3], "workers": [30, 2]}
# Building Index, contains all the buildings of a village
buildingIndex = [None] * 10
buildingIndex[0] = ["Stone Quarry", "A large quarry, used for gathering stone", 0]
buildingIndex[1] = ["Lumber yard", "A yard for woodcutters to process and store wood", 0]
buildingIndex[2] = ["Clay Pit", "Wet flatland, perfect for harvesting clay", 0]
buildingIndex[3] = ["Hunter's Lodge", "A small wooden hut, occupied by hunters gathering pelts and hides", 0]
buildingIndex[4] = ["Gold Mine", "A deep mine, maybe there's some gold in it still", 0]
buildingIndex[5] = ["Farm", "Rich fertile soil ideal for farming and increasing workers", 0]
# Upgrade Index - Statically holds the costs of all buildings and upgrades
upgradeIndex = [[None for width in range(5)] for height in range(10)]
upgradeIndex[0][0] = ({"clay": 5, "wood": 5, "workers": 3}, {"stone": 2}, None)
upgradeIndex[1][0] = ({"stone": 5, "hide": 5, "workers": 3}, {"wood": 2}, None)
upgradeIndex[2][0] = ({"hide": 5, "wood": 5, "workers": 3}, {"clay": 2}, None)
upgradeIndex[3][0] = ({"stone": 5, "wood": 5, "workers": 3}, {"hide": 2}, None)
upgradeIndex[4][0] = ({"clay": 5, "stone": 5, "workers": 3}, {"gold": 2}, None)
upgradeIndex[5][0] = ({"hide": 5, "clay": 5, "workers": 3}, {"workers": 1}, None)
# Quarry Upgrades
upgradeIndex[0][1] = ({"stone": 10, "wood": 10}, {"stone": 2}, 1)
upgradeIndex[0][2] = ({"stone": 15, "wood": 15}, {"stone": 3}, 2)
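# Illustrative helper, not part of the original game logic (the function name and the
# reading of the tuple fields are assumptions): each upgradeIndex entry appears to be
# (resource cost, production bonus, required previous level), while resourceDictionary
# maps a resource to [stored amount, gain per tick].
def can_afford_upgrade(building, level):
    cost = upgradeIndex[building][level][0]
    return all(resourceDictionary[res][0] >= amount for res, amount in cost.items())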
techIndex = [None] * 10
techIndex[0] = ["Forged Armour", "Forge armour to increase troop defence", 0]
techIndex[1] = ["Forged Tools", "Create tools to increase gathering rate", 0]
techIndex[2] = ["Herbalist Training", "Train workers to become Herbalists", 0]
techIndex[3] = ["Recruit Training", "Train workers to become military recruits", 0]
'''
Building Ideas -
Quarry - Stone
Lumbermill - Wood
Clay Pit - Clay
Hunter's Lodge - Hide
Mine - Gold?
Farm - For food / growth rate
Unit Buildings -
Trading Post - Mercenaries
Garrison - Infantry
Barracks - More Infantry?
Mercenary Ideas -
Hired Sword -
Roaming Wizard -
'''
| [
"[email protected]"
] | |
fa2ec9394bf24356e2611e93ee8acea3082da037 | 1ad512e9023e7ed43f8f3647733718ba65c20ce8 | /extra_apps/xadmin/plugins/ueditor.py | 2da178fd7c6e76f66dd5682f231608c66925391f | [] | no_license | caijunrong31/MxOnline2.7 | e660bfa0619c4ef94ce9da614388370ec4c74725 | a60054a2938fcc0707be4c0903df1afa75e9cbb0 | refs/heads/master | 2020-03-10T20:18:48.010733 | 2018-04-17T14:16:06 | 2018-04-17T14:16:06 | 129,567,829 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,501 | py | # _*_ coding: utf-8 _*_
__author__ = 'cai'
__date__ = '2018/1/13 14:12'
import xadmin
from django.db.models import TextField
from xadmin.views import BaseAdminPlugin, CreateAdminView, UpdateAdminView
from DjangoUeditor.models import UEditorField
from DjangoUeditor.widgets import UEditorWidget
from django.conf import settings
class XadminUEditorWidget(UEditorWidget):
def __init__(self, **kwargs):
self.ueditor_options = kwargs
self.Media.js = None
super(XadminUEditorWidget, self).__init__(kwargs)
class UeditorPlugin(BaseAdminPlugin):
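    """Xadmin plugin wiring DjangoUeditor into the admin.

    When a field is given the 'ueditor' style (in xadmin this is normally declared
    through the ModelAdmin's ``style_fields`` -- an assumption based on common xadmin
    usage), ``get_field_style`` swaps in the UEditor widget, and ``block_extrahead``
    injects the UEditor JavaScript assets into the page.
    """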
def get_field_style(self, attrs, db_field, style, **kwargs):
if style == 'ueditor':
if isinstance(db_field, UEditorField):
widget = db_field.formfield().widget
param = {}
param.update(widget.ueditor_settings)
param.update(widget.attrs)
return {'widget': XadminUEditorWidget(**param)}
return attrs
def block_extrahead(self, context, nodes):
        js = '<script type="text/javascript" src="%s"></script>' % (settings.STATIC_URL + "ueditor/ueditor.config.js")  # our own bundled static resource
        js += '<script type="text/javascript" src="%s"></script>' % (settings.STATIC_URL + "ueditor/ueditor.all.min.js")  # our own bundled static resource
nodes.append(js)
xadmin.site.register_plugin(UeditorPlugin, UpdateAdminView)
xadmin.site.register_plugin(UeditorPlugin, CreateAdminView) | [
"[email protected]"
] | |
8e1a750e7c2c4b05387e141918d6b4c3e7020d8e | 6ba00ac7c7fe4b6ad0496f017b15221c9b155362 | /6_merging_lists.py | 7e49afec8b8ac2e27bda5dddea2b037f0802a157 | [] | no_license | SaveliiLapin/learning_projects | a0cdfb6479ed7fa01d3dcfda7489066c094a0366 | cbbf7efba4225955e5a160fe5034dd2b9c51cb5a | refs/heads/main | 2023-08-21T14:42:23.231001 | 2021-10-11T18:32:06 | 2021-10-11T18:32:06 | 416,041,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | def merge(aa, bb):
cc = []
p_1 = 0
p_2 = 0
for i in range(len(aa) + len(bb)):
if p_2 == len(bb) or p_1 < len(aa) and aa[p_1] < bb[p_2]:
cc.append(aa[p_1])
p_1 += 1
else:
cc.append(bb[p_2])
p_2 += 1
return cc
a = list(map(int, input().split()))
b = list(map(int, input().split()))
print(*merge(a, b))
| [
"[email protected]"
] | |
5ff53f04c66bc52149067232691909d9566d288d | a1dfba3829797aa0f5707401a4c688b8d51a4351 | /quotes/urls.py | affb9890382e4686099ffcff894a725d85c63997 | [
"MIT"
] | permissive | abumukhlis/mfdw_project | 1d73993844e2e985b5d5595efc28515af9f99127 | 3368a270716a0937a6acdf62b120e46c10d1c7cc | refs/heads/main | 2023-01-28T10:23:00.293205 | 2020-12-09T21:22:58 | 2020-12-09T21:22:58 | 319,397,917 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 291 | py | from django.urls import path
from . import views
from .views import QuoteList, QuoteView
urlpatterns = [
path('', views.quote_req, name='quote-request'),
path('show/<int:pk>', QuoteView.as_view(), name='quote-detail'),
path('show', QuoteList.as_view(), name='show-quotes'),
]
| [
"[email protected]"
] | |
5ab05d6eb7c9ea34dff5a334d58e2842c5f1b550 | 3d8fb949cc6f81b4cd406b82b07145c986ae3036 | /malc0de.py | b72d73a4d8b519f9a297ec1873ed77f4e5f6822d | [] | no_license | ckane/revolutionuc-cybersecurity | 8a99a256d6f722f01d15fe20da0f399317c66deb | 2778d076e8e1e2b09e78da56d48a2725aa8bfca3 | refs/heads/master | 2020-12-24T19:28:06.734914 | 2016-04-16T18:29:54 | 2016-04-16T18:29:54 | 56,092,819 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,442 | py | #!/usr/bin/env python
import untangle
import sys
import json
import re
import requests
def add_domain(d, i, com):
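    """Push one malicious domain (d) with its resolving IP (i) into the local CRITs
    instance through its authenticated API, then attach the RSS item description
    (com) as a comment on the newly created Domain object."""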
# stolen from https://github.com/crits/crits/wiki/Authenticated-API
url = 'http://localhost:8080/api/v1/domains/'
data = {
'api_key': 'a4eab72098c263d74d23e3745b361e910fe68ffa',
'username': 'ckane',
'source': 'malc0de',
'reference': 'RSS feed',
'method': 'malc0de.py',
'domain': d,
'ip': i,
'ip_type': 'IPv4 Address',
'ip_source': 'malc0de',
'ip_reference': 'RSS feed',
'ip_method': 'malc0de.py',
'add_ip': True
}
r = requests.post(url, data=data)
if r.status_code == 201:
rj = json.loads(r.text)
cdata = {
'api_key': 'a4eab72098c263d74d23e3745b361e910fe68ffa',
'username': 'ckane',
'comment': com,
'object_id': rj['id'],
'object_type': 'Domain'
}
r = requests.post('http://localhost:8080/api/v1/comments', data=cdata)
print "Successfully added " + data['domain']
data = untangle.parse('feed.xml')
ips = []
domains = []
ip_extr = re.compile('IP Address: (\\d+\\.\\d+\\.\\d+\\.\\d+)')
for item in data.rss.channel.item:
ip_matches = ip_extr.search(item.description.cdata)
ip = None
if ip_matches:
ip = ip_matches.group(1)
add_domain(item.title.cdata, ip, item.description.cdata)
| [
"[email protected]"
] | |
d9f7090fed4813fead0b2a20f583e935fccc1d34 | 15b17d497bb7a3edaf3aa8e55615da837737ff29 | /mysite/settings.py | 0ce2c378ffb5604f5ccf4737c2403d4c6ca8b533 | [] | no_license | kaurk123/nginx-demo | f5e7a745ba71cfac58645213017880c56084c13d | 861e468baf56358d488fd341d6b3023b7658ffb0 | refs/heads/master | 2020-03-29T20:38:07.163938 | 2018-09-25T20:04:18 | 2018-09-25T20:04:18 | 150,322,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,145 | py | """
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '838og@=6lpj92$@k^gi!@qosg8ls!=e_artn5(#pxkq+b2nld&'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "static/")
| [
"[email protected]"
] | |
ca196829e12d80283dfb117eec1b1041f7507d33 | 619054eaea6b93fb3ad354606029363817088285 | /rai/rai_node_gateway/rai_node_gateway.py | cec6e8d68f4859a99c91d669fb2f04d0619c20a4 | [
"MIT"
] | permissive | raicoincommunity/Raicoin | e3d1047b30cde66706b994a3b31e0b541b07c79f | ebc480caca04caebdedbc3afb4a8a0f60a8c895f | refs/heads/master | 2023-09-03T00:39:00.762688 | 2023-08-29T17:01:09 | 2023-08-29T17:01:09 | 210,498,091 | 99 | 16 | MIT | 2023-08-29T17:01:11 | 2019-09-24T02:52:27 | C++ | UTF-8 | Python | false | false | 15,653 | py | #!/usr/bin/env python
#V1.0.0
from dotenv import load_dotenv
load_dotenv()
import argparse
import asyncio
import ipaddress
import json
import logging
import os
import sys
import time
import uuid
import uvloop
import secrets
import random
import traceback
from logging.handlers import TimedRotatingFileHandler, WatchedFileHandler
from aiohttp import ClientSession, WSMessage, WSMsgType, log, web, ClientWebSocketResponse
from functools import partial
ALLOWED_RPC_ACTIONS = [
'account_heads', 'active_account_heads', 'block_confirm', 'block_publish', 'blocks_query', 'event_subscribe'
]
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
LOOP = asyncio.get_event_loop()
# Configuration arguments
parser = argparse.ArgumentParser(description="Raicoin Node Gateway")
parser.add_argument('--host', type=str, help='Host to listen on (e.g. 127.0.0.1)', default='127.0.0.1')
parser.add_argument('-p', '--port', type=int, help='Port to listen on', default=7178)
parser.add_argument('--log-file', type=str, help='Log file location', default='rai_node_gateway.log')
parser.add_argument('--limit', type=int, help='Max allowed requests per second from one IP', default=0)
parser.add_argument('-t', '--token', help='Create a secure url token', action='store_true')
options = parser.parse_args()
try:
if options.token:
print(secrets.token_urlsafe())
sys.exit(0)
LISTEN_HOST = str(ipaddress.ip_address(options.host))
LISTEN_PORT = int(options.port)
if not os.path.exists('log'):
os.makedirs('log')
LOG_FILE = f'log/{options.log_file}'
server_desc = f'on {LISTEN_HOST} port {LISTEN_PORT}'
print(f'Starting Raicoin Node Gateway {server_desc}')
LIMIT = int(options.limit)
except Exception as e:
parser.print_help()
print(e)
sys.exit(0)
# Environment configuration
CALLBACK_WHITELIST = os.getenv('CALLBACK_WHITELIST', '127.0.0.1')
if CALLBACK_WHITELIST != '127.0.0.1':
try:
ips = CALLBACK_WHITELIST.split(',')
CALLBACK_WHITELIST = []
for ip in ips:
if not ip.strip():
continue
CALLBACK_WHITELIST.append(str(ipaddress.ip_address(ip.strip())))
if not CALLBACK_WHITELIST:
print("Error found in .env: invalid CALLBACK_WHITELIST config")
except:
print("Error found in .env: invalid CALLBACK_WHITELIST config")
sys.exit(0)
DEBUG_MODE = True if int(os.getenv('DEBUG', 0)) != 0 else False
CALLBACK_TOKEN = os.getenv("CALLBACK_TOKEN", '')
if len(CALLBACK_TOKEN) != 43:
print("Error found in .env: CALLBACK_TOKEN is missing or invalid, you can use 'python3 rai_node_gateway.py -t' to generate a secure token")
sys.exit(0)
CHECK_CF_CONNECTING_IP = True if int(os.getenv('USE_CLOUDFLARE', 0)) == 1 else False
GS = {} # Global States
CLIENTS = GS['clients'] = {}
NODES = GS['nodes'] = []
LIMITS = GS['limits'] = {}
SUBS = GS['subscriptions'] = {}
class Util:
def __init__(self, check_cf : bool):
self.check_cf = check_cf
def get_request_ip(self, r : web.Request) -> str:
#X-FORWARDED-FOR not safe, don't use
try:
if self.check_cf:
host = r.headers.get('CF-Connecting-IP', None) #Added by Cloudflare
if host != None:
return host
host = r.headers.get('X-Real-IP', None) #Added by Nginx
if host != None:
return host
return self.get_connecting_ip(r)
except:
return '0.0.0.0'
def get_connecting_ip(self, r : web.Request):
peername = r.transport.get_extra_info('peername')
if peername is not None:
host, _ = peername
return host
return None
UTIL = Util(CHECK_CF_CONNECTING_IP)
def websocket_rate_limit(r : web.Request, ws : web.WebSocketResponse):
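    """Per-IP token-bucket style limiter: each client IP gets a bucket of
    LIMIT * 100 tokens refilled at LIMIT tokens per second; every message costs
    one token and True is returned (i.e. throttle the client) once the bucket is
    empty. A LIMIT of 0 disables rate limiting."""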
global LIMITS
if LIMIT == 0:
return False
burst_max = LIMIT * 100
pps = LIMIT
ip = UTIL.get_request_ip(r)
now = int(time.time())
if ip not in LIMITS:
LIMITS[ip] = {'count':burst_max, 'ts':now}
else:
if LIMITS[ip]['ts'] < now:
LIMITS[ip]['count'] += (now - LIMITS[ip]['ts']) * pps
LIMITS[ip]['ts'] = now
if LIMITS[ip]['count'] > burst_max:
LIMITS[ip]['count'] = burst_max
if LIMITS[ip]['count'] <= 0:
log.server_logger.error('client messaging too quickly: %s; %s; User-Agent: %s', ip, ws.id, str(
r.headers.get('User-Agent')))
return True
LIMITS[ip]['count'] -= 1
return False
def sessions_count(ip):
count = 0
for client in CLIENTS.values():
if client['ip'] == ip:
count += 1
return count
def random_node():
if len(NODES) == 0:
return None
return NODES[random.randint(0, len(NODES) - 1)]
def main_node():
if len(NODES) == 0:
return None
return NODES[0]
def is_main_node(node_id):
node = main_node()
if not node:
return False
return node['id'] == node_id
async def forward_to_node(r: web.Request, request_json : dict, uid):
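    """Relay a client request to a node: 'event_subscribe' always goes to the main
    (first connected) node, every other action is sent to a randomly chosen node;
    the client id is attached so the node's reply can be routed back to the right
    websocket."""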
request_json['client_id'] = uid
node = main_node() if request_json['action'] == "event_subscribe" else random_node()
if not node:
return {'error':'node is offline'}
try:
await node['ws'].send_str(json.dumps(request_json))
except Exception as e:
log.server_logger.error('rpc error;%s;%s', str(e), node['ip'])
# Primary handler for all client websocket connections
async def handle_client_messages(r : web.Request, message : str, ws : web.WebSocketResponse):
"""Process data sent by client"""
if websocket_rate_limit(r, ws):
return {'error': 'Messaging too quickly'}
ip = UTIL.get_request_ip(r)
uid = ws.id
log.server_logger.info('request; %s, %s, %s', message, ip, uid)
try:
client = CLIENTS[uid]
request_json = json.loads(message)
action = request_json['action']
res = {'ack':action}
if 'request_id' in request_json and len(request_json['request_id']) <= 256:
res['request_id'] = request_json['request_id']
if request_json['action'] not in ALLOWED_RPC_ACTIONS:
res.update({'error':'action not allowed'})
return res
# adjust counts so nobody can block the node with a huge request
if 'count' in request_json:
count = int(request_json['count'])
if (count < 0) or (count > 1000):
request_json['count'] = 1000
        # rpc: event_subscribe - remember the subscribed event for this client before forwarding
if request_json['action'] == "event_subscribe":
log.server_logger.info('event_subscribe request; %s; %s', ip, uid)
event = str(request_json['event'])
if len(event) <= 32 and len(client['events']) <= 16:
client['events'].add(event)
try:
return await forward_to_node(r, request_json, uid)
except Exception as e:
log.server_logger.error('rpc error;%s;%s;%s', str(e), ip, uid)
res.update({
'error':'rpc error',
'detail': str(e)
})
return res
except Exception as e:
log.server_logger.exception('uncaught error;%s;%s', ip, uid)
return {
'error':'general error',
'detail':str(sys.exc_info())
}
async def client_handler(r : web.Request):
"""Handler for websocket connections and messages"""
if not NODES:
return web.HTTPBadGateway()
ip = UTIL.get_request_ip(r)
if (sessions_count(ip) >= 2):
return web.HTTPForbidden()
ws = web.WebSocketResponse(heartbeat=30)
try:
await ws.prepare(r)
# Connection Opened
except:
log.server_logger.error('Failed to prepare websocket: %s', UTIL.get_request_ip(r))
return ws
ws.id = str(uuid.uuid4())
global CLIENTS
CLIENTS[ws.id] = {'ws':ws, 'events':set(), 'ip':ip}
try:
log.server_logger.info('new client connection;%s;%s;User-Agent:%s', ip, ws.id, str(
r.headers.get('User-Agent')))
async for msg in ws:
if msg.type == WSMsgType.TEXT:
if msg.data == 'close':
await ws.close()
else:
res = await handle_client_messages(r, msg.data, ws=ws)
if res:
res = json.dumps(res)
log.server_logger.debug('Sending response %s to %s', res, ip)
await ws.send_str(res)
elif msg.type == WSMsgType.CLOSE:
log.server_logger.info('Client Connection closed normally')
break
elif msg.type == WSMsgType.ERROR:
log.server_logger.info('Client Connection closed with error %s', ws.exception())
break
log.server_logger.info('Client connection closed normally')
except Exception:
log.server_logger.exception('Client Closed with exception')
finally:
del CLIENTS[ws.id]
return ws
# Primary handler for callback
def callback_check_ip(r : web.Request):
ip = UTIL.get_request_ip(r)
if not ip or ip not in CALLBACK_WHITELIST:
return True
return False
async def send_to_clients(message : str, event = None):
ids = list(CLIENTS.keys())
for i in ids:
if i not in CLIENTS:
continue
client = CLIENTS[i]
if event != None and event not in client['events']:
continue
try:
await client['ws'].send_str(message)
except:
pass
async def send_to_client(message : str, client_id):
if client_id not in CLIENTS:
return
try:
await CLIENTS[client_id]['ws'].send_str(message)
except:
pass
# Primary handler for all node websocket connections
async def handle_node_messages(r : web.Request, message : str, ws : web.WebSocketResponse):
"""Process data sent by node"""
ip = UTIL.get_request_ip(r)
node_id = ws.id
log.server_logger.info('request; %s, %s, %s', message, ip, node_id)
try:
request_json = json.loads(message)
if 'notify' in request_json:
notify = request_json['notify']
if notify == 'block_confirm':
await send_to_clients(message)
else:
await send_to_clients(message, notify)
elif 'ack' in request_json:
client_id = request_json['client_id']
del request_json['client_id']
await send_to_client(json.dumps(request_json), client_id)
else:
log.server_logger.error('unexpected node message;%s;%s;%s', message, ip, node_id)
except Exception as e:
log.server_logger.exception('uncaught error;%s;%s;%s', str(e), ip, node_id)
async def destroy_node(r: web.Request, node_id):
main = is_main_node(node_id)
for i in range(len(NODES)):
if NODES[i]['id'] == node_id:
NODES.pop(i)
break
message = {'notify':'node_offline', 'main':'true' if main else 'false'}
await send_to_clients(json.dumps(message))
async def node_handler(r : web.Request):
ip = UTIL.get_request_ip(r)
if callback_check_ip(r):
log.server_logger.error('callback from unauthorized ip: %s', ip)
return web.HTTPUnauthorized()
ws = web.WebSocketResponse(heartbeat=30)
try:
await ws.prepare(r)
# Connection Opened
except:
log.server_logger.error('Failed to prepare websocket: %s', UTIL.get_request_ip(r))
return ws
ws.id = str(uuid.uuid4())
NODES.append({'ws':ws, 'ip':ip, 'id':ws.id})
try:
log.server_logger.info('new node connection;%s;%s;User-Agent:%s', ip, ws.id, str(r.headers.get('User-Agent')))
async for msg in ws:
if msg.type == WSMsgType.TEXT:
if msg.data == 'close':
await ws.close()
else:
await handle_node_messages(r, msg.data, ws=ws)
elif msg.type == WSMsgType.CLOSE:
log.server_logger.info('Node connection closed normally')
break
elif msg.type == WSMsgType.ERROR:
log.server_logger.info('Node connection closed with error %s', ws.exception())
break
log.server_logger.info('Node connection closed normally')
except Exception as e:
log.server_logger.exception('Node closed with exception=%s', e)
finally:
await destroy_node(r, ws.id)
return ws
def debug_check_ip(r : web.Request):
ip = UTIL.get_request_ip(r)
if not ip or ip != '127.0.0.1':
return True
return False
class JsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):
return list(obj)
if isinstance(obj, web.WebSocketResponse):
return 'WebSocketResponse object'
if isinstance(obj, ClientWebSocketResponse):
return 'ClientWebSocketResponse object'
return json.JSONEncoder.default(self, obj)
DEBUG_DUMPS = partial(json.dumps, cls=JsonEncoder, indent=4)
async def debug_handler(r : web.Request):
if debug_check_ip(r):
log.server_logger.error('debug request from unauthorized ip: %s', UTIL.get_request_ip(r))
return web.HTTPUnauthorized()
try:
request_json = await r.json()
query = request_json['query']
if query == 'subscriptions':
return web.json_response(SUBS, dumps=DEBUG_DUMPS)
elif query == 'nodes':
return web.json_response(NODES, dumps=DEBUG_DUMPS)
elif query == 'clients':
return web.json_response(CLIENTS, dumps=DEBUG_DUMPS)
elif query == 'limits':
return web.json_response(LIMITS, dumps=DEBUG_DUMPS)
else:
pass
return web.HTTPOk()
except Exception:
log.server_logger.exception('exception in debug request')
return web.HTTPInternalServerError(reason="Something went wrong %s" % str(sys.exc_info()))
async def init_app():
# Setup logger
if DEBUG_MODE:
print("debug mode")
logging.basicConfig(level=logging.DEBUG)
else:
root = logging.getLogger('aiohttp.server')
logging.basicConfig(level=logging.WARN)
handler = WatchedFileHandler(LOG_FILE)
formatter = logging.Formatter("%(asctime)s;%(levelname)s;%(message)s", "%Y-%m-%d %H:%M:%S %z")
handler.setFormatter(formatter)
root.addHandler(handler)
root.addHandler(TimedRotatingFileHandler(LOG_FILE, when="d", interval=1, backupCount=30))
app = web.Application()
app.add_routes([web.get('/', client_handler)]) # All client WS requests
app.add_routes([web.get(f'/callback/{CALLBACK_TOKEN}', node_handler)]) # ws/wss callback from nodes
app.add_routes([web.post('/debug', debug_handler)]) # local debug interface
return app
APP = LOOP.run_until_complete(init_app())
def main():
global APP, GS
# Start web/ws server
async def start():
runner = web.AppRunner(APP)
await runner.setup()
site = web.TCPSite(runner, LISTEN_HOST, LISTEN_PORT)
await site.start()
async def end():
await APP.shutdown()
LOOP.run_until_complete(start())
# Main program
try:
LOOP.run_forever()
except KeyboardInterrupt:
pass
finally:
LOOP.run_until_complete(end())
LOOP.close()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
dfa38f16796343775aab6b5aaab97725728628ce | 72c4cea551df0bee51c5740926b2fdb1feaf3470 | /tools/refactor/js_parse_perfstatic.py | d552ba2460a12738457a2a00a6339456e891d9f0 | [
"MIT"
] | permissive | joeedh/fairmotion | 2955fda64b36a634c42c3382b20f18dae189d13f | fc0bca395057cd3e78f91bcb1796b6c5eda73d2b | refs/heads/master | 2023-05-26T20:03:51.583970 | 2023-05-14T16:07:53 | 2023-05-14T16:07:53 | 34,099,859 | 1 | 0 | MIT | 2023-03-05T21:45:57 | 2015-04-17T06:13:09 | JavaScript | UTF-8 | Python | false | false | 6,077 | py | import ply.yacc as yacc
import sys, os, os.path
import traceback
# Get the token map from the lexer. This is required.
from js_global import glob
from js_ast import *
from js_lex import tokens, StringLit, HexInt
from ply.lex import LexToken, Lexer
#AST nodes that are used in intermediate stages of parsing,
#but are NEVER EVER in the final AST tree.
from js_parser_only_ast import *
from js_process_ast_parser_only import *
from js_parse import *
"""
This is a special "mode" that changes
the syntax to a statically-typed language,
optimized and checked for writing high-performance
code, but can still compile into JavaScript
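
An illustrative (unverified) sketch of the kind of source the productions below
appear to accept -- typed, optionally static class members next to ordinary
methods:

    class Vec3 extends Base {
        float x;
        static int count;
    }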
"""
def p_statementlist(p):
''' statementlist : statement
| statement_nonctrl
| statementlist statement
| statementlist statement_nonctrl
|
'''
set_parse_globals(p);
if len(p) == 1:
p[0] = StatementList()
elif len(p) == 2:
n = StatementList()
n.add(p[1])
p[0] = n
elif len(p) == 3:
if type(p[1]) != StatementList:
p[0] = StatementList()
p[0].add(p[1])
p[0].add(p[2])
else:
p[0] = p[1]
if p[2] != None:
p[0].add(p[2])
def p_class(p):
'''class : CLASS ID template_opt class_tail'''
set_parse_globals(p)
tail = p[4]
heritage = tail[0]
cls = ClassNode(p[2], heritage)
for n in tail[1]:
cls.add(n)
p[0] = cls
if p[3] != None:
p[0].template = p[3]
def p_exprclass(p):
'''exprclass : CLASS id_opt class_tail'''
set_parse_globals(p)
tail = p[3]
heritage = tail[0]
if p[2] == None:
p[2] = "(anonymous)"
cls = ClassNode(p[2], heritage)
for n in tail[1]:
cls.add(n)
p[0] = expand_harmony_class(cls)
def p_class_tail(p):
'''class_tail : class_heritage_opt LBRACKET class_body_opt RBRACKET'''
set_parse_globals(p)
p[0] = [p[1], p[3]]
for i in range(2):
if p[0][i] == None:
p[0][i] = []
def p_class_list(p):
'''class_list : var_type
| class_list COMMA var_type
'''
set_parse_globals(p)
if len(p) == 2:
p[0] = [p[1]];
else:
p[0] = p[1];
if type(p[0]) != list:
p[0] = [p[0]]
p[0].append(p[3])
def p_class_heritage(p):
'''class_heritage : EXTENDS class_list'''
set_parse_globals(p)
p[0] = p[2]
def p_class_heritage_opt(p):
'''class_heritage_opt : class_heritage
|
'''
set_parse_globals(p)
if len(p) == 2:
p[0] = p[1]
def p_class_body_opt(p):
'''class_body_opt : class_element_list
|
'''
set_parse_globals(p)
if len(p) == 1:
p[0] = []
else:
p[0] = p[1]
if p[0] == None:
p[0] = []
def p_class_element_list(p):
'''class_element_list : class_element
| class_element_list class_element
'''
set_parse_globals(p)
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = p[1]
p[0].append(p[2])
def p_class_element(p):
'''class_element : method_def
| STATIC method_def
| class_var
'''
set_parse_globals(p)
if len(p) == 2:
p[0] = p[1]
else:
p[0] = p[2]
p[0].is_static = True
def p_class_var(p):
'''class_var : class_vartype ID SEMI
| class_vartype ID ASSIGN expr SEMI
'''
set_parse_globals(p)
p[0] = ClassMember(p[2])
if len(p) == 6:
p[0].add(p[4])
def p_basic_var_type(p):
'''
basic_var_type : BYTE
| INT
| SHORT
| FLOAT
| DOUBLE
| CHAR
'''
p[0] = BuiltinTypeNode(p[1])
def p_var_type2(p):
''' var_type2 : basic_var_type
| ID
| ID template_ref
'''
if len(p) == 2:
if type(p[1]) == str:
p[0] = TypeRefNode(p[1])
else:
p[0] = p[1]
else:
p[0] = TypeRefNode(p[1])
p[0].template = p[2]
def p_class_vartype(p):
'''class_vartype : var_type2
| prop_modifiers var_type2
'''
set_parse_globals(p)
if len(p) == 2:
p[0] = p[1]
else:
p[0] = p[2]
p[0].modifiers = p[1]
def p_prop_modifiers(p):
'''prop_modifiers : type_modifiers UNSIGNED
| type_modifiers SIGNED
| type_modifiers CONST
| STATIC
| UNSIGNED
| CONST
|
'''
set_parse_globals(p)
if len(p) == 2:
p[0] = set([p[1]])
else:
p[0] = p[1]
p[0].add(p[2])
def p_method(p):
'''method : ID LPAREN funcdeflist RPAREN func_type_opt LBRACKET statementlist_opt RBRACKET'''
set_parse_globals(p)
name = p[1]
params = p[3]
statementlist = p[7]
if statementlist == None:
statementlist = StatementList()
p[0] = MethodNode(name)
p[0].add(params)
p[0].add(statementlist)
if p[5] != None:
p[0].type = p[5]
def p_method_def(p):
#I don't want to make get/set exclusive parse tokens,
#so I'm going to enforce that here in the production function.
'''method_def : method
| ID ID LPAREN RPAREN func_type_opt LBRACKET statementlist_opt RBRACKET
| ID ID LPAREN setter_param_list RPAREN func_type_opt LBRACKET statementlist_opt RBRACKET
'''
set_parse_globals(p)
if len(p) == 2:
p[0] = p[1]
elif p[1] == "get" and len(p) == 9:
name = p[2]
p[0] = MethodGetter(name)
if p[7] == None: p[7] = StatementList()
p[0].add(p[7])
if p[5] != None:
p[0].type = p[5]
elif p[1] == "set" and len(p) == 10:
name = p[2]
p[0] = MethodSetter(name)
p[0].add(p[4])
if p[8] == None: p[8] = StatementList()
p[0].add(p[8])
if p[6] != None:
p[0].type = p[6]
else:
glob.g_error = True
glob.g_error_pre = p
print_err(p, True)
raise SyntaxError("Expected 'get' or 'set'");
def p_setter_param_list(p):
'''
setter_param_list : ID
'''
set_parse_globals(p)
p[0] = ExprListNode([VarDeclNode(ExprNode([]), name=p[1])])
return
_parser = yacc.yacc()
parser = Parser(_parser);
| [
"[email protected]"
] | |
1f29edeb5b173d78d62774a09a1d46f9f6df82b7 | c8963254f944e40478f39c7828c3c1713e1f0270 | /googlesearch/models.py | acb87fceb21054ce9cdf7991205f0195a08942ec | [
"MIT"
] | permissive | Animenosekai/googlesearch | 452d00bf56b7384d794054f684fea2bdfaefa670 | 346bcaf7b344c6cf7c8fdd63d9493648dad9a163 | refs/heads/main | 2023-06-01T02:05:01.088463 | 2021-07-02T01:48:22 | 2021-07-02T01:48:22 | 381,765,261 | 13 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,054 | py | """
File containing the different classes used in googlesearch
"""
from urllib.parse import urlparse, parse_qsl
from bs4 import BeautifulSoup
from googlesearch.constants import BASE
class SearchResultElement():
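    """Wraps a single result block of a Google results page (a BeautifulSoup tag)
    and exposes the parsed fields: the target ``url`` (recovered from the ``q``
    query parameter of the redirect link), the ``title`` (the <h3> text), the
    ``displayed_url`` and the ``description``."""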
def __init__(self, resultobj: BeautifulSoup) -> None:
# get the url
href = str(resultobj.find("a")["href"])
if href.startswith("/"):
href = BASE + href
self.url = str(dict(parse_qsl(urlparse(href).query))["q"])
# get the title
self.title = str(resultobj.find("h3").text)
self.displayed_url = str(resultobj.select_one("div:nth-child(1) > a > div").text)
self.description = str(resultobj.find_all("div")[-1].text)
def __repr__(self) -> str:
return '<SearchResult title="{title}" ({url})>'.format(title=self.title, url=self.displayed_url)
    def as_dict(self) -> dict:
return {
"url": self.url,
"title": self.title,
"displayedURL": self.displayed_url,
"description": self.description
} | [
"[email protected]"
] | |
30463a6893689613709e9817f4c3415559c6cbd7 | 4f93dfb321f96d54a38db5645e4bc22f1091c9f8 | /index.py | a5ac30d3bb5ced9be89a6c2a8f0273c9313f0a78 | [] | no_license | Cybercube125/100-Days-of-Code | c76b485a51d0e841ca5a4782789349bb8c6ef5a2 | af049e16305d8c56940afb84b1ec5d9c93528cad | refs/heads/main | 2023-05-31T17:30:18.866224 | 2021-05-29T16:18:28 | 2021-05-29T16:18:28 | 369,295,209 | 0 | 0 | null | 2021-05-29T16:18:29 | 2021-05-20T17:57:37 | JavaScript | UTF-8 | Python | false | false | 49 | py | # Yes! my first python code!
print ("hello word") | [
"[email protected]"
] | |
abc41d47cf966fd630060950a5299a94b2fe8138 | 2edbf14b9423274293b1f804b65b3982644290ae | /library/migrations/0004_publishinghouse.py | f12fef5cfbc135f7886a118cb1d459b5b07f92ee | [] | no_license | kobtsev-m/Library-Site | fb4d2c197d4c7e01bf0a0100624724f723715ab6 | 80f2440bbf46366ad2eca25b25a6221f29c0f83c | refs/heads/master | 2023-01-02T02:28:19.352013 | 2020-10-21T21:59:29 | 2020-10-21T21:59:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | # Generated by Django 2.2.6 on 2020-10-19 12:49
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('library', '0003_auto_20201018_0119'),
]
operations = [
migrations.CreateModel(
name='PublishingHouse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField()),
('foundation_year', models.SmallIntegerField()),
('country', models.CharField(max_length=2)),
],
),
]
| [
"[email protected]"
] | |
13c62545d97b06087f70b2eed17fd540d01bc661 | f54fecc8bd9d7bb260d6a5d096bd6725de3fbb76 | /utilst.py | 918c54ff23de689908c46eadc0c354f1775f2bdf | [] | no_license | shrutarv/Time-Series-Classification | 72712c11ecf13bc77effd5c810067350e30ccef8 | dcc97340fac663502f05dd94e9a8e7e3a2acd45f | refs/heads/master | 2022-12-15T22:27:59.809235 | 2020-08-28T22:02:08 | 2020-08-28T22:02:08 | 291,153,642 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,259 | py |
import numpy as np
import pandas as pd
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
matplotlib.rcParams['font.family'] = 'sans-serif'
matplotlib.rcParams['font.sans-serif'] = 'Arial'
import os
import operator
import utils
from constants import UNIVARIATE_DATASET_NAMES as DATASET_NAMES
from constants import UNIVARIATE_DATASET_NAMES_2018 as DATASET_NAMES_2018
from constants import ARCHIVE_NAMES as ARCHIVE_NAMES
from constants import CLASSIFIERS
from constants import ITERATIONS
from constants import MTS_DATASET_NAMES
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_score
from sklearn.metrics import recall_score
from sklearn.preprocessing import LabelEncoder
from scipy.interpolate import interp1d
from scipy.io import loadmat
def readucr(filename):
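    """Load a UCR-style text file: one series per row, the class label in the
    first comma-separated column and the time-series values in the rest."""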
data = np.loadtxt(filename, delimiter=',')
Y = data[:, 0]
X = data[:, 1:]
return X, Y
def create_directory(directory_path):
if os.path.exists(directory_path):
return None
else:
try:
os.makedirs(directory_path)
except:
# in case another machine created the path meanwhile !:(
return None
return directory_path
def create_path(root_dir, classifier_name, archive_name):
output_directory = root_dir + '/results/' + classifier_name + '/' + archive_name + '/'
if os.path.exists(output_directory):
return None
else:
os.makedirs(output_directory)
return output_directory
def read_dataset(root_dir, archive_name, dataset_name):
datasets_dict = {}
cur_root_dir = root_dir.replace('-temp', '')
if archive_name == 'mts_archive':
file_name = cur_root_dir
#file_name = '/media/shrutarv/Drive/Job/Time Series analysis/Task 2/dl-4-tsc-master/dl-4-tsc-master/results/data-indus1/fcn/'
x_train = np.load(file_name + 'x_train.npy')
y_train = np.load(file_name + 'y_train.npy')
x_test = np.load(file_name + 'x_test.npy')
y_test = np.load(file_name + 'y_test.npy')
datasets_dict[dataset_name] = (x_train.copy(), y_train.copy(), x_test.copy(),
y_test.copy())
elif archive_name == 'UCRArchive_2018':
root_dir_dataset = cur_root_dir + '/archives/' + archive_name + '/' + dataset_name + '/'
df_train = pd.read_csv(root_dir_dataset + '/' + dataset_name + '_TRAIN.tsv', sep='\t', header=None)
df_test = pd.read_csv(root_dir_dataset + '/' + dataset_name + '_TEST.tsv', sep='\t', header=None)
y_train = df_train.values[:, 0]
y_test = df_test.values[:, 0]
x_train = df_train.drop(columns=[0])
x_test = df_test.drop(columns=[0])
x_train.columns = range(x_train.shape[1])
x_test.columns = range(x_test.shape[1])
x_train = x_train.values
x_test = x_test.values
# znorm
std_ = x_train.std(axis=1, keepdims=True)
std_[std_ == 0] = 1.0
x_train = (x_train - x_train.mean(axis=1, keepdims=True)) / std_
std_ = x_test.std(axis=1, keepdims=True)
std_[std_ == 0] = 1.0
x_test = (x_test - x_test.mean(axis=1, keepdims=True)) / std_
datasets_dict[dataset_name] = (x_train.copy(), y_train.copy(), x_test.copy(),
y_test.copy())
else:
file_name = cur_root_dir + '/archives/' + archive_name + '/' + dataset_name + '/' + dataset_name
x_train, y_train = readucr(file_name + '_TRAIN')
x_test, y_test = readucr(file_name + '_TEST')
datasets_dict[dataset_name] = (x_train.copy(), y_train.copy(), x_test.copy(),
y_test.copy())
return datasets_dict
def read_all_datasets(root_dir, archive_name, split_val=False):
datasets_dict = {}
cur_root_dir = root_dir.replace('-temp', '')
dataset_names_to_sort = []
if archive_name == 'mts_archive':
for dataset_name in MTS_DATASET_NAMES:
root_dir_dataset = cur_root_dir + '/archives/' + archive_name + '/' + dataset_name + '/'
x_train = np.load(root_dir_dataset + 'x_train.npy')
y_train = np.load(root_dir_dataset + 'y_train.npy')
x_test = np.load(root_dir_dataset + 'x_test.npy')
y_test = np.load(root_dir_dataset + 'y_test.npy')
datasets_dict[dataset_name] = (x_train.copy(), y_train.copy(), x_test.copy(),
y_test.copy())
elif archive_name == 'UCRArchive_2018':
for dataset_name in DATASET_NAMES_2018:
root_dir_dataset = cur_root_dir + '/archives/' + archive_name + '/' + dataset_name + '/'
df_train = pd.read_csv(root_dir_dataset + '/' + dataset_name + '_TRAIN.tsv', sep='\t', header=None)
df_test = pd.read_csv(root_dir_dataset + '/' + dataset_name + '_TEST.tsv', sep='\t', header=None)
y_train = df_train.values[:, 0]
y_test = df_test.values[:, 0]
x_train = df_train.drop(columns=[0])
x_test = df_test.drop(columns=[0])
x_train.columns = range(x_train.shape[1])
x_test.columns = range(x_test.shape[1])
x_train = x_train.values
x_test = x_test.values
# znorm
std_ = x_train.std(axis=1, keepdims=True)
std_[std_ == 0] = 1.0
x_train = (x_train - x_train.mean(axis=1, keepdims=True)) / std_
std_ = x_test.std(axis=1, keepdims=True)
std_[std_ == 0] = 1.0
x_test = (x_test - x_test.mean(axis=1, keepdims=True)) / std_
datasets_dict[dataset_name] = (x_train.copy(), y_train.copy(), x_test.copy(),
y_test.copy())
else:
for dataset_name in DATASET_NAMES:
root_dir_dataset = cur_root_dir + '/archives/' + archive_name + '/' + dataset_name + '/'
file_name = root_dir_dataset + dataset_name
x_train, y_train = readucr(file_name + '_TRAIN')
x_test, y_test = readucr(file_name + '_TEST')
datasets_dict[dataset_name] = (x_train.copy(), y_train.copy(), x_test.copy(),
y_test.copy())
dataset_names_to_sort.append((dataset_name, len(x_train)))
dataset_names_to_sort.sort(key=operator.itemgetter(1))
for i in range(len(DATASET_NAMES)):
DATASET_NAMES[i] = dataset_names_to_sort[i][0]
return datasets_dict
def get_func_length(x_train, x_test, func):
if func == min:
func_length = np.inf
else:
func_length = 0
n = x_train.shape[0]
for i in range(n):
func_length = func(func_length, x_train[i].shape[1])
n = x_test.shape[0]
for i in range(n):
func_length = func(func_length, x_test[i].shape[1])
return func_length
def transform_to_same_length(x, n_var, max_length):
n = x.shape[0]
# the new set in ucr form np array
ucr_x = np.zeros((n, max_length, n_var), dtype=np.float64)
# loop through each time series
for i in range(n):
mts = x[i]
curr_length = mts.shape[1]
idx = np.array(range(curr_length))
idx_new = np.linspace(0, idx.max(), max_length)
for j in range(n_var):
ts = mts[j]
            # cubic interpolation onto the new, common index grid
f = interp1d(idx, ts, kind='cubic')
new_ts = f(idx_new)
ucr_x[i, :, j] = new_ts
return ucr_x
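# Hedged usage sketch (illustrative only, never called in this module): two
# multivariate series of unequal length are resampled onto a common grid of 8 steps.
def _example_transform_to_same_length():
    demo = np.empty(2, dtype=object)
    demo[0] = np.random.rand(3, 5)  # 3 variables, 5 time steps
    demo[1] = np.random.rand(3, 7)  # 3 variables, 7 time steps
    return transform_to_same_length(demo, n_var=3, max_length=8)  # -> shape (2, 8, 3)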
def transform_mts_to_ucr_format():
#mts_root_dir = '/home/awasthi/Task2/data-indus1/'
mts_root_dir= '/home/awasthi/Task2/data-indus1/'
mts_out_dir = '/home/awasthi/Task2/data-indus1/'
dataset_name = 'data-indus1'
#for dataset_name in MTS_DATASET_NAMES:
# print('dataset_name',dataset_name)
out_dir = mts_out_dir
# if create_directory(out_dir) is None:
# print('Already_done')
# continue
a = loadmat(mts_root_dir + dataset_name + '.mat')
a = a['s']
a = a[0, 0]
dt = a.dtype.names
dt = list(dt)
for i in range(len(dt)):
if dt[i] == 'train':
x_train = a[i].reshape(max(a[i].shape))
elif dt[i] == 'test':
x_test = a[i].reshape(max(a[i].shape))
elif dt[i] == 'trainlabels':
y_train = a[i].reshape(max(a[i].shape))
elif dt[i] == 'testlabels':
y_test = a[i].reshape(max(a[i].shape))
# x_train = a[1][0]
# y_train = a[0][:,0]
# x_test = a[3][0]
# y_test = a[2][:,0]
n_var = x_train[0].shape[0]
max_length = get_func_length(x_train, x_test, func=max)
min_length = get_func_length(x_train, x_test, func=min)
print(dataset_name, 'max', max_length, 'min', min_length)
print()
# continue
x_train = transform_to_same_length(x_train, n_var, max_length)
x_test = transform_to_same_length(x_test, n_var, max_length)
# save them
np.save(out_dir + 'x_train.npy', x_train)
np.save(out_dir + 'y_train.npy', y_train)
np.save(out_dir + 'x_test.npy', x_test)
np.save(out_dir + 'y_test.npy', y_test)
print('Done')
def calculate_metrics(y_true, y_pred, duration, y_true_val=None, y_pred_val=None):
res = pd.DataFrame(data=np.zeros((1, 4), dtype=np.float), index=[0],
columns=['precision', 'accuracy', 'recall', 'duration'])
res['precision'] = precision_score(y_true, y_pred, average='macro')
res['accuracy'] = accuracy_score(y_true, y_pred)
if not y_true_val is None:
# this is useful when transfer learning is used with cross validation
res['accuracy_val'] = accuracy_score(y_true_val, y_pred_val)
res['recall'] = recall_score(y_true, y_pred, average='macro')
res['duration'] = duration
return res
def save_test_duration(file_name, test_duration):
res = pd.DataFrame(data=np.zeros((1, 1), dtype=np.float), index=[0],
columns=['test_duration'])
res['test_duration'] = test_duration
res.to_csv(file_name, index=False)
def generate_results_csv(output_file_name, root_dir):
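    # collect every df_metrics.csv under results/<classifier>/<archive>/<dataset> into one table, then average accuracy over iterations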
    res = pd.DataFrame(data=np.zeros((0, 7), dtype=np.float64), index=[],
columns=['classifier_name', 'archive_name', 'dataset_name',
'precision', 'accuracy', 'recall', 'duration'])
for classifier_name in CLASSIFIERS:
for archive_name in ARCHIVE_NAMES:
datasets_dict = read_all_datasets(root_dir, archive_name)
for it in range(ITERATIONS):
curr_archive_name = archive_name
if it != 0:
curr_archive_name = curr_archive_name + '_itr_' + str(it)
for dataset_name in datasets_dict.keys():
output_dir = root_dir + '/results/' + classifier_name + '/' \
+ curr_archive_name + '/' + dataset_name + '/' + 'df_metrics.csv'
if not os.path.exists(output_dir):
continue
df_metrics = pd.read_csv(output_dir)
df_metrics['classifier_name'] = classifier_name
df_metrics['archive_name'] = archive_name
df_metrics['dataset_name'] = dataset_name
res = pd.concat((res, df_metrics), axis=0, sort=False)
res.to_csv(root_dir + output_file_name, index=False)
    # aggregate the accuracy over the iterations on the same dataset
res = pd.DataFrame({
'accuracy': res.groupby(
['classifier_name', 'archive_name', 'dataset_name'])['accuracy'].mean()
}).reset_index()
return res
def plot_epochs_metric(hist, file_name, metric='loss'):
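    # plot the train/validation curves of one history metric and save the figure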
plt.figure()
plt.plot(hist.history[metric])
plt.plot(hist.history['val_' + metric])
plt.title('model ' + metric)
plt.ylabel(metric, fontsize='large')
plt.xlabel('epoch', fontsize='large')
plt.legend(['train', 'val'], loc='upper left')
plt.savefig(file_name, bbox_inches='tight')
plt.close()
def save_logs_t_leNet(output_directory, hist, y_pred, y_true, duration):
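    # save training history, test metrics and best-epoch (lowest training loss) statistics for the t-LeNet classifier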
hist_df = pd.DataFrame(hist.history)
hist_df.to_csv(output_directory + 'history.csv', index=False)
df_metrics = calculate_metrics(y_true, y_pred, duration)
df_metrics.to_csv(output_directory + 'df_metrics.csv', index=False)
index_best_model = hist_df['loss'].idxmin()
row_best_model = hist_df.loc[index_best_model]
    df_best_model = pd.DataFrame(data=np.zeros((1, 6), dtype=np.float64), index=[0],
columns=['best_model_train_loss', 'best_model_val_loss', 'best_model_train_acc',
'best_model_val_acc', 'best_model_learning_rate', 'best_model_nb_epoch'])
df_best_model['best_model_train_loss'] = row_best_model['loss']
df_best_model['best_model_val_loss'] = row_best_model['val_loss']
df_best_model['best_model_train_acc'] = row_best_model['acc']
df_best_model['best_model_val_acc'] = row_best_model['val_acc']
df_best_model['best_model_nb_epoch'] = index_best_model
df_best_model.to_csv(output_directory + 'df_best_model.csv', index=False)
# plot losses
plot_epochs_metric(hist, output_directory + 'epochs_loss.png')
def save_logs(output_directory, hist, y_pred, y_true, duration, lr=True, y_true_val=None, y_pred_val=None):
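    # save training history, test metrics and best-epoch statistics; optionally record the learning rate of the best epoch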
hist_df = pd.DataFrame(hist.history)
hist_df.to_csv(output_directory + 'history.csv', index=False)
df_metrics = calculate_metrics(y_true, y_pred, duration, y_true_val, y_pred_val)
df_metrics.to_csv(output_directory + 'df_metrics.csv', index=False)
index_best_model = hist_df['loss'].idxmin()
row_best_model = hist_df.loc[index_best_model]
    df_best_model = pd.DataFrame(data=np.zeros((1, 6), dtype=np.float64), index=[0],
columns=['best_model_train_loss', 'best_model_val_loss', 'best_model_train_acc',
'best_model_val_acc', 'best_model_learning_rate', 'best_model_nb_epoch'])
df_best_model['best_model_train_loss'] = row_best_model['loss']
df_best_model['best_model_val_loss'] = row_best_model['val_loss']
df_best_model['best_model_train_acc'] = row_best_model['accuracy']
df_best_model['best_model_val_acc'] = row_best_model['val_accuracy']
if lr == True:
df_best_model['best_model_learning_rate'] = row_best_model['lr']
df_best_model['best_model_nb_epoch'] = index_best_model
df_best_model.to_csv(output_directory + 'df_best_model.csv', index=False)
    # for FCN there is no hyperparameter fine-tuning - everything is static in the code
# plot losses
plot_epochs_metric(hist, output_directory + 'epochs_loss.png')
return df_metrics
def visualize_filter(root_dir):
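    # plot one first-layer convolution filter of a trained ResNet together with a raw and a convolved series for each class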
import tensorflow.keras as keras
classifier = 'resnet'
archive_name = 'UCRArchive_2018'
dataset_name = 'GunPoint'
datasets_dict = read_dataset(root_dir, archive_name, dataset_name)
x_train = datasets_dict[dataset_name][0]
y_train = datasets_dict[dataset_name][1]
x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], 1)
model = keras.models.load_model(
root_dir + 'results/' + classifier + '/' + archive_name + '/' + dataset_name + '/best_model.hdf5')
# filters
filters = model.layers[1].get_weights()[0]
new_input_layer = model.inputs
new_output_layer = [model.layers[1].output]
new_feed_forward = keras.backend.function(new_input_layer, new_output_layer)
classes = np.unique(y_train)
colors = [(255 / 255, 160 / 255, 14 / 255), (181 / 255, 87 / 255, 181 / 255)]
colors_conv = [(210 / 255, 0 / 255, 0 / 255), (27 / 255, 32 / 255, 101 / 255)]
idx = 10
idx_filter = 1
    filter_weights = filters[:, 0, idx_filter]
    plt.figure(1)
    plt.plot(filter_weights + 0.5, color='gray', label='filter')
for c in classes:
c_x_train = x_train[np.where(y_train == c)]
convolved_filter_1 = new_feed_forward([c_x_train])[0]
idx_c = int(c) - 1
plt.plot(c_x_train[idx], color=colors[idx_c], label='class' + str(idx_c) + '-raw')
plt.plot(convolved_filter_1[idx, :, idx_filter], color=colors_conv[idx_c], label='class' + str(idx_c) + '-conv')
plt.legend()
plt.savefig(root_dir + 'convolution-' + dataset_name + '.pdf')
return 1
def viz_perf_themes(root_dir, df):
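    # percentage of datasets won (rank 1) per dataset theme, for each classifier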
df_themes = df.copy()
themes_index = []
# add the themes
for dataset_name in df.index:
themes_index.append(utils.constants.dataset_types[dataset_name])
themes_index = np.array(themes_index)
themes, themes_counts = np.unique(themes_index, return_counts=True)
df_themes.index = themes_index
df_themes = df_themes.rank(axis=1, method='min', ascending=False)
df_themes = df_themes.where(df_themes.values == 1)
df_themes = df_themes.groupby(level=0).sum(axis=1)
df_themes['#'] = themes_counts
for classifier in CLASSIFIERS:
df_themes[classifier] = df_themes[classifier] / df_themes['#'] * 100
df_themes = df_themes.round(decimals=1)
df_themes.to_csv(root_dir + 'tab-perf-theme.csv')
def viz_perf_train_size(root_dir, df):
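    # average classifier rank grouped by training-set-size bins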
df_size = df.copy()
train_sizes = []
datasets_dict_ucr = read_all_datasets(root_dir, archive_name='UCR_TS_Archive_2015')
datasets_dict_mts = read_all_datasets(root_dir, archive_name='mts_archive')
datasets_dict = dict(datasets_dict_ucr, **datasets_dict_mts)
for dataset_name in df.index:
train_size = len(datasets_dict[dataset_name][0])
train_sizes.append(train_size)
train_sizes = np.array(train_sizes)
bins = np.array([0, 100, 400, 800, 99999])
train_size_index = np.digitize(train_sizes, bins)
train_size_index = bins[train_size_index]
df_size.index = train_size_index
df_size = df_size.rank(axis=1, method='min', ascending=False)
df_size = df_size.groupby(level=0, axis=0).mean()
df_size = df_size.round(decimals=2)
print(df_size.to_string())
df_size.to_csv(root_dir + 'tab-perf-train-size.csv')
def viz_perf_classes(root_dir, df):
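    # average classifier rank grouped by number-of-classes bins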
df_classes = df.copy()
class_numbers = []
datasets_dict_ucr = read_all_datasets(root_dir, archive_name='UCR_TS_Archive_2015')
datasets_dict_mts = read_all_datasets(root_dir, archive_name='mts_archive')
datasets_dict = dict(datasets_dict_ucr, **datasets_dict_mts)
for dataset_name in df.index:
train_size = len(np.unique(datasets_dict[dataset_name][1]))
class_numbers.append(train_size)
class_numbers = np.array(class_numbers)
bins = np.array([0, 3, 4, 6, 8, 13, 9999])
class_numbers_index = np.digitize(class_numbers, bins)
class_numbers_index = bins[class_numbers_index]
df_classes.index = class_numbers_index
df_classes = df_classes.rank(axis=1, method='min', ascending=False)
df_classes = df_classes.groupby(level=0, axis=0).mean()
df_classes = df_classes.round(decimals=2)
print(df_classes.to_string())
df_classes.to_csv(root_dir + 'tab-perf-classes.csv')
def viz_perf_length(root_dir, df):
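    # average classifier rank grouped by series-length bins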
df_lengths = df.copy()
lengths = []
datasets_dict_ucr = read_all_datasets(root_dir, archive_name='UCR_TS_Archive_2015')
datasets_dict_mts = read_all_datasets(root_dir, archive_name='mts_archive')
datasets_dict = dict(datasets_dict_ucr, **datasets_dict_mts)
for dataset_name in df.index:
length = datasets_dict[dataset_name][0].shape[1]
lengths.append(length)
lengths = np.array(lengths)
bins = np.array([0, 81, 251, 451, 700, 1001, 9999])
lengths_index = np.digitize(lengths, bins)
lengths_index = bins[lengths_index]
df_lengths.index = lengths_index
df_lengths = df_lengths.rank(axis=1, method='min', ascending=False)
df_lengths = df_lengths.groupby(level=0, axis=0).mean()
df_lengths = df_lengths.round(decimals=2)
print(df_lengths.to_string())
df_lengths.to_csv(root_dir + 'tab-perf-lengths.csv')
def viz_plot(root_dir, df):
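    # scatter plot of FCN versus ResNet accuracy, one point per dataset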
df_lengths = df.copy()
lengths = []
datasets_dict_ucr = read_all_datasets(root_dir, archive_name='UCR_TS_Archive_2015')
datasets_dict_mts = read_all_datasets(root_dir, archive_name='mts_archive')
datasets_dict = dict(datasets_dict_ucr, **datasets_dict_mts)
for dataset_name in df.index:
length = datasets_dict[dataset_name][0].shape[1]
lengths.append(length)
lengths_index = np.array(lengths)
df_lengths.index = lengths_index
plt.scatter(x=df_lengths['fcn'], y=df_lengths['resnet'])
plt.ylim(ymin=0, ymax=1.05)
plt.xlim(xmin=0, xmax=1.05)
# df_lengths['fcn']
plt.savefig(root_dir + 'plot.pdf')
def viz_for_survey_paper(root_dir, filename='results-ucr-mts.csv'):
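    # entry point for the survey visualisations; the per-theme/size/class/length tables are commented out, only the scatter plot is produced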
df = pd.read_csv(root_dir + filename, index_col=0)
df = df.T
df = df.round(decimals=2)
# get table performance per themes
# viz_perf_themes(root_dir,df)
# get table performance per train size
# viz_perf_train_size(root_dir,df)
# get table performance per classes
# viz_perf_classes(root_dir,df)
# get table performance per length
# viz_perf_length(root_dir,df)
# get plot
viz_plot(root_dir, df)
def viz_cam(root_dir):
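    # Class Activation Map: weight the last convolutional feature maps by the output-layer weights and overlay them on each series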
import tensorflow.keras as keras
import sklearn
classifier = 'resnet'
archive_name = 'UCRArchive_2018'
dataset_name = 'GunPoint'
if dataset_name == 'Gun_Point':
save_name = 'GunPoint'
else:
save_name = dataset_name
max_length = 2000
datasets_dict = read_dataset(root_dir, archive_name, dataset_name)
x_train = datasets_dict[dataset_name][0]
y_train = datasets_dict[dataset_name][1]
y_test = datasets_dict[dataset_name][3]
# transform to binary labels
enc = sklearn.preprocessing.OneHotEncoder()
enc.fit(np.concatenate((y_train, y_test), axis=0).reshape(-1, 1))
y_train_binary = enc.transform(y_train.reshape(-1, 1)).toarray()
x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], 1)
model = keras.models.load_model(
root_dir + 'results/' + classifier + '/' + archive_name + '/' + dataset_name + '/best_model.hdf5')
# filters
w_k_c = model.layers[-1].get_weights()[0] # weights for each filter k for each class c
# the same input
new_input_layer = model.inputs
# output is both the original as well as the before last layer
new_output_layer = [model.layers[-3].output, model.layers[-1].output]
new_feed_forward = keras.backend.function(new_input_layer, new_output_layer)
classes = np.unique(y_train)
for c in classes:
plt.figure()
count = 0
c_x_train = x_train[np.where(y_train == c)]
for ts in c_x_train:
ts = ts.reshape(1, -1, 1)
[conv_out, predicted] = new_feed_forward([ts])
pred_label = np.argmax(predicted)
orig_label = np.argmax(enc.transform([[c]]))
if pred_label == orig_label:
                cas = np.zeros(dtype=np.float64, shape=(conv_out.shape[1]))
for k, w in enumerate(w_k_c[:, orig_label]):
cas += w * conv_out[0, :, k]
minimum = np.min(cas)
cas = cas - minimum
cas = cas / max(cas)
cas = cas * 100
x = np.linspace(0, ts.shape[1] - 1, max_length, endpoint=True)
# linear interpolation to smooth
f = interp1d(range(ts.shape[1]), ts[0, :, 0])
y = f(x)
# if (y < -2.2).any():
# continue
f = interp1d(range(ts.shape[1]), cas)
cas = f(x).astype(int)
plt.scatter(x=x, y=y, c=cas, cmap='jet', marker='.', s=2, vmin=0, vmax=100, linewidths=0.0)
if dataset_name == 'Gun_Point':
if c == 1:
plt.yticks([-1.0, 0.0, 1.0, 2.0])
else:
plt.yticks([-2, -1.0, 0.0, 1.0, 2.0])
count += 1
cbar = plt.colorbar()
# cbar.ax.set_yticklabels([100,75,50,25,0])
plt.savefig(root_dir + '/temp/' + classifier + '-cam-' + save_name + '-class-' + str(int(c)) + '.png',
bbox_inches='tight', dpi=1080)
| [
"[email protected]"
] | |
c54e132e2c3ed48eb672402557a0e4308b32bd75 | d1e28c0ad42708faa19e5ec435590deca7fa3213 | /bin/pip3 | f3694030230c6bf38d6dcd4f5e9c16d7ab5af012 | [] | no_license | ranunup/trydjango-11 | d02853b447442c547bbb835e783c1f6e638a4eb8 | 847341664287f9d64e6e9d58f8f5e3d6b159b891 | refs/heads/master | 2020-03-10T14:49:15.093948 | 2018-04-13T17:49:11 | 2018-04-13T17:49:11 | 129,435,598 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 245 | #!/Users/ndoda/dev/python/django/trydjango-11/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
5af25377a0f2d60e9b5cfb74dc02e330c572e14c | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/8/utp.py | b049a5c5bcf9e29814b26a11d16bdd2b1177806c | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
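    # when the argument tokens are wrapped in standalone double-quote tokens, print what lies between them (or a blank line)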
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
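    # run each line of the script file; only the 'uTP' print command is recognised, anything else prints ERROR and stops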
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'uTP':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
7d609b1f3ea4936504ad93c5663c75f832096ee4 | cf676db9ee2d56a3c7f296278d5f2818e396068a | /shell.py | 9148011ebe90a1033e1b0f2133fb2763fd08de4c | [
"MIT"
] | permissive | lines-of-codes/ConfusedScript | 9c3043f51a54561a24e5a4079a6802cbc3ef0387 | d706c8dfaa3ad0eb203dec0f3087eb5c8999ff1f | refs/heads/main | 2023-05-13T03:20:35.863742 | 2021-06-07T16:17:15 | 2021-06-07T16:17:15 | 374,379,614 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,036 | py | import processor
from timeit import timeit
class RequestExit(Exception):
pass
print("// ConfusedScript Shell //")
print("Use \"exit ()\" (Without double quotes) or Press CTRL+C to exit")
printNone = False
try:
while True:
command = input("ConfusedScript > ")
if(command.startswith("xi (")): raise RequestExit
if(command.startswith("#df")):
scommand = command.split()
try:
if scommand[1] == "shellSettings":
if scommand[2] == "printWhenReturnNone":
if scommand[3] == "true":
printNone = True
continue
elif scommand[3] == "false":
printNone = False
continue
except IndexError:
print("IS: The Option you wanted to settings is required.")
out = processor.execute(command)
if not printNone:
            if out is None:
continue
else: print(out)
else: print(out)
except KeyboardInterrupt:
print("\nKeyboard interrupt recieved. Exiting...")
except RequestExit:
print("Exiting requested. Exiting...")
except Exception:
from traceback import print_exc
print_exc() | [
"[email protected]"
] | |
14e4c28cea28c51a2497f90a5c391d84aabb1828 | d14be9a07437f395c36c73aebfd1c5434ff4300e | /vmware_static_dhcp/cli.py | b7defeb0cbe357c0268b8710c0ca50b5f8014635 | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT"
] | permissive | zcutlip/vmware-static-dhcp | f6122cdb7ca0fcd895c536d3a23e2469bfceaedc | 4f7747703bca8f440c56407c5e1437cfe9ff8cba | refs/heads/master | 2020-09-08T11:13:41.192702 | 2019-11-12T03:11:03 | 2019-11-12T03:11:03 | 221,117,693 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | import sys
from .vmware_static_dhcp import main as vmware_main
def main():
vmware_main(sys.argv[1:])
| [
"[email protected]"
] | |
2b60a3dfa41d07aebae1a58a2e613f6e95eaa5d1 | a159732177503832bd6da137b6730c131bb6d60e | /pytorchTUT/302_classification.py | bc8a1bdf368b2283e49fe093637a1a50d8f7849e | [] | no_license | yulizi123/tutorials | aa53281620b23173dbc862054decd4dd43e4adb9 | f792615ef3388f8bb42759d99df2cd16ec48761e | refs/heads/master | 2021-01-20T08:25:31.671634 | 2017-05-03T12:41:03 | 2017-05-03T12:41:03 | 90,144,952 | 0 | 0 | null | 2017-05-03T12:10:59 | 2017-05-03T12:10:59 | null | UTF-8 | Python | false | false | 2,628 | py | """
Know more, visit 莫烦Python: https://morvanzhou.github.io/tutorials/
My Youtube Channel: https://www.youtube.com/user/MorvanZhou
Dependencies:
torch: 0.1.11
matplotlib
"""
import torch
from torch.autograd import Variable
import torch.nn.functional as F
import matplotlib.pyplot as plt
torch.manual_seed(1) # reproducible
# make fake data
n_data = torch.ones(100, 2)
x0 = torch.normal(2*n_data, 1)  # class0 x data (tensor), shape=(100, 2)
y0 = torch.zeros(100)           # class0 y data (tensor), shape=(100,)
x1 = torch.normal(-2*n_data, 1) # class1 x data (tensor), shape=(100, 2)
y1 = torch.ones(100)            # class1 y data (tensor), shape=(100,)
x = torch.cat((x0, x1), 0).type(torch.FloatTensor) # FloatTensor = 32-bit floating
y = torch.cat((y0, y1), ).type(torch.LongTensor) # LongTensor = 64-bit integer
# torch can only train on Variable, so convert them to Variable
x, y = Variable(x, requires_grad=False), Variable(y, requires_grad=False)
# plt.scatter(x.data.numpy(), y.data.numpy())
# plt.show()
class Net(torch.nn.Module):
def __init__(self, n_feature, n_hidden, n_output):
super(Net, self).__init__()
self.hidden = torch.nn.Linear(n_feature, n_hidden) # hidden layer
self.out = torch.nn.Linear(n_hidden, n_output) # output layer
def forward(self, x):
x = F.relu(self.hidden(x)) # activation function for hidden layer
x = self.out(x)
return x
net = Net(n_feature=2, n_hidden=10, n_output=2) # define the network
print(net) # net architecture
optimizer = torch.optim.SGD(net.parameters(), lr=0.02)
loss_func = torch.nn.CrossEntropyLoss() # the target label is not one-hotted
plt.ion() # something about plotting
plt.show()
for t in range(100):
out = net(x) # input x and predict based on x
loss = loss_func(out, y) # must be (1. nn output, 2. target), the target label is not one-hotted
optimizer.zero_grad() # clear gradients for next train
loss.backward() # backpropagation, compute gradients
optimizer.step() # apply gradients
if t % 2 == 0:
# plot and show learning process
plt.cla()
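        # the class with the highest softmax score is the prediction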
prediction = torch.max(F.softmax(out), 1)[1]
pred_y = prediction.data.numpy().squeeze()
target_y = y.data.numpy()
plt.scatter(x.data.numpy()[:, 0], x.data.numpy()[:, 1], c=pred_y, s=100, lw=0, cmap='RdYlGn')
accuracy = sum(pred_y == target_y)/200
plt.text(1.5, -4, 'Accuracy=%.2f' % accuracy, fontdict={'size': 20, 'color': 'red'})
plt.pause(0.1)
plt.ioff()
plt.show() | [
"[email protected]"
] |