metadata (dict) | text (string, lengths 60 to 3.49M)
---|---|
{
"source": "JoseAlanis/dpx_tools",
"score": 2
} |
#### File: JoseAlanis/dpx_tools/preprocessing.py
```python
import errno
import os
from pathlib import Path
import warnings
import numpy as np
import pandas as pd
from scipy.stats import median_abs_deviation as mad
from mne.io.base import BaseRaw
from mne.io import read_raw_bdf
from mne.filter import filter_data, notch_filter
from mne.time_frequency import psd_array_welch
from mne import find_events, events_from_annotations, Annotations
from mne_bids import BIDSPath, write_raw_bids
from config import montage, sourcedata_fname
from stats import sliding_window_correlation
def robust_z_score(values):
values = np.array(values)
robust_z = 0.67449 * (values - np.nanmedian(values)) / mad(values)
return robust_z
def sourcedata_to_bids(sourcedata_path,
subject, task, datatype, extension,
bids_path=None, events_channel=None, min_duration=0.0,
event_id=None, include_demographics=True, node=False):
"""
Parameters
----------
sourcedata_path : path-like
The path to the root directory of the dataset storage location.
The sourcedata/ directory should be structured according to the
`BIDS`_ standard for electroencephalography
(see :footcite:`pernet2019`).
subject : int | str
The subject ID. Corresponds to “sub”.
task : str
The experimental task. Corresponds to “task”.
datatype : str
        The type of data to look for (e.g., "eeg").
extension : str
The extension of the filename (e.g., ".bdf").
bids_path : path-like | None
        The root directory of the BIDS dataset. If None, `sourcedata_path` is
        used as `bids_path`.
events_channel : None | str
The name of the channel to use for identifying events in the data
(e.g., usually 'Status' for .bdf). Alternatively, one can pass
`events_channel="Annotations"` if events are to be extracted from the
file's `Annotations` (see Notes).
min_duration : float
The minimum duration of a change in the events channel required to
consider it as an event (in seconds). Only used if `events_channel` is
provided.
event_id : dict | None
Can be:
- **dict**: map descriptions (keys) to integer event codes (values).
Only the descriptions present will be mapped, others will be ignored.
- **None**: Map descriptions to unique integer values based on their
``sorted`` order.
include_demographics : bool
        Whether a `demographics/` directory is provided for each subject in
        the sourcedata.
node : bool
Whether to return the data structure for further processing.
Returns
-------
bids_path : Path
The path of the created data file.
Notes
-----
**Data structure**
The required structure of the `sourcedata/` directory is::
|sourcedata/
|--- sub-01/
|------ eeg/
|--------- sub-01_dpx_eeg.bdf
|--- sub-02/
|------ eeg/
|--------- sub-02_dpx_eeg.bdf
|--- sub-03/
|------ eeg/
...
Other data modalities can be included as follows::
|sourcedata/
|--- sub-01/
|------ demographics/
|--------- sub-01_dpx_demographics.tsv
|------ eeg/
|--------- sub-01_dpx_eeg.bdf
...
**Annotations**
Annotations are added to an instance of :class:`mne.io.Raw` as the attribute
:attr:`raw.annotations <mne.io.Raw.annotations>`
(see https://mne.tools/stable/generated/mne.Annotations.html).
"""
if bids_path is None:
bids_path = sourcedata_path
# check if directory exists
if not os.path.isdir(sourcedata_path):
raise FileNotFoundError(
errno.ENOENT, os.strerror(errno.ENOENT), sourcedata_path)
# get path for file in question
file_name = sourcedata_fname.format(sourcedata_path=sourcedata_path,
subject=subject,
task=task,
datatype=datatype,
extension=extension)
# 1) import the data ------------------------------------------------------
raw = read_raw_bdf(file_name, preload=False)
    # 2) get data specs, add correct channel types and EEG montage ------------
# sampling rate
sfreq = raw.info['sfreq']
    # channel names
channels = raw.info['ch_names']
# identify channel types based on matching names in montage
types = []
for channel in channels:
if channel in montage.ch_names:
types.append('eeg')
        elif channel.startswith('EOG') or channel.startswith('EXG'):
types.append('eog')
else:
types.append('stim')
# add channel types and eeg-montage
raw.set_channel_types(
{channel: typ for channel, typ in zip(channels, types)})
raw.set_montage(montage)
# 3) add subject information to `raw.info` --------------------------------
if include_demographics:
# compute approx. date of birth
# get measurement date from dataset info
date_of_record = raw.info['meas_date']
# convert to date format
date = date_of_record.strftime('%Y-%m-%d')
        # here, we compute only an approximation of the subject's birthday
        # to keep the date anonymous (at least to some degree)
demographics = sourcedata_fname.format(sourcedata_path=sourcedata_path,
subject=subject,
task=task,
datatype='demographics',
extension='.tsv')
demo = pd.read_csv(demographics, sep='\t', header=0)
age = demo[demo.subject_id == 'sub-' + str(subject).rjust(3, '0')].age
sex = demo[demo.subject_id == 'sub-' + str(subject).rjust(3, '0')].sex
year_of_birth = int(date.split('-')[0]) - int(age)
approx_birthday = (year_of_birth,
int(date[5:].split('-')[0]),
int(date[5:].split('-')[1]))
# add modified subject info to dataset
raw.info['subject_info'] = dict(id=subject,
sex=int(sex),
birthday=approx_birthday)
# frequency of power line
raw.info['line_freq'] = 50.0
# 4) add events as annotations --------------------------------------------
if events_channel is None and event_id is None:
pass
elif events_channel is None and event_id is not None:
warnings.warn('Ignoring `event_id` as no `events_channel` was '
'provided.')
else:
if events_channel is not None and events_channel in raw.ch_names:
# extract events from events channel
events = find_events(raw,
stim_channel=events_channel,
output='onset',
min_duration=min_duration)
elif events_channel == 'Annotations':
            # check that a valid `event_id` was provided
            if event_id is not None and not isinstance(event_id, dict):
                raise ValueError(
                    "Invalid `event_id` structure provided. `event_id` must "
                    "be a `dict` or None")
            # extract events from the file's annotations
            # (events_from_annotations returns the events array and a
            # description-to-code mapping; only the events are needed here)
            events, _ = events_from_annotations(raw,
                                                event_id=event_id,
                                                regexp=None)
else:
raise ValueError("`events_channel` must be one of the channels in "
"the dataset (i.e., one of `raw.ch_names`), "
"'Annotations', or None. Stopping execution.")
# events to data frame
events = pd.DataFrame(events,
columns=['onset', 'duration', 'description'])
# onset to seconds
events['onset_in_s'] = events['onset'] / sfreq
# sort by onset
events = events.sort_values(by=['onset_in_s'])
if event_id is not None:
# only keep relevant events
events = events.loc[events['description'].isin(event_id.values())]
        # create annotations object
annotations = Annotations(events['onset_in_s'],
events['duration'],
events['description'])
# apply to raw data
raw.set_annotations(annotations)
# 5) save raw data to a BIDS-compliant folder structure -------------------
output_path = BIDSPath(subject=f'{subject:03}',
task=task,
datatype=datatype,
root=bids_path)
# include events if provided
write_raw_bids(raw,
output_path,
overwrite=True)
if node is False:
return output_path
else:
return raw, output_path
# main function implementing the different bad-channel detection methods
def find_bad_channels(raw, picks='eeg', sfreq=None, channels=None,
detrend=False, method='correlation',
mad_threshold=1e-15, std_threshold=1e-15,
r_threshold=0.4, percent_threshold=0.01, time_step=1.0,
high_frequency_threshold=50.0,
return_z_scores=False,
n_jobs=1):
"""
Parameters
----------
    raw : mne.io.Raw | np.ndarray
        An instance of mne.io.Raw containing the raw data in which bad (i.e.,
        noisy) channels are presumed. Alternatively, data can be supplied as
        a 2-D array (channels x samples). In the latter case, `sfreq` and a
        list of channel names must be provided.
    picks : list | 'eeg'
        A list of channel names to be included in the analysis.
        Can be the string 'eeg' to use all EEG channels. Defaults to 'eeg'.
    sfreq : float | None
        The sampling frequency of the data in Hz. Only needed if `raw` is a
        2-D array.
    channels : list | None
        A list of channel names. Only needed if `raw` is a 2-D array.
    detrend : bool
        Whether to remove slow drifts (1.0 Hz high-pass) before the analysis.
        Defaults to False.
    method : str | list of str
        The detection method(s) to use: any of 'flat', 'deviation',
        'correlation', and 'high_frequency_noise'.
    mad_threshold : float
        Median absolute deviation below which a channel is considered flat.
    std_threshold : float
        Standard deviation below which a channel is considered flat.
    r_threshold : float
        Correlation below which a channel counts as poorly correlated with
        the other channels in a given time window. Defaults to 0.4.
    percent_threshold : float
        Fraction of windows with poor correlation above which a channel is
        marked as bad. Defaults to 0.01.
    time_step : int | float
        Length of the sliding correlation windows in seconds. Defaults to 1.0.
    high_frequency_threshold : float
        In Hertz (Hz). Defaults to 50.0.
    return_z_scores : bool
        Whether to also return the robust z-scores computed by the
        'deviation' method. Defaults to False.
    n_jobs : int
        Number of jobs to run in parallel. Defaults to 1.

    Returns
    -------
    bad_channels : dict
        Dictionary mapping each requested method to an array of channel
        names identified as bad.
"""
# arguments to be passed to pick_types
kwargs = {pick: True for pick in [picks]}
    # check that the input data can be handled by the function
if isinstance(raw, BaseRaw):
# only keep data from desired channels
inst = raw.copy().pick_types(**kwargs)
data = inst.get_data()
channels = inst.ch_names
sfreq = inst.info['sfreq']
elif isinstance(raw, np.ndarray):
if channels is None:
raise ValueError('If "raw" is not an instance of mne.io.Raw, '
'a list of channel names must be provided')
if sfreq is None:
raise ValueError('If "raw" is not an instance of mne.io.Raw, the '
'sampling frequency for the data must be provided')
data = raw
else:
raise ValueError('inst must be an instance of BaseRaw or a numpy array')
# remove slow drifts if specified
if detrend:
dat = filter_data(data, sfreq=sfreq, l_freq=1.0, h_freq=None)
else:
dat = data
# save shape of data
n_channels, n_samples = dat.shape
if n_channels != len(channels):
raise ValueError("Number and channels and data dimensions don't match")
# make sure method arguments are in a list
if not isinstance(method, list):
method = [method]
# placeholder for results
bad_channels = dict()
# 1) find channels with zero or near zero activity
if 'flat' in method:
# compute estimates of channel activity
mad_flats = mad(dat, scale=1, axis=1) < mad_threshold
std_flats = np.std(dat, axis=1) < std_threshold
# flat channels identified
flats = np.argwhere(np.logical_or(mad_flats, std_flats))
flats = np.asarray([channels[int(flat)] for flat in flats])
# warn user if too many channels were identified as flat
if flats.shape[0] > (n_channels / 2):
warnings.warn('Too many channels have been identified as "flat"! '
'Make sure the input values in "inst" are provided '
'on a volt scale. '
'Otherwise try choosing another (meaningful) '
'threshold for identification.')
bad_channels.update(flat=flats)
    # 2) find bad channels by deviation (high variability in amplitude)
if 'deviation' in method:
        # median absolute deviation (MAD) scores for each channel
mad_scores = [mad(dat[i, :]) for i in range(n_channels)]
# compute robust z-scores for each channel
rz_scores = robust_z_score(mad_scores)
# channels identified by deviation criterion
bad_deviation = [channels[i]
for i in np.where(np.abs(rz_scores) >= 5.0)[0]]
bad_channels.update(deviation=np.asarray(bad_deviation))
if return_z_scores:
bad_channels.update(deviation_z_scores=rz_scores)
# 3) find channels with low correlation to other channels
if 'correlation' in method:
# check that sampling frequency argument was provided
if sfreq is None:
raise ValueError('If "inst" is not an instance of BaseRaw a '
'sampling frequency must be provided. Usually '
'the sampling frequency of the EEG recording in'
'question.')
# compute channel to channel correlations
ch_corrs = sliding_window_correlation(dat, time_step=time_step,
sampling_frequency=sfreq)
# placeholder for results
max_r = np.ones((ch_corrs.shape[0], ch_corrs.shape[1]))
# loop through individual windows, extract the absolute correlations,
# and estimate the maximum correlation (defined as the 98th
# percentile of the channel-by-channel correlations)
for step in range(max_r.shape[0]):
# set diagonal to zero
corr_no_diag = np.subtract(ch_corrs[step, :, :],
np.diag(np.diag(ch_corrs[step, :, :])))
# get absolute correlations
abs_corr = np.abs(corr_no_diag)
# get 98th percentile
max_r[step, :] = np.percentile(abs_corr, 98, axis=0,
method='median_unbiased')
# check which channels correlate badly with the other channels (i.e.,
# are below correlation threshold) in a certain fraction of windows
# (bad_time_threshold)
thresholded_correlations = max_r < r_threshold
frac_bad_corr_windows = np.mean(thresholded_correlations, axis=0)
# find the corresponding channel names and return
bad_idxs = np.argwhere(frac_bad_corr_windows > percent_threshold)
uncorrelated_channels = [channels[int(bad)] for bad in bad_idxs]
bad_channels.update(correlation=np.asarray(uncorrelated_channels))
    # 4) find channels contaminated by high-frequency noise
    if 'high_frequency_noise' in method:
        if sfreq < 100.0:
            # `pass` alone would not skip the computation below, so the
            # PSD-based detection is placed in the else-branch
            warnings.warn('The sampling rate is too low to detect noise with '
                          'a frequency > 50.0 Hz. High-frequency noise '
                          'detection skipped.')
        else:
            # compute frequency power
            asds, freqs = psd_array_welch(dat, sfreq=sfreq, n_jobs=n_jobs)
            asds = np.sqrt(asds) * 1e6
            # compute noise ratios (high- vs. low-frequency power)
            noise_ratios = []
            for i in range(asds.shape[0]):
                high_f = asds[i, freqs >= high_frequency_threshold].sum()
                low_f = asds[i, freqs < high_frequency_threshold].sum()
                noise_ratio = high_f / low_f
                noise_ratios.append(noise_ratio)
            # compute robust z-scores
            rz_scores_hf = robust_z_score(noise_ratios)
            # channels identified by high-frequency criterion
            bad_freq = [channels[i]
                        for i in np.where(np.abs(rz_scores_hf) >= 5.0)[0]]
            bad_channels.update(high_frequency_noise=np.asarray(bad_freq))
return bad_channels
def robust_reference(raw, line_noise=None, n_jobs=1):
    """
    Parameters
    ----------
    raw : mne.io.Raw
        An instance of mne.io.Raw containing the raw data in which bad (i.e.,
        noisy) channels are presumed.
    line_noise : float | list | None
        Power line frequency (or frequencies) to remove with a notch filter.
        Defaults to None (no notch filter applied).
    n_jobs : int
        Number of jobs to run in parallel.

    Returns
    -------
    raw_copy : mne.io.Raw
        Copy of the data restricted to EEG channels, notch-filtered if
        `line_noise` was provided.
    """
    # make a copy of the data, only keeping EEG channels
    raw_copy = raw.copy().pick_types(eeg=True)
    if line_noise is not None:
        raw_copy = raw_copy.notch_filter(freqs=line_noise,
                                         picks=['eeg'],
                                         n_jobs=n_jobs)
    return raw_copy
``` |
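The `robust_z_score` helper above drives both the 'deviation' and 'high_frequency_noise' criteria: channels whose score sits 5 or more robust standard deviations from the median are flagged. A minimal, self-contained sketch of that rule (the MAD values below are made up for illustration):

```python
import numpy as np
from scipy.stats import median_abs_deviation as mad

def robust_z_score(values):
    # 0.67449 rescales the MAD so the score is comparable to a z-score
    values = np.array(values)
    return 0.67449 * (values - np.nanmedian(values)) / mad(values)

channel_mads = [1.0, 1.1, 0.9, 1.05, 6.0]  # hypothetical per-channel MAD scores
z = robust_z_score(channel_mads)
print(np.abs(z) >= 5.0)  # [False False False False  True] -> last channel flagged
```

Because median and MAD are insensitive to single extreme values, the one noisy channel does not inflate the spread estimate the way it would with a mean/standard-deviation z-score.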
{
"source": "JoseAlanis/supplementary_dpx_tt",
"score": 3
} |
#### File: JoseAlanis/supplementary_dpx_tt/viz.py
```python
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from sklearn.preprocessing import normalize
def plot_z_scores(z_scores, channels, bads=None, cmap='inferno', show=False):
cmap = cm.get_cmap(cmap)
# plot results
z_colors = normalize(
np.abs(z_scores).reshape((1, z_scores.shape[0]))).ravel()
props = dict(boxstyle='round', facecolor='white', alpha=0.5)
fig, ax = plt.subplots(figsize=(20, 6))
if z_scores.max() < 5.0:
y_lim = 5
else:
y_lim = int(z_scores.max() + 2)
    # draw the critical z-score line and its label once, outside the loop
    ax.axhline(y=5.0, xmin=-1.0, xmax=65,
               color='crimson', linestyle='dashed', linewidth=2.0)
    ax.text(-5.0, 5.0, 'crit. Z-score', fontsize=14,
            verticalalignment='center', horizontalalignment='center',
            color='crimson', bbox=props)
    for i in range(z_scores.shape[0]):
        ch = channels[i]
        # show channel names in red if channel was marked as bad
        if bads is not None and ch in bads:
            col = 'crimson'
        else:
            col = 'k'
        ax.bar(i, np.abs(z_scores[i]), width=0.9, color=cmap(z_colors[i]))
        ax.text(i, np.abs(z_scores[i]) + 0.25, ch, color=col,
                fontweight='bold', fontsize=9,
                ha='center', va='center', rotation=45)
ax.set_ylim(0, y_lim)
ax.set_xlim(-1, 64)
plt.title('EEG channel deviation', {'fontsize': 15, 'fontweight': 'bold'})
plt.xlabel('Channels', {'fontsize': 13}, labelpad=10)
plt.ylabel('Abs. Z-Score', {'fontsize': 13}, labelpad=10)
plt.xticks([])
plt.yticks(fontsize=12)
ax.spines['bottom'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_bounds(0, y_lim)
    if show:
        fig.show()
    else:
        plt.close(fig)
    return fig
def _connection_line(x, fig, sourceax, targetax, y=1.0,
y_source_transform="transAxes"):
"""Connect source and target plots with a line.
Connect source and target plots with a line, such as time series
    (source) and topoplots (target). Primarily used for plot_joint
functions.
"""
from matplotlib.lines import Line2D
trans_fig = fig.transFigure
trans_fig_inv = fig.transFigure.inverted()
xt, yt = trans_fig_inv.transform(targetax.transAxes.transform([.35, 1.075]))
xs, _ = trans_fig_inv.transform(sourceax.transData.transform([x, 0.]))
_, ys = trans_fig_inv.transform(getattr(sourceax, y_source_transform
).transform([0., y]))
return Line2D((xt, xs), (yt, ys), transform=trans_fig, color='black',
linestyle='-', linewidth=1.5, alpha=.95, zorder=1,
clip_on=False)
``` |
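A hedged usage sketch for `plot_z_scores` (synthetic scores and made-up channel names; assumes the module above is importable as `viz`):

```python
import numpy as np
from viz import plot_z_scores  # assumes viz.py from above is on the path

rng = np.random.default_rng(7)
z_scores = np.abs(rng.normal(0.0, 2.0, size=64))  # fake per-channel z-scores
channels = ['EEG %02d' % i for i in range(64)]
# mark everything past the critical cutoff as bad, mirroring the plot's line
bads = [ch for ch, z in zip(channels, z_scores) if z >= 5.0]

fig = plot_z_scores(z_scores, channels, bads=bads, show=False)
fig.savefig('channel_z_scores.png')
```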
{
"source": "JoseAlban/etc",
"score": 3
} |
#### File: babylon/src/example.py
```python
import SimpleHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
import SocketServer
import hashlib
import json
import urlparse
MAX_CHARS = 128
SHORT_URL_CACHE = dict()
PORT = 8082
class Testhandler(SimpleHTTPRequestHandler):
def _set_headers(self):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
print "got get request %s" % (self.path)
if self.path == '/':
self.path = '/index.html'
return SimpleHTTPRequestHandler.do_GET(self)
def do_POST(self):
print "got post!!"
content_len = int(self.headers.getheader('content-length', 0))
post_body = self.rfile.read(content_len)
test_data = json.loads(post_body)
print "post_body(%s)" % (test_data)
# validate
parsed = urlparse.urlparse(test_data['url'])
valid = all([parsed.scheme, parsed.netloc])
        if not valid:
            self.send_response(400)
            self.end_headers()
            self.wfile.write(json.dumps(dict(error='invalid url')))
        else:
            self._set_headers()
            shortened = url_shortener(test_data['url'])
            self.wfile.write(json.dumps(dict(shortened_url=shortened)))
httpd = SocketServer.TCPServer(("", PORT), Testhandler)
def get_shortened(shortened_url):
return SHORT_URL_CACHE.get(shortened_url, None)
def url_shortener(url):
shortened_url = None
for i in range(1, MAX_CHARS):
shortened_url = hashlib.sha512(url).hexdigest()[:i]
if not get_shortened(shortened_url):
break
print SHORT_URL_CACHE
SHORT_URL_CACHE[shortened_url] = url
return shortened_url
if __name__ == '__main__':
print "serving at port", PORT
httpd.serve_forever()
``` |
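The shortener takes the shortest unused prefix of the URL's SHA-512 hex digest, growing it one character at a time on collision. A Python 3 sketch of the same prefix-lengthening idea (names here are mine, not from the source):

```python
import hashlib

CACHE = {}  # shortened prefix -> original URL

def shorten(url, max_chars=128):
    digest = hashlib.sha512(url.encode('utf-8')).hexdigest()
    for i in range(1, max_chars):
        candidate = digest[:i]
        # grow the prefix until it is unused (or already maps to this URL)
        if candidate not in CACHE or CACHE[candidate] == url:
            CACHE[candidate] = url
            return candidate
    raise RuntimeError('exhausted prefix space')

print(shorten('https://example.com'))  # a 1-char prefix on an empty cache
```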
{
"source": "josealbertorodriguesjunior/python-basics",
"score": 4
} |
#### File: python-basics/classes-and-objects/app.py
```python
class Vehicle:
name = ""
kind = "car"
color = ""
value = 100.00
def description(self):
desc_str = "%s is a %s %s worth $%.2f." % (self.name, self.color, self.kind, self.value)
return desc_str
# your code goes here
car1 = Vehicle()
car2 = Vehicle()
#car1 sets
car1.color = "Red"
car1.kind = "Convertible"
car1.value = 60000.00
car1.name = "Fer"
#car2 sets
car2.color = "Blue"
car2.kind = "Van"
car2.value = 10000.00
car2.name = "Jump"
# test code
print(car1.description())
print(car2.description())
``` |
{
"source": "josealeixopc/get-it-done",
"score": 3
} |
#### File: josealeixopc/get-it-done/get-it-done.py
```python
import sys
import getpass
import subprocess
import os
from os import path
def exit_error(error):
print(error, file=sys.stderr)
exit(1)
ini_local = path.expanduser(path.join("~", ".config/get-it-done.ini"))
ini_global = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'sites.ini')
if "linux" in sys.platform:
if "ubuntu" in
restart_network_command = ["/etc/init.d/network-manager", "restart"]
elif "darwin" in sys.platform:
restart_network_command = ["dscacheutil", "-flushcache"]
elif "win32" in sys.platform:
restart_network_command = ["ipconfig", "/flushdns"]
else:
# Intention isn't to exit, as it still works, but just requires some
# intervention on the user's part.
message = '"Please contribute DNS cache flush command on GitHub."'
restart_network_command = ['echo', message]
def ini_to_array(ini_file):
# this enables the ini file to be written like
# sites = google.com, facebook.com, quora.com ....
if os.path.exists(ini_file):
f = open(ini_file)
sites = []
for line in f:
key, value = [each.strip() for each in line.partition("=")[::2]]
if key == "sites":
for item in [each.strip() for each in value.split(",")]:
sites.append(item)
return sites
else:
return []
hosts_file = '/etc/hosts'
if "win32" in sys.platform:
hosts_file = '/Windows/System32/drivers/etc/hosts'
start_token = '## start-gsd'
end_token = '## end-gsd'
site_list = ini_to_array(ini_global) + ini_to_array(ini_local)
def rehash():
subprocess.check_call(restart_network_command)
def work():
    hFile = open(hosts_file, 'a+')
    hFile.seek(0)  # 'a+' opens positioned at end of file; rewind before reading
    contents = hFile.read()
if start_token in contents and end_token in contents:
exit_error("Work mode already set.")
print(start_token, file=hFile)
# remove duplicates by converting list to a set
for site in set(site_list):
print("127.0.0.1\t" + site, file=hFile)
print("127.0.0.1\twww." + site, file=hFile)
print(end_token, file=hFile)
rehash()
def play():
hosts_file_handle = open(hosts_file, "r+")
lines = hosts_file_handle.readlines()
startIndex = -1
for index, line in enumerate(lines):
if line.strip() == start_token:
startIndex = index
if startIndex > -1:
lines = lines[0:startIndex]
hosts_file_handle.seek(0)
hosts_file_handle.write(''.join(lines))
hosts_file_handle.truncate()
rehash()
def main():
if getpass.getuser() != 'root' and 'win32' not in sys.platform:
exit_error('Please run script as root.')
if len(sys.argv) != 2:
exit_error('usage: ' + sys.argv[0] + ' [work|play]')
try:
{"work": work, "play": play}[sys.argv[1]]()
except KeyError:
exit_error('usage: ' + sys.argv[0] + ' [work|play]')
if __name__ == "__main__":
main()
``` |
{
"source": "JoseALermaIII/automatepracticeprojects",
"score": 5
} |
#### File: src/Ch03/P1_makeCollatzSeq.py
```python
def collatz(number: int) -> int:
"""Collatz
If number is even, then return `number // 2`.
If number is odd, then return `3 * number + 1`.
Args:
number: Integer to generate a Collatz conjecture term for.
Returns:
Integer that is either a quotient or a product and sum.
"""
if not number % 2:
return number // 2
else:
return 3 * number + 1
def main():
n = int(input("Input a number: "))
while n != 1:
print(n)
n = collatz(n)
print(n) # When n == 1
# If program is run (instead of imported), call main():
if __name__ == "__main__":
main()
```
#### File: src/Ch05/P1_gameInventory.py
```python
stuff = {'rope': 1, 'torch': 6, 'gold coin': 42, 'dagger': 1, 'arrow': 12}
def displayInventory(inventory: dict) -> None:
"""Display inventory
Displays each key in a given inventory dictionary.
Args:
inventory: Inventory dictionary to display.
Returns:
None. Prints out inventory.
"""
print("Inventory:")
item_total = 0
for k, v in inventory.items():
item_total += v
print(str(v) + " " + k)
print("Total number of items: " + str(item_total))
def main():
displayInventory(stuff)
if __name__ == '__main__':
main()
```
#### File: src/Ch07/P1_strongPwDetect.py
```python
import re
def is_strong_pw(text: str) -> bool:
"""Is strong password
    Uses four :py:mod:`re` patterns to check that a given text is at least 8 numbers
    and characters long, has at least one uppercase and one lowercase character, and
    has at least one digit.
Args:
text: String containing password to test strength of.
Returns:
True if the given text matches the regex patterns, False otherwise.
"""
    length_regex = re.compile(r"[\d\w]{8,}")  #: At least 8 numbers and characters
    upper_regex = re.compile(r"[A-Z]")  #: At least 1 uppercase character
    lower_regex = re.compile(r"[a-z]")  #: At least 1 lowercase character
    digit_regex = re.compile(r"[\d]+")  #: At least one digit
    if not length_regex.search(text):
        return False
    if not digit_regex.search(text):
        return False
    if not upper_regex.search(text):
        return False
    if not lower_regex.search(text):
        return False
    return True
def main():
password = "<PASSWORD>"
print(is_strong_pw(password))
if __name__ == '__main__':
main()
```
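The three rules above (length, case mix, digit) reduce to independent searches. An equivalent, self-contained sketch of the same rule set (my phrasing of the checks, not the book's):

```python
import re

def is_strong_pw(text):
    # at least 8 characters, one lowercase, one uppercase, and one digit
    return all([len(text) >= 8,
                re.search(r'[a-z]', text),
                re.search(r'[A-Z]', text),
                re.search(r'\d', text)])

assert is_strong_pw('Abcdefg1')
assert not is_strong_pw('abcdefg1')  # no uppercase letter
assert not is_strong_pw('Abc1')      # too short
```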
#### File: src/Ch09/P2_deleteBigFiles.py
```python
import os
def delete_big_files(folder: str = None, filesize: int = None) -> None:
"""Delete big files
Checks files in given folder (and subfolders) for given filesize. If greater,
file is deleted.
Args:
folder: String with folder to check files of. Relative paths are okay.
        filesize: Maximum allowed size (in bytes) for files in given folder.
Returns:
None. Deletes files.
Raises:
AttributeError: If `folder` or `filesize` are not given.
Note:
In debug mode - files to delete are printed to terminal.
Uncomment after testing.
"""
if folder is None:
raise AttributeError('folder must be given.')
if filesize is None:
raise AttributeError('filesize must be given.')
folder = os.path.abspath(folder)
    for foldername, subfolders, filenames in os.walk(folder):
        for filename in filenames:
            # getsize needs the full path, not just the bare file name
            file_path = os.path.join(foldername, filename)
            if os.path.getsize(file_path) > filesize:
                print(file_path)  # DEBUG
                #os.unlink(file_path)  # Uncomment after testing
def main():
folder = "./"
filesize = 100 * (1024 ** 2) # Where (1024 ** 2) == 1 MiB
delete_big_files(folder, filesize)
if __name__ == '__main__':
main()
```
#### File: src/Ch11/P4_linkVerification.py
```python
def main():
import requests, bs4, os
from urllib.request import urlretrieve
# Fetch page
res = requests.get("http://JoseALerma.com")
res.raise_for_status() # raise error if nothing fetched
soup = bs4.BeautifulSoup(res.text, "lxml")
# Parse page for all links
anchors = soup.find_all('a')
links = []
for anchor in anchors:
link = anchor.get("href")
if str(link).startswith("http"):
links.append(link)
# Add code 404 pages
links.append("http://JoseALerma.com/potato")
links.append("http://JoseALerma.com/carrot")
# Download every linked page
os.makedirs("pages", exist_ok=True) # Save in ./pages
for link in links:
try:
            res = requests.head(link)  # Only fetch headers (HEAD request) for speed
if res.status_code == 404:
# Print code 404 pages
print("Page not found: %s" % link)
else:
filepath = os.path.join("pages", os.path.basename(link + ".html"))
urlretrieve(link, filepath)
except requests.exceptions.ConnectionError:
print("Unable to connect to: %s" % link)
if __name__ == '__main__':
main()
```
#### File: src/Ch12/P1_multiplicationTable.py
```python
def main():
import sys
import openpyxl
from openpyxl.styles import Font
# Get argument from commandline
    size = int(sys.argv[1])
# Build table
wb = openpyxl.Workbook()
sheet = wb.active
row = []
for i in range(1, size + 1):
row.append(i)
column = row
# Make labels
for element in row:
sheet.cell(row=1, column=element + 1).value = element
sheet.cell(row=element + 1, column=1).value = element
col = sheet.column_dimensions['A']
col.font = Font(bold=True)
ro = sheet.row_dimensions[1]
ro.font = Font(bold=True)
# Calculate table values
for element in row:
for element2 in column:
sheet.cell(row=element + 1, column=element2 + 1).value = element * element2
# Save table
wb.save("multTable.xlsx")
if __name__ == '__main__':
main()
```
#### File: src/Ch15/P1_prettifiedStopwatch.py
```python
def main():
import time
import pyperclip
# Display the program's instructions
print("Press ENTER to begin. Afterwards, press ENTER to 'click' the stopwatch. "
"Press CTRL-C to quit.")
input() # Press Enter to begin
print('Started.')
startTime = time.time() # Get the first lap's start time
lastTime = startTime
lapNum = 1
# Start tracking the lap times.
try:
while True:
input()
lapTime = round(time.time() - lastTime, 2)
totalTime = round(time.time() - startTime, 2)
output = 'Lap #' + str(lapNum).rjust(2) + ':' + str(totalTime).rjust(6) + ' (' + str(lapTime).rjust(6) + ')'
print(output, end='')
pyperclip.copy(output) # copy to clipboard
lapNum += 1
lastTime = time.time() # reset the last lap time
except KeyboardInterrupt:
# Handle the CTRL-C exception to keep its error message from displaying.
print("\nDone.")
if __name__ == '__main__':
main()
```
#### File: src/Ch16/P1_assignChores.py
```python
def main():
import openpyxl, random, smtplib, datetime
# Open the spreadsheet and get the lists of data.
wb = openpyxl.load_workbook('choresList.xlsx')
sheet = wb['Sheet1']
names, emails, chores, prev_chores = [], [], [], []
for row in range(2, sheet.max_row + 1): # skip title row
name = sheet['A' + str(row)].value
email = sheet['B' + str(row)].value
chore = sheet['C' + str(row)].value
prev_chore = sheet['D' + str(row)].value
names.append(name)
emails.append(email)
chores.append(chore)
prev_chores.append(prev_chore)
# Run weekly
saved_time = sheet['E2'].value
interval = datetime.timedelta(days=7)
now = datetime.datetime.now()
if saved_time is None:
saved_time = now - interval # First run, so it's been a week
timedelta = saved_time + interval
if timedelta > now:
time_left = round((timedelta - now).total_seconds()/60, 2)
print(f"RuntimeError: Need to wait {time_left} minutes before running again.")
raise RuntimeError
else:
sheet['E2'].value = now # save to spreadsheet
# Log in to email account.
with open('../smtp_info') as config:
myEmail, password, server, port = config.read().splitlines()
smtpObj = smtplib.SMTP_SSL(server, port) # Using port 465
smtpObj.ehlo()
smtpObj.login(myEmail, password)
# Randomly assign chores
for i in range(0, len(names)):
random_chore = random.choice(chores)
# Check previous chore before assignment
while random_chore == prev_chores[i] and len(chores) > 1:
random_chore = random.choice(chores)
# Keep track of chores assigned
sheet['D' + str(i + 2)].value = random_chore
chores.remove(random_chore) # remove assigned chore from pool
# Send email.
body = "Subject: Chore for the Week: %s.\nDear %s,\n\nThis week, you're in charge of:\n%s. " \
"\n\nThank you in advance for your efforts!" % (random_chore, names[i], random_chore)
print(f'Sending email to {emails[i]}...')
sendmailStatus = smtpObj.sendmail(myEmail, emails[i], body)
if sendmailStatus != {}:
print(f'There was a problem sending email to {emails[i]}: {sendmailStatus}')
smtpObj.quit()
wb.save('choresList.xlsx')
if __name__ == '__main__':
main()
```
#### File: src/Ch16/P2_rememberUmbrella.py
```python
import requests, bs4, datetime
def get_weather(url_arg: str) -> str:
"""Get weather
Uses :py:mod:`requests` to download given weather page url, then uses :py:mod:`bs4` to get
the current weather data text.
Args:
url_arg: String containing url to specified city's http://weather.gov/ weather page.
Returns:
String with current weather data text.
"""
# Download url_arg and soupify
res = requests.get(url_arg)
res.raise_for_status()
soup = bs4.BeautifulSoup(res.text, 'lxml')
# Parse current weather from soup
weather_element = soup.select('.myforecast-current')
return weather_element[0].getText()
def remember_umbrella(weather_arg: str) -> bool:
"""Remember umbrella
Checks current weather data text from :meth:`get_weather` for keywords indicating rain.
Args:
weather_arg: String containing current weather text of specified city.
Returns:
True if any of the rain keywords are found, False otherwise.
"""
# Check weather_arg for rain
tokens = ['rain', 't-storms']
"""list: Strings of keywords that indicate rain."""
weather_arg = weather_arg.lower() # To match tokens' case
for token in tokens:
if token in weather_arg:
return True
return False
def check_time(time_arg: datetime.time) -> bool:
"""Check time
Checks if given time is after current time as given by :meth:`datetime.datetime.now`.
Args:
time_arg: :class:`datetime.time` object to compare with current time.
Returns:
True if given time is after current time.
"""
# Check for time_arg
time_now = datetime.datetime.now().time()
if time_now < time_arg:
print(f'RuntimeError: can\'t run until {time_arg}')
return False
return True
def main():
import time
from books.AutomateTheBoringStuff.Ch16.P5_textMyself import textmyself
# Wait for wake_time
sleep_time = datetime.timedelta(minutes=5)
wake_time = datetime.time(hour=5)
while not check_time(wake_time):
time.sleep(sleep_time.total_seconds())
# Get current weather
url = 'https://forecast.weather.gov/MapClick.php?lat=30.26759000000004&lon=-97.74298999999996'
weather = get_weather(url)
# If raining, text cellphone
if remember_umbrella(weather):
message = f'Bring an umbrella, there\'s {weather.lower()}'
textmyself(message)
# If run directly (instead of imported), run main()
if __name__ == '__main__':
main()
``` |
{
"source": "JoseALermaIII/impracticalpythonprojects",
"score": 4
} |
#### File: ch04/practice/p2_identify_cipher.py
```python
from collections import Counter
def identify_cipher(ciphertext: str, threshold: float) -> bool:
"""Identify letter transposition or substitution cipher.
Compare most frequent letters in **ciphertext** with the most frequent
letters in the English alphabet. If above **threshold**, it is a letter
transposition cipher. If not, it is a letter substitution cipher.
Args:
ciphertext (str): Encrypted message to identify.
threshold (float): Percent match in decimal form.
Returns:
:py:obj:`True` if the **ciphertext** is a letter transposition cipher.
:py:obj:`False` otherwise.
"""
most_freq = 'etaoinshrdlu'
# Convert most frequent English letters into a Counter.
english_freq = Counter(most_freq)
# Identify most frequent letters in ciphertext and convert into Counter.
ciphertext_freq = Counter([i[0] for i in
Counter(ciphertext.replace(' ', ''))
.most_common(len(most_freq))])
# Find letters that they have in common.
intersection = english_freq & ciphertext_freq
# Count letters they had in common.
count = len(intersection.keys())
if count / len(most_freq) >= threshold:
return True
return False
def is_transposition(ciphertext: str) -> bool:
"""Identify letter transposition cipher.
Wrapper for :func:`identify_cipher`. **threshold** defaults to ``0.75``.
Args:
ciphertext (str): Encrypted message to identify.
Returns:
:py:obj:`True` if the **ciphertext** is a letter transposition cipher.
:py:obj:`False` otherwise.
"""
return identify_cipher(ciphertext, 0.75)
def is_substitution(ciphertext: str) -> bool:
"""Identify letter substitution cipher.
Wrapper for :func:`identify_cipher`. **threshold** defaults to ``0.45``.
Args:
ciphertext (str): Encrypted message to identify.
Returns:
:py:obj:`True` if the **ciphertext** is a letter substitution cipher.
:py:obj:`False` otherwise.
"""
return not identify_cipher(ciphertext, 0.45)
def main(ciphertext: str = None) -> None:
"""Demonstrate the cipher identifier.
This is only supposed to be a demo, but coverage necessitates
excessiveness.
Args:
ciphertext (str): Encrypted letter transposition or letter
substitution cipher to demonstrate.
Returns:
:py:obj:`None`. Identifies **ciphertext**'s cipher.
"""
print('I can tell the difference between a letter transposition cipher '
'and a letter\nsubstitution cipher - like those used in decoder '
'rings. Sorry-not-sorry that\nyou collected all those box tops.\n')
if ciphertext is None:
# Used key of XCTJYGPIUWMQBDESOLKZNHFRVA in Al Sweigart's
# Cracking Codes with Python simpleSubCipher.py
ciphertext = 'ziy yxpqy ixk qxdjyj cnz ziy dykz uk ybszv'
print(f'Testing cipher: {ciphertext}\n')
if is_substitution(ciphertext):
print('I hereby decree that this is a pitiable attempt at a '
'substitution cipher.\n')
else:
print('Hmm, I declare this is a pathetic attempt at a transposition '
'cipher.\n')
if __name__ == '__main__':
main()
```
#### File: ch04/practice/p4_generate_keys.py
```python
from itertools import combinations
def generate_keys(length: int) -> list:
"""Generate all possible route cipher keys.
Generates a list of all possible route cipher keys of **length**.
Args:
length (int): Length of route cipher key.
Returns:
List of tuples of integers representing all possible route cipher
keys of **length**.
"""
result = []
master_key = range(1, length + 1)
# Get all possible combinations of direction (pos/neg) of length
combs = set(combinations([-1, 1] * length, length)) # Remove repeats
for comb in combs:
result.append(tuple(sign * key for sign, key in zip(comb, master_key)))
return result
def main():
"""Demonstrate the key generator."""
print('Given a key length, I can generate all possible route cipher '
'keys of that\nlength. I have a lot of free time.\n')
length = 3
print(f'Making keys of length: {length}')
print(generate_keys(length))
if __name__ == '__main__':
main()
```
#### File: src/ch05/p1_encode_null.py
```python
import os
from src.ch01.challenge.c2_name_generator import build_name_list, split_names
from src.ch04.challenge.c1_encode_route import format_plaintext
def encode_null(message: str, word_list: list) -> list:
"""Encode plaintext message with null cipher.
Embed **message** in a list of words using **word_list**. Use second
letter in first word of cipherlist, then third letter in second word of
cipherlist, and repeat until **message** is embedded in cipherlist.
Args:
message (str): Message to encrypt with null cipher. Spaces and
punctuation are okay, but will be removed. Uppercase converted
to lowercase.
word_list (list): List of words to build cipherlist. The
more the merrier.
Returns:
List of words with **message** embedded as described. Context
is *not* provided.
Raises:
ValueError: if the list of names doesn't have a name with the needed
letter.
"""
message = ''.join(format_plaintext(message))
cipherlist = []
for letter in message:
for word in word_list:
if all([len(word) > 2, word not in cipherlist]):
# Even numbered word, use second letter.
if len(cipherlist) % 2 == 0 and \
word[1].lower() == letter:
cipherlist.append(word)
break
# Odd numbered word, use third letter.
elif len(cipherlist) % 2 != 0 and \
word[2].lower() == letter:
cipherlist.append(word)
break
if word == word_list[-1]:
if len(cipherlist) % 2 == 0:
place = 'second'
else:
place = 'third'
raise ValueError(f'Missing word with {place} letter of: '
f'{letter}')
return cipherlist
def main():
"""Demonstrate null cipher encoder.
Encode a message in a list of last names. First last name in list
isn't used and some unused last names are added near the beginning
of the list.
Tip:
The website `bestwordlist.com`_ helped with the missing names.
.. _bestwordlist.com: https://www.bestwordlist.com
"""
message = 'Say the word and we riot'
# Build last names from Chapter 1 name generator.
folder = os.path.abspath('../../src/ch01/challenge/c2files/')
last_names = split_names(build_name_list(folder))['last']
# Add missing names for message.
last_names.extend(['Asher', 'Dwiles', 'Stone'])
# Insert unused last names near beginning of cipherlist.
cipherlist = encode_null(message, last_names)
for name in last_names:
if name not in cipherlist:
cipherlist.insert(0, name)
break
cipherlist.insert(4, 'Scrooge')
cipherlist.insert(7, 'Nero')
# Output encrypted message with context.
print('Hi Mom,\n\nPlease send me stamps, labels, and stationery to write '
'thank you cards for the\nfollowing families:\n')
print(*cipherlist, sep='\n')
print('\nThanks so much for everything.\n\nLuv ya\' lots,\n\n<NAME>')
if __name__ == '__main__':
main()
```
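Decoding reverses the embedding rule stated in the docstring: the second letter of the first word, the third letter of the second, alternating down the list. A minimal decoder sketch (the function name and sample names are mine, for illustration):

```python
def decode_null(cipherlist):
    # even-indexed words hide their 2nd letter, odd-indexed words their 3rd
    return ''.join(word[1 + i % 2].lower()
                   for i, word in enumerate(cipherlist))

print(decode_null(['Astor', 'Blake', 'Ayers']))  # -> 'say'
```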
#### File: src/ch06/c1_invisible_ink_mono.py
```python
from pathlib import Path, PurePath
from platform import system
import docx
from docx.shared import RGBColor
from src.ch06.p1_invisible_ink import get_text
def check_fit(plaintext: list, ciphertext: list) -> int:
"""Check if ciphertext can fit in plaintext's whitespace.
Sum number of blanks in **plaintext** and compare to number of characters
in **ciphertext** to see if it can fit.
Args:
plaintext (list): Paragraphs of a fake message in a list of strings
(likely from :func:`~src.ch06.p1_invisible_ink.get_text`).
ciphertext (list): Paragraphs of an encrypted message in a list of
strings (likely from :func:`~src.ch06.p1_invisible_ink.get_text`).
Returns:
Integer representing the number of needed blanks to fit
**ciphertext** in **plaintext**. ``0`` would mean that **ciphertext**
can fit in **plaintext**.
Note:
To separate words, the blanks in **ciphertext** count toward the
needed length of **plaintext**. By contrast, blank lines in
**plaintext** do not count.
"""
blanks = sum(line.count(' ') for line in plaintext if line != '')
letters = sum(len(line) for line in ciphertext if line != '')
if blanks >= letters:
return 0
return letters - blanks
def write_invisible(plaintext: list, ciphertext: list,
template_path: str = None,
filename: str = 'output.docx') -> None:
"""Embed ciphertext in plaintext's letter whitespace.
Open a template file, **template_path**, with the needed fonts, styles,
and margins. Write each line in **plaintext** to the template file
and add each line in **ciphertext** to **plaintext**'s space between
letters by using a monospace font.
Save the new file as **filename**.
Args:
plaintext (list): Lines of a fake message in a list of strings
(likely from :func:`~src.ch06.p1_invisible_ink.get_text`).
ciphertext (list): Lines of an encrypted message in a list of
strings (likely from :func:`~src.ch06.p1_invisible_ink.get_text`).
template_path (str): Absolute path to .docx file with predefined
fonts, styles, and margins. Defaults to :py:obj:`None`. If not
provided, defaults will be created.
filename (str): File name to use for output file. Defaults to
``output.docx``.
Returns:
:py:obj:`None`. **plaintext** is written to the file at
**template_path** with **ciphertext** embedded in the blank space.
Raises:
ValueError: If the number of spaces in **plaintext** aren't
enough to embed **ciphertext** based on output of
:func:`check_fit`.
Note:
As of python-docx v0.8.10, creating custom styles isn't well
supported. More info `here`_.
As a result, if a template isn't provided, the default template is
modified to use a font named ``Courier New`` on Windows and
``Liberation Mono`` on other operating systems in the ``Normal``
style.
.. _here:
https://python-docx.readthedocs.io/en/latest/user/styles-understanding.html
"""
blanks_needed = check_fit(plaintext, ciphertext)
if blanks_needed > 0:
raise ValueError(f'Need {blanks_needed} more spaces in the plaintext '
f'(fake) message.')
if template_path is None:
# Modify default template.
doc = docx.Document()
style = doc.styles['Normal']
font = style.font
if system().lower().startswith('windows'):
font.name = 'Courier New'
else:
font.name = 'Liberation Mono'
else:
doc = docx.Document(template_path)
line_index, letter_index = 0, 0
for line in plaintext:
# Add new paragraph to template.
paragraph = doc.add_paragraph()
paragraph_index = len(doc.paragraphs) - 1
for letter in line:
# Add each letter to paragraph.
if all([letter == ' ',
letter_index < len(ciphertext[line_index])]):
# Add real message to space and set color to white.
paragraph.add_run(ciphertext[line_index][letter_index])
run = doc.paragraphs[paragraph_index].runs[-1]
font = run.font
                # Set font color to white (switch to red when testing).
                font.color.rgb = RGBColor(255, 255, 255)
letter_index += 1
else:
paragraph.add_run(letter)
if all([letter_index >= len(ciphertext[line_index]),
line_index < len(ciphertext) - 1]):
# Go to next line in ciphertext if end reached.
line_index += 1
letter_index = 0
doc.save(filename)
def main(fakefile: str = None, cipherfile: str = None,
savepath: str = None) -> None:
"""Demonstrate the invisible ink writer.
Demonstrate :func:`write_invisible`, but for testing,
it is a basic wrapper function for :func:`write_invisible`.
Embed **cipherfile** in **fakefile**'s whitespace.
Args:
fakefile (str): Path to .docx file with fake message.
Defaults to ``./c1files/fake.docx``.
cipherfile (str): Path to .docx file with real message.
Defaults to ``./c1files/real.docx``.
savepath (str): Path to .docx file for output.
Defaults to ``./c1files/DearInternet.docx``.
Returns:
:py:obj:`None`. The contents of **cipherfile**'s text is embedded
in **fakefile**'s whitespace and saved to **savepath**.
"""
print('I can embed a hidden message in a .docx file\'s white space '
'between letters by making the font\ncolor white. It\'s far less '
'bulletproof than it sounds.\n')
current_dir = Path('./c1files').resolve()
if fakefile is None or cipherfile is None:
fakefile = PurePath(current_dir).joinpath('fake.docx')
cipherfile = PurePath(current_dir).joinpath('real.docx')
if savepath is None:
savepath = PurePath(current_dir).joinpath('DearInternet.docx')
faketext = get_text(fakefile, False)
ciphertext = get_text(cipherfile)
write_invisible(faketext, ciphertext, None, savepath)
print('Fin.\n')
print('To read the hidden message, select the entire document and\n'
'highlight it a darkish gray.')
if __name__ == '__main__':
main()
```
#### File: src/ch07/c2_safe_cracker.py
```python
import time
import random
def compare(combo: list, attempt: list) -> int:
"""Compare items in two lists and count number of matches.
Compare each tumbler in **combo** with **attempt** and return
the number of matches.
Args:
combo (list): Integers of safe combination.
attempt (list): Integers of guessed safe combination.
Returns:
Number of tumbler matches between **combo** and **attempt**.
"""
return sum(1 for i, j in zip(combo, attempt) if i == j)
def crack_safe(combo: str) -> tuple:
"""Crack a safe combination with a hill-climbing algorithm.
Solve a lock combination by randomly changing a tumbler's values one
by one and noting whether the safe had a response. If so, lock the
tumbler at that value, remove it from the pool of tumblers, and
continue randomly changing tumbler values.
Args:
combo (str): String of numbers representing combination of safe.
Returns:
Tuple with string of solved combination and number of attempts.
"""
# Convert combo to list.
combo = [int(i) for i in combo]
# Make initial guess and compare.
best_guess = [0] * len(combo)
best_guess_match = compare(combo, best_guess)
count = 0
tumblers = list(range(len(combo)))
# Evolve guess.
while best_guess != combo:
# Crossover.
guess = best_guess.copy()
# Mutate.
lock_tumbler = random.choice(tumblers)
        guess[lock_tumbler] = random.randint(0, 9)  # tumbler digits are 0-9
# Compare and select.
guess_match = compare(combo, guess)
if guess_match > best_guess_match:
best_guess = guess.copy()
best_guess_match = guess_match
tumblers.remove(lock_tumbler)
print(guess, best_guess)
count += 1
return ''.join([str(i) for i in best_guess]), count
def main():
"""Demonstrate safe cracker.
Use default combination to demonstrate :func:`crack_safe` and display time
(in seconds) it took to run.
"""
start_time = time.time()
combination = '6822858902'
print(f'Combination: {combination}')
guess, count = crack_safe(combination)
print(f'\nCracked! {guess} ')
print(f'in {count} tries!')
end_time = time.time()
duration = end_time - start_time
print(f'\nRuntime for this program was {duration:.5f} seconds.')
if __name__ == '__main__':
main()
```
#### File: impracticalpythonprojects/tests/__init__.py
```python
import string
import random
# Constants
RANDOM_STRING_ERROR = 'Length must be an integer and characters must be a string.'
# Helper functions
def random_string(length: int = 10, characters: str = string.ascii_letters) -> str:
"""Generate random string.
Generates a pseudo-random string of given length with given characters.
Args:
length (int): Length of random string to make. Defaults to ``10``.
characters (str): String with letters to choose from.
Defaults to ASCII uppercase and lowercase.
Returns:
String with random letters.
Raises:
TypeError: If ``length`` isn't an integer or if ``characters`` isn't
a string.
"""
if not all([isinstance(length, int), isinstance(characters, str)]):
raise TypeError(RANDOM_STRING_ERROR)
return ''.join(random.choice(characters) for _ in range(length))
```
#### File: impracticalpythonprojects/tests/test_chapter02.py
```python
import os
import string
import unittest.mock
from io import StringIO
import src.ch02.p1_cleanup_dictionary as cleanup_dictionary
import src.ch02.c1_recursive_palindrome as recursive_palindrome
from tests import random_string
from src.ch02 import CLEANUP_LIST_ERROR, RECURSIVE_ISPALINDROME_ERROR
class TestCleanupDictionary(unittest.TestCase):
"""Test Cleanup Dictionary."""
def test_bad_index(self):
"""Test that it raises an error if word_list is empty."""
with self.assertRaises(IndexError) as err:
cleanup_dictionary.cleanup_list([])
self.assertEqual(CLEANUP_LIST_ERROR, str(err.exception))
with self.assertRaises(IndexError) as err:
cleanup_dictionary.cleanup_list_more([])
self.assertEqual(CLEANUP_LIST_ERROR, str(err.exception))
def test_cleanup_list(self):
"""Test that it removes single letter words from a list of words."""
random_list = [random_string(1) for _ in range(13)]
random_list.extend([random_string(5) for _ in range(10)])
clean_list = cleanup_dictionary.cleanup_list(random_list)
self.assertEqual(len(clean_list), 10)
for element in clean_list:
self.assertEqual(len(element), 5)
def test_cleanup_list_more(self):
"""Test cleanup list more."""
# Test that it adds approved words.
approved_words = cleanup_dictionary.APPROVED_WORDS
test_list = ['test', 'pls']
dictionary = test_list + approved_words
dictionary.sort()
clean_list = cleanup_dictionary.cleanup_list_more(test_list)
self.assertListEqual(dictionary, clean_list)
# Test that it skips double letter words.
dictionary = ['test', 'pls']
dictionary += approved_words
dictionary.sort()
test_list = ['test', 'es', 'pls']
clean_list = cleanup_dictionary.cleanup_list_more(test_list)
self.assertListEqual(dictionary, clean_list)
# Test that it skips words with letters not in string.ascii_lowercase.
dictionary = ['test']
dictionary += approved_words
dictionary.sort()
test_list = ['test', 'mís']
clean_list = cleanup_dictionary.cleanup_list_more(test_list)
self.assertListEqual(dictionary, clean_list)
# Test that it skips words with apostrophes.
dictionary = ['test']
dictionary += approved_words
dictionary.sort()
test_list = ['test', "me's"]
clean_list = cleanup_dictionary.cleanup_list_more(test_list)
self.assertListEqual(dictionary, clean_list)
# Test that it removes duplicates.
dictionary = ['test']
dictionary += approved_words
dictionary.sort()
test_list = ['test', 'test']
clean_list = cleanup_dictionary.cleanup_list_more(test_list)
self.assertListEqual(dictionary, clean_list)
def test_cleanup_dict(self):
"""Test that it removes single letter words from a dictionary file."""
dict_file = os.path.abspath('tests/data/ch02/dictionary.txt')
clean_dict = cleanup_dictionary.cleanup_dict(dict_file)
self.assertEqual(len(clean_dict), 52) # 78 words - 26 letters
for element in clean_dict:
self.assertGreater(len(element), 1)
@unittest.mock.patch('sys.stdout', new_callable=StringIO)
@unittest.mock.patch('src.ch02.p1_cleanup_dictionary.DICTIONARY_FILE_PATH', 'tests/data/ch02/dictionary.txt')
def test_main(self, mock_stdout):
"""Test demo main function."""
cleanup_dictionary.main()
# Test printed output.
with open(os.path.normpath('tests/data/ch02/main/cleanup_dictionary.txt'),
'r') as file:
file_data = ''.join(file.readlines())
self.assertEqual(mock_stdout.getvalue(), file_data)
class TestRecursivePalindrome(unittest.TestCase):
"""Test Recursive Palindrome tester."""
def test_bad_type(self):
"""Test that it raises an error if word is not a string."""
with self.assertRaises(TypeError) as err:
recursive_palindrome.recursive_ispalindrome(5)
self.assertEqual(RECURSIVE_ISPALINDROME_ERROR, str(err.exception))
def test_recursive_ispalindrome(self):
"""Test that it can identify a pseudo-random palindrome."""
random_string_ = random_string(10, string.ascii_lowercase)
random_palindrome = random_string_ + random_string_[::-1]
self.assertTrue(
recursive_palindrome.recursive_ispalindrome(random_palindrome))
# Test a word that isn't a palindrome.
not_palindrome = 'cat'
self.assertFalse(
recursive_palindrome.recursive_ispalindrome(not_palindrome))
@unittest.mock.patch('sys.stdout', new_callable=StringIO)
def test_main(self, mock_stdout):
"""Test demo main function."""
# Test hard-coded word.
recursive_palindrome.main()
# Test inputted word.
recursive_palindrome.main('cat')
# Test printed output.
with open(os.path.normpath('tests/data/ch02/main/recursive_palindrome.txt'),
'r') as file:
file_data = ''.join(file.readlines())
self.assertEqual(mock_stdout.getvalue(), file_data)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "JoseALermaIII/lightkurve",
"score": 2
} |
#### File: lightkurve/lightkurve/lightcurve.py
```python
from __future__ import division, print_function
import copy
import os
import datetime
import logging
import pandas as pd
import warnings
import numpy as np
from scipy import signal
from matplotlib import pyplot as plt
from astropy.stats import sigma_clip
from astropy.table import Table
from astropy.io import fits
from astropy.time import Time
from astropy import units as u
from . import PACKAGEDIR, MPLSTYLE
from .utils import running_mean, bkjd_to_astropy_time, btjd_to_astropy_time
__all__ = ['LightCurve', 'KeplerLightCurve', 'TessLightCurve',
'FoldedLightCurve']
log = logging.getLogger(__name__)
class LightCurve(object):
"""
Implements a simple class for a generic light curve.
Attributes
----------
time : array-like
Time measurements
flux : array-like
Data flux for every time point
flux_err : array-like
Uncertainty on each flux data point
time_format : str
String specifying how an instant of time is represented,
e.g. 'bkjd' or 'jd'.
time_scale : str
String which specifies how the time is measured,
        e.g. 'tdb', 'tt', 'ut1', or 'utc'.
targetid : str
Identifier of the target.
label : str
Human-friendly object label, e.g. "KIC 123456789"
meta : dict
Free-form metadata associated with the LightCurve.
"""
def __init__(self, time=None, flux=None, flux_err=None, time_format=None,
time_scale=None, targetid=None, label=None, meta={}):
if time is None and flux is None:
raise ValueError('either time or flux must be given')
if time is None:
self.time = np.arange(len(flux))
else:
self.time = np.asarray(time)
# Trigger warning if time=NaN are present
if np.isnan(self.time).any():
log.warning('Warning: NaN times are present in LightCurve')
self.flux = self._validate_array(flux, name='flux')
self.flux_err = self._validate_array(flux_err, name='flux_err')
self.time_format = time_format
self.time_scale = time_scale
self.targetid = targetid
self.label = label
self.meta = meta
def _validate_array(self, arr, name='array'):
"""Ensure the input arrays have the same length as `self.time`."""
if arr is not None:
arr = np.asarray(arr)
else:
arr = np.nan * np.ones_like(self.time)
if not (len(self.time) == len(arr)):
raise ValueError("Input arrays have different lengths."
" len(time)={}, len({})={}"
.format(len(self.time), name, len(arr)))
return arr
def __getitem__(self, key):
copy_self = copy.copy(self)
copy_self.time = self.time[key]
copy_self.flux = self.flux[key]
copy_self.flux_err = self.flux_err[key]
return copy_self
def __add__(self, other):
copy_self = copy.copy(self)
copy_self.flux = copy_self.flux + other
return copy_self
def __radd__(self, other):
return self.__add__(other)
def __sub__(self, other):
return self.__add__(-other)
def __rsub__(self, other):
copy_self = copy.copy(self)
copy_self.flux = other - copy_self.flux
return copy_self
def __mul__(self, other):
copy_self = copy.copy(self)
copy_self.flux = other * copy_self.flux
copy_self.flux_err = abs(other) * copy_self.flux_err
return copy_self
def __rmul__(self, other):
return self.__mul__(other)
def __truediv__(self, other):
return self.__mul__(1./other)
def __rtruediv__(self, other):
copy_self = copy.copy(self)
copy_self.flux = other / copy_self.flux
return copy_self
def __div__(self, other):
return self.__truediv__(other)
def __rdiv__(self, other):
return self.__rtruediv__(other)
@property
def astropy_time(self):
"""Returns an `astropy.time.Time` object.
The Time object will be created using the values in `self.time`
and the `self.time_format` and `self.time_scale` attributes.
For Kepler data products, the times are Barycentric.
Raises
------
ValueError
If `self.time_format` is not set or not one of the formats
allowed by AstroPy.
"""
from astropy.time import Time
if self.time_format is None:
raise ValueError("To retrieve a `Time` object the `time_format` "
"attribute must be set on the LightCurve object, "
"e.g. `lightcurve.time_format = 'jd'`.")
# AstroPy does not support BKJD, so we call a function to convert to JD.
# In the future, we should think about making an AstroPy-compatible
# `TimeFormat` class for BKJD.
if self.time_format == 'bkjd':
return bkjd_to_astropy_time(self.time)
elif self.time_format == 'btjd': # TESS
return btjd_to_astropy_time(self.time)
return Time(self.time, format=self.time_format, scale=self.time_scale)
def show_properties(self):
'''Print out a description of each of the non-callable attributes of a
LightCurve object.
Prints in order of type (ints, strings, lists, arrays and others)
Prints in alphabetical order.'''
attrs = {}
for attr in dir(self):
if not attr.startswith('_'):
res = getattr(self, attr)
if callable(res):
continue
if attr == 'hdu':
attrs[attr] = {'res': res, 'type': 'list'}
for idx, r in enumerate(res):
if idx == 0:
attrs[attr]['print'] = '{}'.format(r.header['EXTNAME'])
else:
attrs[attr]['print'] = '{}, {}'.format(
attrs[attr]['print'], '{}'.format(r.header['EXTNAME']))
continue
else:
attrs[attr] = {'res': res}
if isinstance(res, int):
attrs[attr]['print'] = '{}'.format(res)
attrs[attr]['type'] = 'int'
elif isinstance(res, np.ndarray):
attrs[attr]['print'] = 'array {}'.format(res.shape)
attrs[attr]['type'] = 'array'
elif isinstance(res, list):
attrs[attr]['print'] = 'list length {}'.format(len(res))
attrs[attr]['type'] = 'list'
elif isinstance(res, str):
if res == '':
attrs[attr]['print'] = '{}'.format('None')
else:
attrs[attr]['print'] = '{}'.format(res)
attrs[attr]['type'] = 'str'
                elif attr == 'wcs':
                    attrs[attr]['print'] = 'astropy.wcs.wcs.WCS'
                    attrs[attr]['type'] = 'other'
else:
attrs[attr]['print'] = '{}'.format(type(res))
attrs[attr]['type'] = 'other'
output = Table(names=['Attribute', 'Description'], dtype=[object, object])
idx = 0
types = ['int', 'str', 'list', 'array', 'other']
for typ in types:
for attr, dic in attrs.items():
if dic['type'] == typ:
output.add_row([attr, dic['print']])
idx += 1
output.pprint(max_lines=-1, max_width=-1)
def append(self, others):
"""
Append LightCurve objects.
Parameters
----------
others : LightCurve object or list of LightCurve objects
Light curves to be appended to the current one.
Returns
-------
new_lc : LightCurve object
Concatenated light curve.
"""
if not hasattr(others, '__iter__'):
others = [others]
new_lc = copy.copy(self)
for i in range(len(others)):
new_lc.time = np.append(new_lc.time, others[i].time)
new_lc.flux = np.append(new_lc.flux, others[i].flux)
new_lc.flux_err = np.append(new_lc.flux_err, others[i].flux_err)
if hasattr(new_lc, 'cadenceno'):
new_lc.cadenceno = np.append(new_lc.cadenceno, others[i].cadenceno) # KJM
if hasattr(new_lc, 'quality'):
new_lc.quality = np.append(new_lc.quality, others[i].quality)
if hasattr(new_lc, 'centroid_col'):
new_lc.centroid_col = np.append(new_lc.centroid_col, others[i].centroid_col)
if hasattr(new_lc, 'centroid_row'):
new_lc.centroid_row = np.append(new_lc.centroid_row, others[i].centroid_row)
return new_lc
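# Hypothetical usage sketch (not part of the original source): stitching two
# segments of data into a single light curve with `append`.
#   lc_a = LightCurve(time=[1, 2, 3], flux=[1.0, 1.1, 0.9])
#   lc_b = LightCurve(time=[4, 5, 6], flux=[1.2, 0.8, 1.0])
#   combined = lc_a.append(lc_b)  # combined.time -> [1 2 3 4 5 6]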
def flatten(self, window_length=101, polyorder=2, return_trend=False,
break_tolerance=5, **kwargs):
"""
Removes the low-frequency trend using scipy's Savitzky-Golay filter.
This method wraps `scipy.signal.savgol_filter`.
Parameters
----------
window_length : int
The length of the filter window (i.e. the number of coefficients).
``window_length`` must be a positive odd integer.
polyorder : int
The order of the polynomial used to fit the samples. ``polyorder``
must be less than window_length.
return_trend : bool
If `True`, the method will return a tuple of two elements
(flattened_lc, trend_lc) where trend_lc is the removed trend.
break_tolerance : int
If there are large gaps in time, flatten will split the flux into
several sub-lightcurves and apply `savgol_filter` to each
individually. A gap is defined as a period in time larger than
`break_tolerance` times the median gap. To disable this feature,
set `break_tolerance` to None.
**kwargs : dict
Dictionary of arguments to be passed to `scipy.signal.savgol_filter`.
Returns
-------
flatten_lc : LightCurve object
Flattened lightcurve.
If `return_trend` is `True`, the method will also return:
trend_lc : LightCurve object
Trend in the lightcurve data
"""
if break_tolerance is None:
break_tolerance = np.nan
if polyorder >= window_length:
polyorder = window_length - 1
log.warning("polyorder must be smaller than window_length, "
"using polyorder={}.".format(polyorder))
lc_clean = self.remove_nans()
# Split the lightcurve into segments by finding large gaps in time
dt = lc_clean.time[1:] - lc_clean.time[0:-1]
with warnings.catch_warnings(): # Ignore warnings due to NaNs
warnings.simplefilter("ignore", RuntimeWarning)
cut = np.where(dt > break_tolerance * np.nanmedian(dt))[0] + 1
low = np.append([0], cut)
high = np.append(cut, len(lc_clean.time))
# Then, apply the savgol_filter to each segment separately
trend_signal = np.zeros(len(lc_clean.time))
for l, h in zip(low, high):
# Reduce `window_length` and `polyorder` for short segments;
# this prevents `savgol_filter` from raising an exception
# If the segment is too short, just take the median
if np.any([window_length > (h - l), (h - l) < break_tolerance]):
trend_signal[l:h] = np.nanmedian(lc_clean.flux[l:h])
else:
trend_signal[l:h] = signal.savgol_filter(x=lc_clean.flux[l:h],
window_length=window_length,
polyorder=polyorder,
**kwargs)
trend_signal = np.interp(self.time, lc_clean.time, trend_signal)
flatten_lc = copy.deepcopy(self)
with warnings.catch_warnings():
# ignore invalid division warnings
warnings.simplefilter("ignore", RuntimeWarning)
flatten_lc.flux = flatten_lc.flux / trend_signal
flatten_lc.flux_err = flatten_lc.flux_err / trend_signal
if return_trend:
trend_lc = copy.deepcopy(self)
trend_lc.flux = trend_signal
return flatten_lc, trend_lc
return flatten_lc
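# Hypothetical usage sketch (not part of the original source): detrending a
# light curve while keeping the removed trend; `window_length` must be odd.
#   flat_lc, trend_lc = lc.flatten(window_length=301, return_trend=True)
#   flat_lc.flux  # roughly centred on 1.0 after dividing out the trend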
def fold(self, period, phase=0.):
"""Folds the lightcurve at a specified ``period`` and ``phase``.
This method returns a new ``LightCurve`` object in which the time
values range from -0.5 to +0.5. Data points which occur exactly
at ``phase``, or at ``phase + n*period`` for integer ``n``, have a time
value of 0.0.
Parameters
----------
period : float
The period upon which to fold.
phase : float, optional
Time reference point.
Returns
-------
folded_lightcurve : LightCurve object
A new ``LightCurve`` in which the data are folded and sorted by
phase.
"""
fold_time = (((self.time - phase * period) / period) % 1)
# fold time domain from -.5 to .5
fold_time[fold_time > 0.5] -= 1
sorted_args = np.argsort(fold_time)
return FoldedLightCurve(fold_time[sorted_args],
self.flux[sorted_args],
flux_err=self.flux_err[sorted_args])
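# Hypothetical usage sketch (not part of the original source): folding on a
# known period given in the same units as `lc.time` (typically days).
#   folded = lc.fold(period=3.52)
#   folded.phase  # sorted phase values in the range [-0.5, 0.5]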
def normalize(self):
"""Returns a normalized version of the lightcurve.
The normalized lightcurve is obtained by dividing `flux` and `flux_err`
by the median flux.
Returns
-------
normalized_lightcurve : LightCurve object
A new ``LightCurve`` in which `flux` and `flux_err` are divided
by the median.
"""
lc = copy.copy(self)
lc.flux_err = lc.flux_err / np.nanmedian(lc.flux)
lc.flux = lc.flux / np.nanmedian(lc.flux)
return lc
def remove_nans(self):
"""Removes cadences where the flux is NaN.
Returns
-------
clean_lightcurve : LightCurve object
A new ``LightCurve`` from which NaNs fluxes have been removed.
"""
return self[~np.isnan(self.flux)] # This will return a sliced copy
def fill_gaps(self, method='nearest'):
"""Fill in gaps in time, and replace NaN values, using the given `method`.
Parameters
----------
method : string {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}
Method to use for gap filling. 'nearest' by default.
Returns
-------
nlc : LightCurve object
A new ``LightCurve`` in which NaNs values and gaps in time have been
filled.
"""
clc = copy.deepcopy(self.remove_nans())
nlc = copy.deepcopy(self)
# Average gap between cadences
dt = np.nanmedian(clc.time[1::] - clc.time[:-1:])
# Iterate over flux and flux_err
for idx, y in enumerate([clc.flux, clc.flux_err]):
# We need to ensure pandas gets the correct byteorder
# Background info: https://github.com/astropy/astropy/issues/1156
if y.dtype.byteorder == '>':
y = y.byteswap().newbyteorder()
ts = pd.Series(y, index=clc.time)
newindex = [clc.time[0]]
for t in clc.time[1::]:
prevtime = newindex[-1]
while (t - prevtime) > 1.2*dt:
newindex.append(prevtime + dt)
prevtime = newindex[-1]
newindex.append(t)
ts = ts.reindex(newindex, method=method)
if idx == 0:
nlc.flux = np.asarray(ts)
elif idx == 1:
nlc.flux_err = np.asarray(ts)
nlc.time = np.asarray(ts.index)
return nlc
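# Hypothetical usage sketch (not part of the original source): filling gaps
# so that downstream methods see a near-regular time grid.
#   filled_lc = lc.fill_gaps(method='nearest')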
def remove_outliers(self, sigma=5., return_mask=False, **kwargs):
"""Removes outlier data points using sigma-clipping.
This method returns a new :class:`LightCurve` object from which data
points are removed if their flux values are greater or smaller than
the median flux by at least ``sigma`` times the standard deviation.
Sigma-clipping works by iterating over data points, each time rejecting
values that are discrepant by more than a specified number of standard
deviations from a center value. If the data contains invalid values
(NaNs or infs), they are automatically masked before performing the
sigma clipping.
.. note::
This function is a convenience wrapper around
`astropy.stats.sigma_clip
<http://docs.astropy.org/en/stable/api/astropy.stats.sigma_clip.html>`_
and provides the same functionality.
Parameters
----------
sigma : float
The number of standard deviations to use for both the lower and
upper clipping limit. These limits are overridden by
``sigma_lower`` and ``sigma_upper``, if input. Defaults to 5.
sigma_lower : float or `None`
The number of standard deviations to use as the lower bound for
the clipping limit. Can be set to float('inf') in order to avoid
clipping outliers below the median at all. If `None` then the
value of ``sigma`` is used. Defaults to `None`.
sigma_upper : float or `None`
The number of standard deviations to use as the upper bound for
the clipping limit. Can be set to float('inf') in order to avoid
clipping outliers above the median at all. If `None` then the
value of ``sigma`` is used. Defaults to `None`.
return_mask : bool
Whether or not to return a mask (i.e. a boolean array) indicating
which data points were removed. Entries marked as `True` in the
mask are considered outliers. Defaults to `False`.
iters : int or `None`
The number of iterations to perform sigma clipping, or `None` to
clip until convergence is achieved (i.e., continue until the
last iteration clips nothing). Defaults to 5.
cenfunc : callable
The function used to compute the center for the clipping. Must
be a callable that takes in a masked array and outputs the
central value. Defaults to the median (`numpy.ma.median`).
**kwargs : dict
Dictionary of arguments to be passed to `astropy.stats.sigma_clip`.
Returns
-------
clean_lc : LightCurve object
A new :class:`LightCurve` from which outlier data points have been
removed.
Examples
--------
This example generates a new LightCurve in which all points
that are more than 1 standard deviation from the median are removed::
>>> lc = LightCurve(time=[1, 2, 3, 4, 5], flux=[1, 1000, 1, -1000, 1])
>>> lc_clean = lc.remove_outliers(sigma=1)
>>> lc_clean.time
array([1, 3, 5])
>>> lc_clean.flux
array([1, 1, 1])
This example removes only points where the flux is larger than 1
standard deviation from the median, but leaves negative outliers
in place::
>>> lc = LightCurve(time=[1, 2, 3, 4, 5], flux=[1, 1000, 1, -1000, 1])
>>> lc_clean = lc.remove_outliers(sigma_lower=float('inf'), sigma_upper=1)
>>> lc_clean.time
array([1, 3, 4, 5])
>>> lc_clean.flux
array([ 1, 1, -1000, 1])
"""
# First, we create the outlier mask using AstroPy's sigma_clip function
with warnings.catch_warnings(): # Ignore warnings due to NaNs or Infs
warnings.simplefilter("ignore")
outlier_mask = sigma_clip(data=self.flux, sigma=sigma, **kwargs).mask
# Second, we return the masked lightcurve and optionally the mask itself
if return_mask:
return self[~outlier_mask], outlier_mask
return self[~outlier_mask]
def bin(self, binsize=13, method='mean'):
"""Bins a lightcurve using a function defined by `method`
on blocks of samples of size `binsize`.
Parameters
----------
binsize : int
Number of cadences to include in every bin.
method: str, one of 'mean' or 'median'
The summary statistic to return for each bin. Default: 'mean'.
Returns
-------
binned_lc : LightCurve object
Binned lightcurve.
Notes
-----
- If the ratio between the lightcurve length and the binsize is not
a whole number, then the remainder of the data points will be
ignored.
- If the original lightcurve contains flux uncertainties (flux_err),
the binned lightcurve will report the root-mean-square error.
If no uncertainties are included, the binned curve will return the
standard deviation of the data.
- If the original lightcurve contains a quality attribute, then the
bitwise OR of the quality flags will be returned per bin.
"""
available_methods = ['mean', 'median']
if method not in available_methods:
raise ValueError("method must be one of: {}".format(available_methods))
methodf = np.__dict__['nan' + method]
n_bins = self.flux.size // binsize
binned_lc = copy.copy(self)
binned_lc.time = np.array([methodf(a) for a in np.array_split(self.time, n_bins)])
binned_lc.flux = np.array([methodf(a) for a in np.array_split(self.flux, n_bins)])
if np.any(np.isfinite(self.flux_err)):
# root-mean-square error
binned_lc.flux_err = np.array(
[np.sqrt(np.nansum(a**2))
for a in np.array_split(self.flux_err, n_bins)]
) / binsize
else:
# compute the standard deviation from the data
binned_lc.flux_err = np.array([np.nanstd(a)
for a in np.array_split(self.flux, n_bins)])
if hasattr(binned_lc, 'quality'):
binned_lc.quality = np.array(
[np.bitwise_or.reduce(a) for a in np.array_split(self.quality, n_bins)])
if hasattr(binned_lc, 'centroid_col'):
binned_lc.centroid_col = np.array(
[methodf(a) for a in np.array_split(self.centroid_col, n_bins)])
if hasattr(binned_lc, 'centroid_row'):
binned_lc.centroid_row = np.array(
[methodf(a) for a in np.array_split(self.centroid_row, n_bins)])
return binned_lc
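# Hypothetical usage sketch (not part of the original source): binning 13
# cadences per bin reduces white noise by roughly sqrt(13).
#   binned_lc = lc.bin(binsize=13, method='mean')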
def cdpp(self, **kwargs):
"""DEPRECATED: use `estimate_cdpp()` instead."""
log.warning("WARNING: cdpp() is deprecated and will be removed in v1.0.0; "
"please use estimate_cdpp() instead.")
return self.estimate_cdpp(**kwargs)
def estimate_cdpp(self, transit_duration=13, savgol_window=101,
savgol_polyorder=2, sigma_clip=5.):
"""Estimate the CDPP noise metric using the Savitzky-Golay (SG) method.
A common estimate of the noise in a lightcurve is the scatter that
remains after all long term trends have been removed. This is the idea
behind the Combined Differential Photometric Precision (CDPP) metric.
The official Kepler Pipeline computes this metric using a wavelet-based
algorithm to calculate the signal-to-noise of the specific waveform of
transits of various durations. In this implementation, we use the
simpler "sgCDPP proxy algorithm" discussed by Gilliland et al
(2011ApJS..197....6G) and Van Cleve et al (2016PASP..128g5002V).
The steps of this algorithm are:
1. Remove low frequency signals using a Savitzky-Golay filter with
window length `savgol_window` and polynomial order `savgol_polyorder`.
2. Remove outliers by rejecting data points which are separated from
the mean by `sigma_clip` times the standard deviation.
3. Compute the standard deviation of a running mean with
a configurable window length equal to `transit_duration`.
We use a running mean (as opposed to block averaging) to strongly
attenuate the signal above 1/transit_duration whilst retaining
the original frequency sampling. Block averaging would set the Nyquist
limit to 1/transit_duration.
Parameters
----------
transit_duration : int, optional
The transit duration in units of number of cadences. This is the
length of the window used to compute the running mean. The default
is 13, which corresponds to a 6.5 hour transit in data sampled at
30-min cadence.
savgol_window : int, optional
Width of Savitsky-Golay filter in cadences (odd number).
Default value 101 (2.0 days in Kepler Long Cadence mode).
savgol_polyorder : int, optional
Polynomial order of the Savitsky-Golay filter.
The recommended value is 2.
sigma_clip : float, optional
The number of standard deviations to use for clipping outliers.
The default is 5.
Returns
-------
cdpp : float
Savitzky-Golay CDPP noise metric in units parts-per-million (ppm).
Notes
-----
This implementation is adapted from the Matlab version used by
<NAME> but lacks the normalization factor used there:
svn+ssh://murzim/repo/so/trunk/Develop/jvc/common/compute_SG_noise.m
"""
if not isinstance(transit_duration, int):
raise ValueError("transit_duration must be an integer in units "
"number of cadences, got {}.".format(transit_duration))
detrended_lc = self.flatten(window_length=savgol_window,
polyorder=savgol_polyorder)
cleaned_lc = detrended_lc.remove_outliers(sigma=sigma_clip)
mean = running_mean(data=cleaned_lc.flux, window_size=transit_duration)
cdpp_ppm = np.std(mean) * 1e6
return cdpp_ppm
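# Hypothetical usage sketch (not part of the original source): the default
# transit_duration of 13 cadences corresponds to 6.5 hours at 30-min cadence.
#   noise_ppm = lc.estimate_cdpp(transit_duration=13)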
def _create_plot(self, method='plot', ax=None, normalize=True,
xlabel=None, ylabel=None, title='', style='lightkurve',
show_colorbar=True, colorbar_label='',
**kwargs):
"""Implements `plot()`, `scatter()`, and `errorbar()` to avoid code duplication.
Returns
-------
ax : matplotlib.axes._subplots.AxesSubplot
The matplotlib axes object.
"""
# Configure the default style
if style is None or style == 'lightkurve':
style = MPLSTYLE
# Default xlabel
if xlabel is None:
if self.time_format == 'bkjd':
xlabel = 'Time - 2454833 [BKJD days]'
elif self.time_format == 'btjd':
xlabel = 'Time - 2457000 [BTJD days]'
elif self.time_format == 'jd':
xlabel = 'Time [JD]'
else:
xlabel = 'Time'
# Default ylabel
if ylabel is None:
if normalize:
ylabel = 'Normalized Flux'
else:
ylabel = 'Flux [e$^-$s$^{-1}$]'
# Default legend label
if ('label' not in kwargs):
kwargs['label'] = self.label
# Normalize the data if requested
if normalize:
lc_normed = self.normalize()
flux, flux_err = lc_normed.flux, lc_normed.flux_err
else:
flux, flux_err = self.flux, self.flux_err
# Make the plot
with plt.style.context(style):
if ax is None:
fig, ax = plt.subplots(1)
if method == 'scatter':
sc = ax.scatter(self.time, flux, **kwargs)
# Colorbars should only be plotted if the user specifies, and there is
# a color specified that is not a string (e.g. 'C1') and is iterable.
if show_colorbar and ('c' in kwargs) and \
(not isinstance(kwargs['c'], str)) and hasattr(kwargs['c'], '__iter__'):
cbar = plt.colorbar(sc, ax=ax)
cbar.set_label(colorbar_label)
cbar.ax.yaxis.set_tick_params(tick1On=False, tick2On=False)
cbar.ax.minorticks_off()
elif method == 'errorbar':
ax.errorbar(x=self.time, y=flux, yerr=flux_err, **kwargs)
else:
ax.plot(self.time, flux, **kwargs)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
# Show the legend if labels were set
legend_labels = ax.get_legend_handles_labels()
if (np.sum([len(a) for a in legend_labels]) != 0):
ax.legend()
return ax
def plot(self, **kwargs):
"""Plot the light curve using matplotlib's `plot` method.
Parameters
----------
ax : matplotlib.axes._subplots.AxesSubplot
A matplotlib axes object to plot into. If no axes is provided,
a new one will be generated.
normalize : bool
Normalize the lightcurve before plotting?
xlabel : str
Plot x axis label
ylabel : str
Plot y axis label
title : str
Plot set_title
style : str
Path or URL to a matplotlib style file, or name of one of
matplotlib's built-in stylesheets (e.g. 'ggplot').
Lightkurve's custom stylesheet is used by default.
kwargs : dict
Dictionary of arguments to be passed to `matplotlib.pyplot.plot`.
Returns
-------
ax : matplotlib.axes._subplots.AxesSubplot
The matplotlib axes object.
"""
return self._create_plot(method='plot', **kwargs)
def scatter(self, colorbar_label='', show_colorbar=True, **kwargs):
"""Plots the light curve using matplotlib's `scatter` method.
Parameters
----------
ax : matplotlib.axes._subplots.AxesSubplot
A matplotlib axes object to plot into. If no axes is provided,
a new one will be generated.
normalize : bool
Normalize the lightcurve before plotting?
xlabel : str
Plot x axis label
ylabel : str
Plot y axis label
title : str
Plot set_title
style : str
Path or URL to a matplotlib style file, or name of one of
matplotlib's built-in stylesheets (e.g. 'ggplot').
Lightkurve's custom stylesheet is used by default.
colorbar_label : str
Label to show next to the colorbar (if `c` is given).
show_colorbar : boolean
Show the colorbar if colors are given using the `c` argument?
kwargs : dict
Dictionary of arguments to be passed to `matplotlib.pyplot.scatter`.
Returns
-------
ax : matplotlib.axes._subplots.AxesSubplot
The matplotlib axes object.
"""
return self._create_plot(method='scatter', colorbar_label=colorbar_label,
show_colorbar=show_colorbar, **kwargs)
def errorbar(self, linestyle='', **kwargs):
"""Plots the light curve using matplotlib's `errorbar` method.
Parameters
----------
ax : matplotlib.axes._subplots.AxesSubplot
A matplotlib axes object to plot into. If no axes is provided,
a new one will be generated.
normalize : bool
Normalize the lightcurve before plotting?
xlabel : str
Plot x axis label
ylabel : str
Plot y axis label
title : str
Plot set_title
style : str
Path or URL to a matplotlib style file, or name of one of
matplotlib's built-in stylesheets (e.g. 'ggplot').
Lightkurve's custom stylesheet is used by default.
linestyle : str
Connect the error bars using a line?
kwargs : dict
Dictionary of arguments to be passed to `matplotlib.pyplot.errorbar`.
Returns
-------
ax : matplotlib.axes._subplots.AxesSubplot
The matplotlib axes object.
"""
if 'ls' not in kwargs:
kwargs['linestyle'] = linestyle
return self._create_plot(method='errorbar', **kwargs)
def to_table(self):
"""Export the LightCurve as an AstroPy Table.
Returns
-------
table : `astropy.table.Table` object
An AstroPy Table with columns 'time', 'flux', and 'flux_err'.
"""
return Table(data=(self.time, self.flux, self.flux_err),
names=('time', 'flux', 'flux_err'),
meta=self.meta)
def to_pandas(self, columns=['time', 'flux', 'flux_err']):
"""Export the LightCurve as a Pandas DataFrame.
Parameters
----------
columns : list of str
List of columns to include in the DataFrame. The names must match
attributes of the `LightCurve` object (e.g. `time`, `flux`).
Returns
-------
dataframe : `pandas.DataFrame` object
A dataframe indexed by `time` and containing the columns `flux`
and `flux_err`.
"""
try:
import pandas as pd
# lightkurve does not require pandas, so check for import success.
except ImportError:
raise ImportError("You need to install pandas to use the "
"LightCurve.to_pandas() method.")
data = {}
for col in columns:
if hasattr(self, col):
data[col] = vars(self)[col]
df = pd.DataFrame(data=data, index=self.time, columns=columns)
df.index.name = 'time'
return df
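# Hypothetical usage sketch (not part of the original source): exporting to
# pandas opens up its time-series tooling, e.g. rolling statistics.
#   df = lc.to_pandas()
#   rolling_mean = df['flux'].rolling(10).mean()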
def to_csv(self, path_or_buf=None, **kwargs):
"""Writes the LightCurve to a csv file.
Parameters
----------
path_or_buf : string or file handle, default None
File path or object, if None is provided the result is returned as
a string.
**kwargs : dict
Dictionary of arguments to be passed to `pandas.DataFrame.to_csv()`.
Returns
-------
csv : str or None
Returns a csv-formatted string if `path_or_buf=None`,
returns None otherwise.
"""
return self.to_pandas().to_csv(path_or_buf=path_or_buf, **kwargs)
def to_periodogram(self, nterms=1, nyquist_factor=1, oversample_factor=1,
min_frequency=None, max_frequency=None,
min_period=None, max_period=None,
frequency=None, period=None,
freq_unit=1/u.day, **kwargs):
"""Returns a `Periodogram` power spectrum object.
Parameters
----------
min_frequency : float
If specified, use this minimum frequency rather than one over the
time baseline.
max_frequency : float
If specified, use this maximum frequency rather than nyquist_factor
times the nyquist frequency.
min_period : float
If specified, use 1./min_period as the maximum frequency rather
than nyquist_factor times the nyquist frequency.
max_period : float
If specified, use 1./max_period as the minimum frequency rather
than one over the time baseline.
frequency : array-like
The regular grid of frequencies to use. If given a unit, it is
converted to units of freq_unit. If not, it is assumed to be in
units of freq_unit. This overrides any set frequency limits.
period : array-like
The regular grid of periods to use (as 1/period). If given a unit,
it is converted to units of freq_unit. If not, it is assumed to be
in units of 1/freq_unit. This overrides any set period limits.
nterms : int
Default 1. Number of terms to use in the Fourier fit.
nyquist_factor : int
Default 1. The multiple of the average Nyquist frequency.
Overridden by ``max_frequency`` (or ``min_period``).
oversample_factor : int
The frequency spacing, determined by the time baseline of the
lightcurve, is divided by this factor, oversampling frequency space.
This parameter is identical to the samples_per_peak parameter in
astropy.LombScargle()
freq_unit : `astropy.units.core.CompositeUnit`
Default: 1/u.day. The desired frequency units for the Lomb Scargle
periodogram. This implies that 1/freq_unit is the units for period.
kwargs : dict
Keyword arguments passed to `astropy.stats.LombScargle()`
Returns
-------
Periodogram : `Periodogram` object
Returns a Periodogram object extracted from the lightcurve.
"""
from . import Periodogram
return Periodogram.from_lightcurve(lc=self,
min_frequency=min_frequency,
max_frequency=max_frequency,
min_period=min_period,
max_period=max_period,
frequency=frequency,
period=period,
nterms=nterms,
nyquist_factor=nyquist_factor,
oversample_factor=oversample_factor,
freq_unit=freq_unit,
**kwargs)
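# Hypothetical usage sketch (not part of the original source): NaNs must be
# removed before computing a periodogram.
#   pg = lc.remove_nans().to_periodogram(oversample_factor=5)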
def to_fits(self, path=None, overwrite=False, **extra_data):
"""Writes the LightCurve to a FITS file.
Parameters
----------
path : string, default None
File path, if `None` returns an astropy.io.fits.HDUList object.
overwrite : bool
Whether or not to overwrite the file
extra_data : dict
Extra keywords or columns to include in the FITS file.
Arguments of type str, int, float, or bool will be stored as
keywords in the primary header.
Arguments of type np.array or list will be stored as columns
in the first extension.
Returns
-------
hdu : astropy.io.fits
Returns an astropy.io.fits object if path is None
"""
typedir = {int: 'J', str: 'A', float: 'D', bool: 'L',
np.int32: 'J', np.int64: 'K', np.float32: 'E', np.float64: 'D'}
def _header_template(extension):
"""Returns a template `fits.Header` object for a given extension."""
template_fn = os.path.join(PACKAGEDIR, "data",
"lc-ext{}-header.txt".format(extension))
return fits.Header.fromtextfile(template_fn)
def _make_primary_hdu(extra_data={}):
"""Returns the primary extension (#0)."""
hdu = fits.PrimaryHDU()
# Copy the default keywords from a template file from the MAST archive
tmpl = _header_template(0)
for kw in tmpl:
hdu.header[kw] = (tmpl[kw], tmpl.comments[kw])
# Override the defaults where necessary
from . import __version__
default = {'ORIGIN': "Unofficial data product",
'DATE': datetime.datetime.now().strftime("%Y-%m-%d"),
'CREATOR': "lightkurve",
'PROCVER': str(__version__)}
for kw in default:
hdu.header['{}'.format(kw).upper()] = default[kw]
if default[kw] is None:
log.warning('Value for {} is None.'.format(kw))
if ('quarter' in dir(self)) and (self.quarter is not None):
hdu.header['QUARTER'] = self.quarter
elif ('campaign' in dir(self)) and self.campaign is not None:
hdu.header['CAMPAIGN'] = self.campaign
else:
log.warning('Cannot find Campaign or Quarter number.')
for kw in extra_data:
if isinstance(extra_data[kw], (str, float, int, bool, type(None))):
hdu.header['{}'.format(kw).upper()] = extra_data[kw]
if extra_data[kw] is None:
log.warning('Value for {} is None.'.format(kw))
return hdu
def _make_lightcurve_extension(extra_data={}):
"""Create the 'LIGHTCURVE' extension (i.e. extension #1)."""
# Turn the data arrays into fits columns and initialize the HDU
cols = []
if ~np.asarray(['TIME' in k.upper() for k in extra_data.keys()]).any():
cols.append(fits.Column(name='TIME', format='D', unit=self.time_format,
array=self.time))
if ~np.asarray(['FLUX' in k.upper() for k in extra_data.keys()]).any():
cols.append(fits.Column(name='FLUX', format='E',
unit='counts', array=self.flux))
if 'flux_err' in dir(self):
if ~np.asarray(['FLUX_ERR' in k.upper() for k in extra_data.keys()]).any():
cols.append(fits.Column(name='FLUX_ERR', format='E',
unit='counts', array=self.flux_err))
if 'cadenceno' in dir(self):
if ~np.asarray(['CADENCENO' in k.upper() for k in extra_data.keys()]).any():
cols.append(fits.Column(name='CADENCENO', format='J',
array=self.cadenceno))
for kw in extra_data:
if isinstance(extra_data[kw], (np.ndarray, list)):
cols.append(fits.Column(name='{}'.format(kw).upper(),
format=typedir[type(extra_data[kw][0])],
array=extra_data[kw]))
if 'SAP_QUALITY' not in extra_data:
cols.append(fits.Column(name='SAP_QUALITY',
format='J',
array=np.zeros(len(self.flux))))
coldefs = fits.ColDefs(cols)
hdu = fits.BinTableHDU.from_columns(coldefs)
hdu.header['EXTNAME'] = 'LIGHTCURVE'
return hdu
def _hdulist(**extra_data):
"""Returns an astropy.io.fits.HDUList object."""
return fits.HDUList([_make_primary_hdu(extra_data=extra_data),
_make_lightcurve_extension(extra_data=extra_data)])
hdu = _hdulist(**extra_data)
if path is not None:
hdu.writeto(path, overwrite=overwrite, checksum=True)
return hdu
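# Hypothetical usage sketch (not part of the original source): `to_fits`
# either writes a file or returns the HDUList for further inspection.
#   lc.to_fits(path='my_lightcurve.fits', overwrite=True)  # write to disk
#   hdulist = lc.to_fits()  # no path: returns an astropy HDUList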
class FoldedLightCurve(LightCurve):
"""Defines a folded lightcurve with different plotting defaults."""
def __init__(self, *args, **kwargs):
super(FoldedLightCurve, self).__init__(*args, **kwargs)
@property
def phase(self):
return self.time
def plot(self, **kwargs):
"""Plot the folded light curve usng matplotlib's `plot` method.
See `LightCurve.plot` for details on the accepted arguments.
Parameters
----------
kwargs : dict
Dictionary of arguments to be passed to `LightCurve.plot`.
Returns
-------
ax : matplotlib.axes._subplots.AxesSubplot
The matplotlib axes object.
"""
ax = super(FoldedLightCurve, self).plot(**kwargs)
if 'xlabel' not in kwargs:
ax.set_xlabel("Phase")
return ax
def scatter(self, **kwargs):
"""Plot the folded light curve usng matplotlib's `scatter` method.
See `LightCurve.scatter` for details on the accepted arguments.
Parameters
----------
kwargs : dict
Dictionary of arguments to be passed to `LightCurve.scatter`.
Returns
-------
ax : matplotlib.axes._subplots.AxesSubplot
The matplotlib axes object.
"""
ax = super(FoldedLightCurve, self).scatter(**kwargs)
if 'xlabel' not in kwargs:
ax.set_xlabel("Phase")
return ax
class KeplerLightCurve(LightCurve):
"""Defines a light curve class for NASA's Kepler and K2 missions.
Attributes
----------
time : array-like
Time measurements
flux : array-like
Data flux for every time point
flux_err : array-like
Uncertainty on each flux data point
time_format : str
String specifying how an instant of time is represented,
e.g. 'bkjd' or 'jd'.
time_scale : str
String which specifies how the time is measured,
e.g. 'tdb', 'tt', 'ut1', or 'utc'.
centroid_col : array-like
Centroid column coordinates as a function of time
centroid_row : array-like
Centroid row coordinates as a function of time
quality : array-like
Array indicating the quality of each data point
quality_bitmask : int
Bitmask specifying quality flags of cadences that should be ignored
channel : int
Channel number
campaign : int
Campaign number
quarter : int
Quarter number
mission : str
Mission name
cadenceno : array-like
Cadence numbers corresponding to every time measurement
targetid : int
Kepler ID number
"""
def __init__(self, time=None, flux=None, flux_err=None, time_format=None, time_scale=None,
centroid_col=None, centroid_row=None, quality=None, quality_bitmask=None,
channel=None, campaign=None, quarter=None, mission=None,
cadenceno=None, targetid=None, ra=None, dec=None, label=None, meta={}):
super(KeplerLightCurve, self).__init__(time=time, flux=flux, flux_err=flux_err,
time_format=time_format, time_scale=time_scale,
targetid=targetid, label=label, meta=meta)
self.centroid_col = self._validate_array(centroid_col, name='centroid_col')
self.centroid_row = self._validate_array(centroid_row, name='centroid_row')
self.quality = self._validate_array(quality, name='quality')
self.cadenceno = self._validate_array(cadenceno, name='cadenceno')
self.quality_bitmask = quality_bitmask
self.channel = channel
self.campaign = campaign
self.quarter = quarter
self.mission = mission
self.ra = ra
self.dec = dec
def __getitem__(self, key):
lc = super(KeplerLightCurve, self).__getitem__(key)
# Compared to `LightCurve`, we need to slice a few additional arrays:
lc.quality = self.quality[key]
lc.cadenceno = self.cadenceno[key]
lc.centroid_col = self.centroid_col[key]
lc.centroid_row = self.centroid_row[key]
return lc
def __repr__(self):
return('KeplerLightCurve(ID: {})'.format(self.targetid))
def correct(self, method='sff', **kwargs):
"""Corrects a lightcurve for motion-dependent systematic errors.
Parameters
----------
method : str
Method used to correct the lightcurve.
Right now only 'sff' (Vanderburg's Self-Flat Fielding) is supported.
kwargs : dict
Dictionary of keyword arguments to be passed to the function
defined by `method`.
Returns
-------
new_lc : KeplerLightCurve object
Corrected lightcurve
"""
not_nan = np.isfinite(self.flux)
if method == 'sff':
from .correctors import SFFCorrector
self.corrector = SFFCorrector()
corrected_lc = self.corrector.correct(time=self.time[not_nan],
flux=self.flux[not_nan],
centroid_col=self.centroid_col[not_nan],
centroid_row=self.centroid_row[not_nan],
**kwargs)
else:
raise ValueError("method {} is not available.".format(method))
new_lc = copy.copy(self)
new_lc.time = corrected_lc.time
new_lc.flux = corrected_lc.flux
new_lc.flux_err = self.normalize().flux_err[not_nan]
return new_lc
def to_pandas(self, columns=['time', 'flux', 'flux_err', 'quality',
'centroid_col', 'centroid_row']):
"""Export the LightCurve as a Pandas DataFrame.
Parameters
----------
columns : list of str
List of columns to include in the DataFrame. The names must match
attributes of the `LightCurve` object (e.g. `time`, `flux`).
Returns
-------
dataframe : `pandas.DataFrame` object
A dataframe indexed by `time` and containing the columns `flux`
and `flux_err`.
"""
return super(KeplerLightCurve, self).to_pandas(columns=columns)
def to_fits(self, path=None, overwrite=False, **extra_data):
"""Writes the KeplerLightCurve to a FITS file.
Parameters
----------
path : string, default None
File path, if `None` returns an astropy.io.fits.HDUList object.
overwrite : bool
Whether or not to overwrite the file
extra_data : dict
Extra keywords or columns to include in the FITS file.
Arguments of type str, int, float, or bool will be stored as
keywords in the primary header.
Arguments of type np.array or list will be stored as columns
in the first extension.
Returns
-------
hdu : astropy.io.fits
Returns an astropy.io.fits object if path is None
"""
kepler_specific_data = {
'TELESCOP': "KEPLER",
'INSTRUME': "Kepler Photometer",
'OBJECT': '{}'.format(self.targetid),
'KEPLERID': self.targetid,
'CHANNEL': self.channel,
'MISSION': self.mission,
'RA_OBJ': self.ra,
'DEC_OBJ': self.dec,
'EQUINOX': 2000,
'DATE-OBS': Time(self.time[0]+2454833., format=('jd')).isot,
'SAP_QUALITY': self.quality}
for kw in kepler_specific_data:
if ~np.asarray([kw.lower() == k.lower() for k in extra_data]).any():
extra_data[kw] = kepler_specific_data[kw]
return super(KeplerLightCurve, self).to_fits(path=path,
overwrite=overwrite,
**extra_data)
class TessLightCurve(LightCurve):
"""Defines a light curve class for NASA's TESS mission.
Attributes
----------
time : array-like
Time measurements
flux : array-like
Data flux for every time point
flux_err : array-like
Uncertainty on each flux data point
time_format : str
String specifying how an instant of time is represented,
e.g. 'bkjd' or 'jd'.
time_scale : str
String which specifies how the time is measured,
e.g. 'tdb', 'tt', 'ut1', or 'utc'.
centroid_col, centroid_row : array-like, array-like
Centroid column and row coordinates as a function of time
quality : array-like
Array indicating the quality of each data point
quality_bitmask : int
Bitmask specifying quality flags of cadences that should be ignored
cadenceno : array-like
Cadence numbers corresponding to every time measurement
targetid : int
Tess Input Catalog ID number
"""
def __init__(self, time=None, flux=None, flux_err=None, time_format=None, time_scale=None,
centroid_col=None, centroid_row=None, quality=None, quality_bitmask=None,
cadenceno=None, sector=None, camera=None, ccd=None,
targetid=None, ra=None, dec=None, label=None, meta={}):
super(TessLightCurve, self).__init__(time=time, flux=flux, flux_err=flux_err,
time_format=time_format, time_scale=time_scale,
targetid=targetid, label=label, meta=meta)
self.centroid_col = self._validate_array(centroid_col, name='centroid_col')
self.centroid_row = self._validate_array(centroid_row, name='centroid_row')
self.quality = self._validate_array(quality, name='quality')
self.cadenceno = self._validate_array(cadenceno, name='cadenceno')
self.quality_bitmask = quality_bitmask
self.mission = "TESS"
self.sector = sector
self.camera = camera
self.ccd = ccd
self.ra = ra
self.dec = dec
def __getitem__(self, key):
lc = super(TessLightCurve, self).__getitem__(key)
# Compared to `LightCurve`, we need to slice a few additional arrays:
lc.quality = self.quality[key]
lc.cadenceno = self.cadenceno[key]
lc.centroid_col = self.centroid_col[key]
lc.centroid_row = self.centroid_row[key]
return lc
def __repr__(self):
return('TessLightCurve(TICID: {})'.format(self.targetid))
```
#### File: lightkurve/lightkurve/periodogram.py
```python
from __future__ import division, print_function
import copy
import logging
import numpy as np
from matplotlib import pyplot as plt
import astropy
from astropy.table import Table
from astropy.stats import LombScargle
from astropy import __version__
from astropy import units as u
from astropy.units import cds
from astropy.convolution import convolve, Box1DKernel
from . import MPLSTYLE
log = logging.getLogger(__name__)
__all__ = ['Periodogram']
class Periodogram(object):
"""Class to represents a power spectrum, i.e. frequency vs power.
The Periodogram class represents a power spectrum, with values of
frequency on the x-axis (in any frequency units) and values of power on the
y-axis (in units of ppm^2 / [frequency units]).
Attributes
----------
frequency : `astropy.units.Quantity` object
Array of frequencies with associated astropy unit.
power : `astropy.units.Quantity` object
Array of power-spectral-densities. The Quantity array must have units
of `ppm^2 / freq_unit`, where freq_unit is the unit of the frequency
attribute.
nyquist : float, optional
The Nyquist frequency of the lightcurve. In units of freq_unit, where
freq_unit is the unit of the frequency attribute.
targetid : str, optional
Identifier of the target.
label : str, optional
Human-friendly object label, e.g. "KIC 123456789".
meta : dict, optional
Free-form metadata associated with the Periodogram.
"""
def __init__(self, frequency, power, nyquist=None, label=None,
targetid=None, meta={}):
# Input validation
if not isinstance(frequency, u.quantity.Quantity):
raise ValueError('frequency must be an `astropy.units.Quantity` object.')
if not isinstance(power, u.quantity.Quantity):
raise ValueError('power must be an `astropy.units.Quantity` object.')
# Frequency must have frequency units
try:
frequency.to(u.Hz)
except u.UnitConversionError:
raise ValueError('Frequency must be in units of 1/time.')
# Frequency and power must have sensible shapes
if frequency.shape[0] <= 1:
raise ValueError('frequency and power must have a length greater than 1.')
if frequency.shape != power.shape:
raise ValueError('frequency and power must have the same length.')
self.frequency = frequency
self.power = power
self.nyquist = nyquist
self.label = label
self.targetid = targetid
self.meta = meta
@property
def period(self):
"""Returns the array of periods, i.e. 1/frequency."""
return 1. / self.frequency
@property
def max_power(self):
"""Returns the power of the highest peak in the periodogram."""
return np.nanmax(self.power)
@property
def frequency_at_max_power(self):
"""Returns the frequency corresponding to the highest peak in the periodogram."""
return self.frequency[np.nanargmax(self.power)]
@property
def period_at_max_power(self):
"""Returns the period corresponding to the highest peak in the periodogram."""
return 1. / self.frequency_at_max_power
@staticmethod
def from_lightcurve(lc, min_frequency=None, max_frequency=None,
min_period=None, max_period=None,
frequency=None, period=None,
nterms=1, nyquist_factor=1, oversample_factor=1,
freq_unit=1/u.day, **kwargs):
"""Creates a Periodogram from a LightCurve using the Lomb-Scargle method.
By default, the periodogram will be created for a regular grid of
frequencies from one frequency separation to the Nyquist frequency,
where the frequency separation is determined as 1 / the time baseline.
The min frequency and/or max frequency (or max period and/or min period)
can be passed to set custom limits for the frequency grid. Alternatively,
the user can provide a custom regular grid using the `frequency`
parameter or a custom regular grid of periods using the `period`
parameter.
The spectrum can be oversampled by increasing the oversample_factor
parameter. The parameter nterms controls how many Fourier terms are used
in the model. Note that many terms could lead to spurious peaks. Setting
the Nyquist_factor to be greater than 1 will sample the space beyond the
Nyquist frequency, which may introduce aliasing.
The unit parameter allows a request for alternative units in frequency
space. By default frequency is in (1/day) and power in (ppm^2 * day).
Asteroseismologists for example may want frequency in (microHz) and
power in (ppm^2 / microHz), in which case they would pass
`unit = u.microhertz` where `u` is `astropy.units`
By default this method uses the LombScargle 'fast' method, which assumes
a regular grid. If a regular grid of periods (i.e. an irregular grid of
frequencies) is passed, it will use the 'slow' method. If nterms > 1 is
passed, it will use the 'fastchi2' method for regular grids, and 'chi2'
for irregular grids. The normalization of the Lomb Scargle periodogram
is fixed to `psd`, and cannot be overridden.
Caution: this method assumes that the LightCurve's time (lc.time)
is given in units of days.
Parameters
----------
lc : LightCurve object
The LightCurve from which to compute the Periodogram.
min_frequency : float
If specified, use this minimum frequency rather than one over the
time baseline.
max_frequency : float
If specified, use this maximum frequency rather than nyquist_factor
times the nyquist frequency.
min_period : float
If specified, use 1./min_period as the maximum frequency rather
than nyquist_factor times the nyquist frequency.
max_period : float
If specified, use 1./max_period as the minimum frequency rather
than one over the time baseline.
frequency : array-like
The regular grid of frequencies to use. If given a unit, it is
converted to units of freq_unit. If not, it is assumed to be in
units of freq_unit. This overrides any set frequency limits.
period : array-like
The regular grid of periods to use (as 1/period). If given a unit,
it is converted to units of freq_unit. If not, it is assumed to be
in units of 1/freq_unit. This overrides any set period limits.
nterms : int
Default 1. Number of terms to use in the Fourier fit.
nyquist_factor : int
Default 1. The multiple of the average Nyquist frequency.
Overridden by ``max_frequency`` (or ``min_period``).
oversample_factor : int
The frequency spacing, determined by the time baseline of the
lightcurve, is divided by this factor, oversampling the frequency
space. This parameter is identical to the samples_per_peak parameter
in astropy.LombScargle()
freq_unit : `astropy.units.core.CompositeUnit`
Default: 1/u.day. The desired frequency units for the Lomb Scargle
periodogram. This implies that 1/freq_unit is the units for period.
kwargs : dict
Keyword arguments passed to `astropy.stats.LombScargle()`
Returns
-------
Periodogram : `Periodogram` object
Returns a Periodogram object extracted from the lightcurve.
"""
# Make sure the lightcurve object is normalized
lc = lc.normalize()
# Check if any values of period have been passed and set format accordingly
if not all(b is None for b in [period, min_period, max_period]):
format = 'period'
else:
format = 'frequency'
# If period and frequency keywords have both been set, throw an error
if (not all(b is None for b in [period, min_period, max_period])) & \
(not all(b is None for b in [frequency, min_frequency, max_frequency])):
raise ValueError('You have input keyword arguments for both frequency and period. '
'Please only use one.')
if (~np.isfinite(lc.flux)).any():
raise ValueError('Lightcurve contains NaN values. Use lc.remove_nans()'
' to remove NaN values from a LightCurve.')
# Hard coding that time is in days.
time = lc.time.copy() * u.day
# Calculate Nyquist Frequency and frequency bin width in terms of days
nyquist = 0.5 * (1./(np.median(np.diff(time))))
fs = (1./(time[-1] - time[0])) / oversample_factor
# Convert these values to requested frequency unit
nyquist = nyquist.to(freq_unit)
fs = fs.to(freq_unit)
# Warn if there is confusing input
if (frequency is not None) & (any([a is not None for a in [min_frequency, max_frequency]])):
log.warning("You have passed both a grid of frequencies "
"and min_frequency/max_frequency arguments; "
"the latter will be ignored.")
if (period is not None) & (any([a is not None for a in [min_period, max_period]])):
log.warning("You have passed a grid of periods "
"and min_period/max_period arguments; "
"the latter will be ignored.")
# Convert any period inputs into their equivalent frequency limits
if max_period is not None:
# min_frequency MUST be none by this point.
min_frequency = 1. / max_period
if min_period is not None:
# max_frequency MUST be none by this point.
max_frequency = 1. / min_period
# If the user specified a period, copy it into the frequency.
if (period is not None):
frequency = 1. / period
# Do unit conversions if user input min/max frequency or period
if frequency is None:
if min_frequency is not None:
min_frequency = u.Quantity(min_frequency, freq_unit)
if max_frequency is not None:
max_frequency = u.Quantity(max_frequency, freq_unit)
if (min_frequency is not None) & (max_frequency is not None):
if (min_frequency > max_frequency):
if format == 'frequency':
raise ValueError('min_frequency cannot be larger than max_frequency')
if format == 'period':
raise ValueError('min_period cannot be larger than max_period')
# If nothing has been passed in, set them to the defaults
if min_frequency is None:
min_frequency = fs
if max_frequency is None:
max_frequency = nyquist * nyquist_factor
# Create frequency grid evenly spaced in frequency
frequency = np.arange(min_frequency.value, max_frequency.value, fs.to(freq_unit).value)
# Convert to desired units
frequency = u.Quantity(frequency, freq_unit)
if nterms > 1:
raise NotImplementedError('Increasing the number of terms is not implemented yet.')
else:
method = 'fast'
if period is not None:
method = 'slow'
log.warning("You have passed an evenly-spaced grid of periods. "
"These are not evenly spaced in frequency space.\n"
"Method has been set to 'slow' to allow for this.")
if float(__version__[0]) >= 3:
LS = LombScargle(time, lc.flux * 1e6,
nterms=nterms, normalization='psd', **kwargs)
power = LS.power(frequency, method=method)
else:
LS = LombScargle(time, lc.flux * 1e6,
nterms=nterms, **kwargs)
power = LS.power(frequency, method=method, normalization='psd')
# Normalise the power according to Parseval's theorem
norm = np.std(lc.flux * 1e6)**2 / np.sum(power)
power *= norm
power = power * (cds.ppm**2)
# Rescale power to units of ppm^2 / [frequency unit]
power = power / fs
# Build the Periodogram object from the computed quantities
return Periodogram(frequency=frequency, power=power, nyquist=nyquist,
targetid=lc.targetid, label=lc.label)
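# Hypothetical usage sketch (not part of the original source): requesting
# frequencies in microhertz, as is common in asteroseismology.
#   import astropy.units as u
#   pg = Periodogram.from_lightcurve(lc.remove_nans(), freq_unit=u.microhertz)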
def bin(self, binsize=10, method='mean'):
"""Bins the power spectrum.
Parameters
----------
binsize : int
The factor by which to bin the power spectrum, in the sense that
the power spectrum will be smoothed by taking the mean (or median)
in bins of size `binsize`, so that the result has length
N / binsize, where N is the length of the original frequency
array. Defaults to 10.
method : str, one of 'mean' or 'median'
Method to use for binning. Default is 'mean'.
Returns
-------
binned_periodogram : a `Periodogram` object
Returns a new `Periodogram` object which has been binned.
"""
# Input validation
if binsize < 1:
raise ValueError('binsize must be larger than or equal to 1')
if method not in ('mean', 'median'):
raise ValueError("{} is not a valid method, must be 'mean' or 'median'.".format(method))
m = int(len(self.power) / binsize) # length of the binned arrays
if method == 'mean':
binned_freq = self.frequency[:m*binsize].reshape((m, binsize)).mean(1)
binned_power = self.power[:m*binsize].reshape((m, binsize)).mean(1)
elif method == 'median':
binned_freq = np.nanmedian(self.frequency[:m*binsize].reshape((m, binsize)), axis=1)
binned_power = np.nanmedian(self.power[:m*binsize].reshape((m, binsize)), axis=1)
binned_pg = copy.deepcopy(self)
binned_pg.frequency = binned_freq
binned_pg.power = binned_power
return binned_pg
def smooth(self, method='boxkernel', filter_width=0.1):
"""Smooths the power spectrum using the 'boxkernel' or 'logmedian' method.
If `method` is set to 'boxkernel', this method will smooth the power
spectrum by convolving with an astropy `Box1DKernel` with a width of
`filter_width`, where `filter_width` is in units of frequency.
This is best for filtering out noise while maintaining seismic mode
peaks. This method requires the Periodogram to have an evenly spaced
grid of frequencies. A `ValueError` exception will be raised if this is
not the case.
If `method` is set to 'logmedian', it smooths the power spectrum using
a moving median which moves across the power spectrum in steps of
0.5 * `filter_width`, where `filter_width` is measured in
log10(frequency) space.
estimating the noise background, as it filters over the seismic peaks.
Parameters
----------
method : str, one of 'boxkernel' or 'logmedian'
The smoothing method to use. Defaults to 'boxkernel'.
filter_width : float
If `method` = 'boxkernel', this is the width of the smoothing filter
in units of frequency.
If method = `logmedian`, this is the width of the smoothing filter
in log10(frequency) space.
Returns
-------
smoothed_pg : `Periodogram` object
Returns a new `Periodogram` object in which the power spectrum
has been smoothed.
"""
# Input validation
if method not in ('boxkernel', 'logmedian'):
raise ValueError("the `method` parameter must be one of "
"'boxkernel' or 'logmedian'.")
if method == 'boxkernel':
if filter_width <= 0.:
raise ValueError("the `filter_width` parameter must be "
"larger than 0 for the 'boxkernel' method.")
try:
filter_width = u.Quantity(filter_width, self.frequency.unit)
except u.UnitConversionError:
raise ValueError("the `filter_width` parameter must have "
"frequency units.")
# Check to see if we have a grid of evenly spaced periods instead.
fs = np.mean(np.diff(self.frequency))
if not np.isclose(np.median(np.diff(self.frequency.value)), fs.value):
raise ValueError("the 'boxkernel' method requires the periodogram "
"to have a grid of evenly spaced frequencies.")
box_kernel = Box1DKernel(np.ceil(filter_width/fs))
smooth_power = convolve(self.power.value, box_kernel)
smooth_pg = copy.deepcopy(self)
smooth_pg.power = u.Quantity(smooth_power, self.power.unit)
return smooth_pg
if method == 'logmedian':
if isinstance(filter_width, astropy.units.quantity.Quantity):
raise ValueError("the 'logmedian' method requires a dimensionless "
"value for `filter_width` in log10(frequency) space.")
count = np.zeros(len(self.frequency.value), dtype=int)
bkg = np.zeros_like(self.frequency.value)
x0 = np.log10(self.frequency[0].value)
while x0 < np.log10(self.frequency[-1].value):
m = np.abs(np.log10(self.frequency.value) - x0) < filter_width
if len(bkg[m]) > 0:
bkg[m] += np.nanmedian(self.power[m].value)
count[m] += 1
x0 += 0.5 * filter_width
bkg /= count
smooth_pg = copy.deepcopy(self)
smooth_pg.power = u.Quantity(bkg, self.power.unit)
return smooth_pg
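# Hypothetical usage sketch (not part of the original source): a wide
# log-median filter gives a noise-background estimate.
#   bkg_pg = pg.smooth(method='logmedian', filter_width=0.1)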
def plot(self, scale='linear', ax=None, xlabel=None, ylabel=None, title='',
style='lightkurve', format='frequency', unit=None, **kwargs):
"""Plots the Periodogram.
Parameters
----------
scale: str
Set x,y axis to be "linear" or "log". Default is linear.
ax : matplotlib.axes._subplots.AxesSubplot
A matplotlib axes object to plot into. If no axes is provided,
a new one will be generated.
xlabel : str
Plot x axis label
ylabel : str
Plot y axis label
title : str
Plot set_title
style : str
Path or URL to a matplotlib style file, or name of one of
matplotlib's built-in stylesheets (e.g. 'ggplot').
Lightkurve's custom stylesheet is used by default.
format : str
{'frequency', 'period'}. Default 'frequency'. If 'frequency', x-axis
units will be frequency. If 'period', the x-axis units will be
period and 'log' scale.
kwargs : dict
Dictionary of arguments to be passed to `matplotlib.pyplot.plot`.
Returns
-------
ax : matplotlib.axes._subplots.AxesSubplot
The matplotlib axes object.
"""
if isinstance(unit, u.quantity.Quantity):
unit = unit.unit
if unit is None:
unit = self.frequency.unit
if format == 'period':
unit = self.period.unit
if style is None or style == 'lightkurve':
style = MPLSTYLE
if ylabel is None:
ylabel = "Power Spectral Density [{}]".format(self.power.unit.to_string('latex'))
# This will need to be fixed with housekeeping; `self.label` does not always exist.
if ('label' not in kwargs) and ('label' in dir(self)):
kwargs['label'] = self.label
with plt.style.context(style):
if ax is None:
fig, ax = plt.subplots()
# Plot frequency and power
if format.lower() == 'frequency':
ax.plot(self.frequency.to(unit), self.power, **kwargs)
if xlabel is None:
xlabel = "Frequency [{}]".format(unit.to_string('latex'))
elif format.lower() == 'period':
ax.plot(self.period.to(unit), self.power, **kwargs)
if xlabel is None:
xlabel = "Period [{}]".format(unit.to_string('latex'))
else:
raise ValueError('{} is not a valid plotting format'.format(format))
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
# Show the legend if labels were set
legend_labels = ax.get_legend_handles_labels()
if (np.sum([len(a) for a in legend_labels]) != 0):
ax.legend()
ax.set_yscale(scale)
ax.set_xscale(scale)
ax.set_title(title)
return ax
def flatten(self, method='logmedian', filter_width=0.01, return_trend=False):
"""Estimates the Signal-To-Noise (SNR) spectrum by dividing out an
estimate of the noise background.
This method divides the power spectrum by a background estimated
using a moving filter in log10 space by default. For details on the
`method` and `filter_width` parameters, see `Periodogram.smooth()`
Dividing the power through by the noise background produces a spectrum
with no units of power. Since the signal is divided through by a measure
of the noise, we refer to this as a `Signal-To-Noise` spectrum.
Parameters
----------
method : str, one of 'boxkernel' or 'logmedian'
Background estimation method passed on to `Periodogram.smooth()`.
Defaults to 'logmedian'.
filter_width : float
If `method` = 'boxkernel', this is the width of the smoothing filter
in units of frequency.
If method = `logmedian`, this is the width of the smoothing filter
in log10(frequency) space.
Returns
-------
snr_spectrum : `Periodogram` object
Returns a periodogram object where the power is an estimate of the
signal-to-noise of the spectrum, created by dividing the power
by a simple estimate of the noise background using a smoothing filter.
"""
bkg = self.smooth(method=method, filter_width=filter_width)
snr_pg = self / bkg.power
snr = SNRPeriodogram(snr_pg.frequency, snr_pg.power,
nyquist=self.nyquist, targetid=self.targetid,
label=self.label, meta=self.meta)
if return_trend:
return snr, bkg
return snr
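# Hypothetical usage sketch (not part of the original source): producing an
# SNR spectrum together with the background that was divided out.
#   snr_pg, bkg_pg = pg.flatten(method='logmedian', filter_width=0.01,
#                               return_trend=True)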
def to_table(self):
"""Exports the Periodogram as an Astropy Table.
Returns
-------
table : `astropy.table.Table` object
An AstroPy Table with columns 'frequency', 'period', and 'power'.
"""
return Table(data=(self.frequency, self.period, self.power),
names=('frequency', 'period', 'power'),
meta=self.meta)
def __repr__(self):
return('Periodogram(ID: {})'.format(self.targetid))
def __getitem__(self, key):
copy_self = copy.copy(self)
copy_self.frequency = self.frequency[key]
copy_self.power = self.power[key]
return copy_self
def __add__(self, other):
copy_self = copy.copy(self)
copy_self.power = copy_self.power + u.Quantity(other, self.power.unit)
return copy_self
def __radd__(self, other):
return self.__add__(other)
def __sub__(self, other):
return self.__add__(-other)
def __rsub__(self, other):
copy_self = copy.copy(self)
copy_self.power = other - copy_self.power
return copy_self
def __mul__(self, other):
copy_self = copy.copy(self)
copy_self.power = other * copy_self.power
return copy_self
def __rmul__(self, other):
return self.__mul__(other)
def __truediv__(self, other):
return self.__mul__(1./other)
def __rtruediv__(self, other):
copy_self = copy.copy(self)
copy_self.power = other / copy_self.power
return copy_self
def __div__(self, other):
return self.__truediv__(other)
def __rdiv__(self, other):
return self.__rtruediv__(other)
def properties(self):
"""Prints a summary of the non-callable attributes of the Periodogram object.
Prints in order of type (ints, strings, lists, arrays and others).
Prints in alphabetical order.
"""
attrs = {}
for attr in dir(self):
if not attr.startswith('_'):
res = getattr(self, attr)
if callable(res):
continue
if isinstance(res, astropy.units.quantity.Quantity):
unit = res.unit
res = res.value
attrs[attr] = {'res': res}
attrs[attr]['unit'] = unit.to_string()
else:
attrs[attr] = {'res': res}
attrs[attr]['unit'] = ''
if attr == 'hdu':
attrs[attr] = {'res': res, 'type': 'list'}
for idx, r in enumerate(res):
if idx == 0:
attrs[attr]['print'] = '{}'.format(r.header['EXTNAME'])
else:
attrs[attr]['print'] = '{}, {}'.format(
attrs[attr]['print'], '{}'.format(r.header['EXTNAME']))
continue
if isinstance(res, int):
attrs[attr]['print'] = '{}'.format(res)
attrs[attr]['type'] = 'int'
elif isinstance(res, float):
attrs[attr]['print'] = '{}'.format(np.round(res, 4))
attrs[attr]['type'] = 'float'
elif isinstance(res, np.ndarray):
attrs[attr]['print'] = 'array {}'.format(res.shape)
attrs[attr]['type'] = 'array'
elif isinstance(res, list):
attrs[attr]['print'] = 'list length {}'.format(len(res))
attrs[attr]['type'] = 'list'
elif isinstance(res, str):
if res == '':
attrs[attr]['print'] = '{}'.format('None')
else:
attrs[attr]['print'] = '{}'.format(res)
attrs[attr]['type'] = 'str'
elif attr == 'wcs':
attrs[attr]['print'] = 'astropy.wcs.wcs.WCS'
attrs[attr]['type'] = 'other'
else:
attrs[attr]['print'] = '{}'.format(type(res))
attrs[attr]['type'] = 'other'
output = Table(names=['Attribute', 'Description', 'Units'], dtype=[object, object, object])
idx = 0
types = ['int', 'str', 'float', 'list', 'array', 'other']
for typ in types:
for attr, dic in attrs.items():
if dic['type'] == typ:
output.add_row([attr, dic['print'], dic['unit']])
idx += 1
print('lightkurve.Periodogram properties:')
output.pprint(max_lines=-1, max_width=-1)
class SNRPeriodogram(Periodogram):
"""Defines a Signal-to-Noise Ratio (SNR) Periodogram class.
This class is nearly identical to the standard :class:`Periodogram` class,
but has different plotting defaults.
"""
def __init__(self, *args, **kwargs):
super(SNRPeriodogram, self).__init__(*args, **kwargs)
def __repr__(self):
return('SNRPeriodogram(ID: {})'.format(self.targetid))
def plot(self, **kwargs):
"""Plot the SNR spectrum using matplotlib's `plot` method.
See `Periodogram.plot` for details on the accepted arguments.
Parameters
----------
kwargs : dict
Dictionary of arguments to be passed to `Periodogram.plot`.
Returns
-------
ax : matplotlib.axes._subplots.AxesSubplot
The matplotlib axes object.
"""
ax = super(SNRPeriodogram, self).plot(**kwargs)
if 'ylabel' not in kwargs:
ax.set_ylabel("Signal to Noise Ratio (SNR)")
return ax
``` |
{
"source": "JoseALermaIII/python-tutorials",
"score": 4
} |
#### File: AutomateTheBoringStuff/Ch01/P1_basics.py
```python
def main():
# Expressions
print(2 + 2)
print(2 + 3 * 6)
print((2 + 3) * 6)
print(48565878 * 578453)
print(2 ** 8)
print(23 / 7)
print(23 // 7)
print(23 % 7)
print(2 + 2)
print((5 - 1) * ((7 + 1) / (3 - 1)))
# Uncomment to see what happens
#print(5 + )
#print(42 + 5 + * 2)
# The Integer, Floating-Point, and String Data Types
#print("Hello world!) # Uncomment to see what happens
print("Alice" + "Bob")
#print("Alice" + 42) # Uncomment to see what happens
print("Alice" * 5)
# Uncomment to see what happens
#print("Alice" * "Bob")
#print("Alice" * 5.0)
# Storing Values in Variables
spam = 40
print(spam)
eggs = 2
print(spam + eggs)
print(spam + eggs + spam)
spam = spam + 2
print(spam)
spam = "Hello"
print(spam)
spam = "Goodbye"
print(spam)
# The len() Function
print(len("hello"))
print(len("My very energetic monster just scarfed nachos."))
print(len(''))
#print("I am" + 29 + " years old.") # Uncomment to see what happens
# The str(), int(), and float() Functions
print(str(29))
print("I am " + str(29) + " years old.")
print(str(0))
print(str(-3.14))
print(int("42"))
print(int("-99"))
print(int(1.25))
print(int(1.99))
print(float("3.14"))
print(float(10))
spam = input("Type 101 here: ") # Type 101 when prompted
print(spam)
spam = int(spam)
print(spam)
print(spam * 10 / 5)
# Uncomment to see what happens
#print(int("99.99"))
#print(int("twelve"))
print(int(7.7))
print(int(7.7) + 1)
# If P1_basics.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
```
#### File: AutomateTheBoringStuff/Ch02/P09_fiveTimes2.py
```python
def main():
print('My name is')
i = 0
while i < 5:
print('<NAME> Times (' + str(i) + ')')
i = i + 1
# If P09_fiveTimes2.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
```
#### File: AutomateTheBoringStuff/Ch03/P09_zeroDivide2.py
```python
def spam(divideBy: int) -> float:
"""Spam
Divides integer 42 by given integer, but also handles a :class:`ZeroDivisionError`.
Args:
divideBy: Integer to divide 42 by.
Returns:
Float result of 42 divided by given integer.
"""
try:
return 42 / divideBy
except ZeroDivisionError:
print('Error: Invalid argument.')
def main():
print(spam(2))
print(spam(12))
print(spam(0))
print(spam(1))
if __name__ == '__main__':
main()
```
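Note that when `spam()` catches the `ZeroDivisionError` it falls off the end of the function, so `print(spam(0))` prints the error message followed by `None`. A minimal sketch (a hypothetical variant, not from the book) that returns an explicit sentinel instead:
```python
def safe_divide(dividend: float, divisor: float) -> float:
    """Return dividend / divisor, or NaN when divisor is zero."""
    try:
        return dividend / divisor
    except ZeroDivisionError:
        return float('nan')  # explicit sentinel instead of an implicit None

print(safe_divide(42, 0))  # nan
```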
#### File: AutomateTheBoringStuff/Ch03/P11_guessTheNumber.py
```python
def main():
import random
secretNumber = random.randint(1, 20)
print('I am thinking of a number between 1 and 20.')
# Ask the player to guess 6 times.
for guessesTaken in range(1, 7):
print('Take a guess.')
guess = int(input())
if guess < secretNumber:
print('Your guess is too low.')
elif guess > secretNumber:
print('Your guess is too high.')
else:
break # This condition is the correct guess!
if guess == secretNumber:
print('Good job! You guessed my number in ' + str(guessesTaken) + ' guesses!')
else:
print('Nope. The number I was thinking of was ' + str(secretNumber))
if __name__ == '__main__':
main()
```
#### File: AutomateTheBoringStuff/Ch04/P3_myPets.py
```python
def main():
myPets = ['Zophie', 'Pooka', 'Fat-tail']
print('Enter a pet name:')
name = input()
if name not in myPets:
print('I do not have a pet named ' + name)
else:
print(name + ' is my pet.')
if __name__ == "__main__":
main()
```
#### File: AutomateTheBoringStuff/Ch09/P4_renameDates.py
```python
def main():
import shutil, os, re
# Create a regex that matches files with the American date format.
datePattern = re.compile(r"""
^(.*?) # all text before the date
((0|1)?\d)- # one or two digits for the month
((0|1|2|3)?\d)- # one or two digits for the day
((19|20)\d\d) # four digits for the year
(.*?)$ # all the text after the date
""", re.VERBOSE)
"""re.compile: Regex object representing American date format MM-DD-YYYY"""
# Loop over the files in the working directory.
for amerFilename in os.listdir('.'):
mo = datePattern.search(amerFilename)
# Skip files without a date.
if mo is None:
continue
# Get the different parts of the filename.
beforePart = mo.group(1)
monthPart = mo.group(2)
dayPart = mo.group(4)
yearPart = mo.group(6)
afterPart = mo.group(8)
# Form the European-style filename.
euroFilename = beforePart + dayPart + '-' + monthPart + '-' + yearPart + afterPart
# Get the full, absolute file paths.
absWorkingDir = os.path.abspath('.')
amerFilename = os.path.join(absWorkingDir, amerFilename)
euroFilename = os.path.join(absWorkingDir, euroFilename)
# Rename the files.
print('Renaming "%s" to "%s"...' % (amerFilename, euroFilename)) # DEBUG
#shutil.move(amerFilename, euroFilename) # uncomment after testing
if __name__ == '__main__':
main()
```
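The group numbers 1, 2, 4, 6, and 8 are used above because each nested set of parentheses (e.g. `(0|1)` inside the month group) counts as its own capture group. A standalone sketch, with a made-up filename, showing which group captures what:
```python
import re

datePattern = re.compile(r"""
    ^(.*?)           # group 1: text before the date
    ((0|1)?\d)-      # group 2: month (group 3 is the nested (0|1))
    ((0|1|2|3)?\d)-  # group 4: day   (group 5 is the nested alternation)
    ((19|20)\d\d)    # group 6: year  (group 7 is the nested (19|20))
    (.*?)$           # group 8: text after the date
    """, re.VERBOSE)

mo = datePattern.search('report-03-14-2021.txt')
print(mo.group(1), mo.group(2), mo.group(4), mo.group(6), mo.group(8))
# report- 03 14 2021 .txt
```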
#### File: AutomateTheBoringStuff/Ch10/P4_podBayDoor.py
```python
def main():
podBayDoorStatus = "open"
assert podBayDoorStatus == "open", "The pod bay doors need to be 'open'."
podBayDoorStatus = 'I\'m sorry, Dave. I\'m afraid I can\'t do that.'
assert podBayDoorStatus == "open", "The pod bay doors need to be 'open'."
if __name__ == '__main__':
main()
```
#### File: AutomateTheBoringStuff/Ch11/P2_parseHTML.py
```python
def main():
import requests, bs4
# Creating a BeautifulSoup Object from HTML
res = requests.get("http://nostarch.com")
res.raise_for_status() # Raise error if nothing fetched
noStarchSoup = bs4.BeautifulSoup(res.text, "lxml") # Specify parser to avoid warning
print(type(noStarchSoup))
exampleFile = open("example.html")
exampleSoup = bs4.BeautifulSoup(exampleFile, "lxml")
print(type(exampleSoup))
# Finding an Element with the select() Method
exampleFile = open("example.html")
exampleSoup = bs4.BeautifulSoup(exampleFile.read(), "lxml")
elems = exampleSoup.select("#author")
print(type(elems))
print(len(elems))
print(type(elems[0]))
print(elems[0].getText())
print(str(elems[0]))
print(elems[0].attrs)
pElems = exampleSoup.select('p')
print(str(pElems[0]))
print(pElems[0].getText())
print(str(pElems[1]))
print(pElems[1].getText())
print(str(pElems[2]))
print(pElems[2].getText())
# Getting Data from an Element's Attributes
soup = bs4.BeautifulSoup(open("example.html"), "lxml")
spanElem = soup.select('span')[0]
print(str(spanElem))
print(spanElem.get("id"))
print(spanElem.get("some_nonexistent_addr") is None)
print(spanElem.attrs)
if __name__ == '__main__':
main()
```
#### File: AutomateTheBoringStuff/Ch12/P3_writingExcel.py
```python
def main():
# Creating and Saving Excel Documents
import openpyxl
wb = openpyxl.Workbook()
print(wb.sheetnames)
sheet = wb.active
print(sheet.title)
sheet.title = "Spam Bacon Eggs Sheet"
print(wb.sheetnames)
wb = openpyxl.load_workbook("example.xlsx")
sheet = wb.active
sheet.title = "Spam Spam Spam"
wb.save("example_copy.xlsx")
# Creating and Removing Sheets
wb = openpyxl.Workbook()
print(wb.sheetnames)
wb.create_sheet()
print(wb.sheetnames)
wb.create_sheet(index=0, title="First Sheet")
print(wb.sheetnames)
wb.create_sheet(index=2, title="Middle Sheet")
print(wb.sheetnames)
wb.remove(wb["Middle Sheet"])
wb.remove(wb["Sheet1"])
print(wb.sheetnames)
# Writing Values to Cells
sheet = wb["Sheet"]
sheet["A1"] = "Hello world!"
print(sheet["A1"].value)
if __name__ == '__main__':
main()
```
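As a companion to the writing demo above, a short sketch (assuming openpyxl >= 2.6 for the `values_only` flag) that writes values and reads them back without touching any file on disk:
```python
import openpyxl

wb = openpyxl.Workbook()
sheet = wb.active
sheet["A1"] = "Hello world!"
sheet["B1"] = 42
# Read back by coordinate, or by iterating rows:
print(sheet["A1"].value)  # Hello world!
for row in sheet.iter_rows(min_row=1, max_row=1, values_only=True):
    print(row)  # ('Hello world!', 42)
```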
#### File: AutomateTheBoringStuff/Ch14/P4_readWriteJSON.py
```python
def main():
import json
# Reading JSON with the loads() Function
stringOfJsonData = '{"name": "Zophie", "isCat": true, "miceCaught": 0,' \
'"felineIQ": null}'
jsonDataAsPythonValue = json.loads(stringOfJsonData)
print(jsonDataAsPythonValue)
# Writing JSON with the dumps() Function
pythonValue = {'isCat': True, 'miceCaught': 0, 'name': 'Zophie',
'felineIQ': None}
stringOfJsonData = json.dumps(pythonValue)
print(stringOfJsonData)
if __name__ == '__main__':
main()
```
#### File: AutomateTheBoringStuff/Ch14/P5_quickWeather.py
```python
import json, requests, sys, shelve, datetime
def getWeather(loc: str, apikey: str) -> dict:
"""Get weather
Uses `OpenWeatherMap.org`_ API to get JSON data of given location with given API key. Data is stored as a
:class:`dict` and the current date and time (in UTC) is also stored using :meth:`datetime.datetime.now`.
Args:
loc: Location to get weather data of in ``City,Country Code`` format.
apikey: API key used to interface with `OpenWeatherMap.org`_'s API.
Returns:
Dictionary with weather JSON data and current date time (in UTC) added.
.. _OpenWeatherMap.org:
https://openweathermap.org/api
"""
# Download the JSON data from OpenWeatherMap.org's API.
url = 'http://api.openweathermap.org/data/2.5/forecast?q=%s&APPID=%s' % (loc, apikey)
response = requests.get(url)
response.raise_for_status()
# Load JSON data into a Python variable.
data = json.loads(response.text)
now = datetime.datetime.now(tz=datetime.timezone.utc)
data["savedTime"] = now
return data
def main() -> None:
"""P5_quickWeather.py
Displays given location's 3-day weather information.
Returns:
None. Weather data is printed to terminal and JSON data is stored in a :py:mod:`shelve` shelf, ``weather``.
Note:
To prevent excessive API requests, JSON data is stored in a :py:mod:`shelve` shelf and only redownloaded every
10 minutes. Time is kept track using :meth:`datetime.datetime.now` and :class:`datetime.timedelta`.
"""
# Compute location from command line arguments.
if len(sys.argv) < 2:
print('Usage: P5_quickWeather.py city,country code')
sys.exit()
location = ' '.join(sys.argv[1:])
# Get API Key from file
with open("apikey.txt") as file:
apiKey = file.read()
# Open shelf to read data
weatherShelf = shelve.open("weather")
# Download and save data to shelf
if not list(weatherShelf.keys()): # Shelf empty, download data
weatherShelf["data"] = getWeather(location, apiKey)
else:
# Check for 10 minute interval between API requests
timeNow = datetime.datetime.now(tz=datetime.timezone.utc)
savedTime = weatherShelf["data"]["savedTime"]
timedelta = timeNow - savedTime
interval = datetime.timedelta(minutes=10)
if timedelta < interval:
city = weatherShelf["data"]["city"]
print("RequestError: Need to wait %s minutes. Using saved data for: %s, %s" %
(round((interval - timedelta).total_seconds()/60, 2), city["name"], city["country"]))
else:
weatherShelf["data"] = getWeather(location, apiKey)
# Print weather descriptions
w = weatherShelf["data"]['list']
count = int(weatherShelf["data"]["cnt"])
# Print current weather
print('Current weather in %s:' % location)
print(w[0]['weather'][0]['main'], '-', w[0]['weather'][0]['description'])
currentDate = datetime.datetime.strptime(w[0]["dt_txt"][:10], '%Y-%m-%d')
tomorrowDate = currentDate + datetime.timedelta(days=1)
dayAfterDate = currentDate + datetime.timedelta(days=2)
print()
for i in range(1, count):
currentDate = datetime.datetime.strptime(w[i]["dt_txt"][:10], '%Y-%m-%d')
# If current date is greater than tomorrow date, print tomorrow weather
if currentDate > tomorrowDate:
print('Tomorrow:')
print(w[i]['weather'][0]['main'], '-', w[i]['weather'][0]['description'])
tomorrowDate = currentDate + datetime.timedelta(days=7) # past the 5-day forecast
print()
# If current date is greater than day after date, print day after tomorrow weather
elif currentDate > dayAfterDate:
print('Day after tomorrow:')
print(w[i]['weather'][0]['main'], '-', w[i]['weather'][0]['description'])
break
weatherShelf.close()
if __name__ == '__main__':
main()
```
#### File: AutomateTheBoringStuff/Ch18/P3_mouseInteraction.py
```python
def main():
import pyautogui, time
# Clicking the Mouse
# No delay, so don't blink
pyautogui.click(100, 150, button='left')
pyautogui.click(10, 5)
pyautogui.click(200, 250, button='right')
pyautogui.rightClick(20, 25)
pyautogui.click(300, 350, button='middle')
pyautogui.middleClick(30, 35)
pyautogui.mouseDown() # pushes left button down at current location
pyautogui.mouseUp() # releases left button at current location
pyautogui.doubleClick() # double click left button
# Scrolling the Mouse
pyautogui.scroll(200) # Units vary depending on OS and application
import pyperclip # imports should be at top of file
numbers = ''
for i in range(200):
numbers = numbers + str(i) + '\n'
pyperclip.copy(numbers) # 200 lines of numbers
time.sleep(10) # time to paste the lines in a text editor
pyautogui.scroll(100)
if __name__ == '__main__':
main()
```
#### File: CrackingCodes/Ch03/PracticeQuestions.py
```python
def main():
spam = "Cats"
# 1. If you assign spam = "Cats", what do the following lines print?
print(spam + spam + spam)
print(spam * 3)
# 2. What do the following lines print?
print("Dear Alice, \nHow are you?\nSincerely,\nBob")
print("Hello" + "Hello")
spam = "Four score and seven years is eighty seven years."
# 3. If you assign spam = "Four score and seven years is eighty seven years.",
# what would each of the following lines print?
print(spam[5])
print(spam[-3])
print(spam[0:4] + spam[5])
print(spam[-3:-1])
print(spam[:10])
print(spam[-5:])
print(spam[:])
# 4. Which window displays the >>> prompt, the interactive shell or the file editor?
# Hint: Check page 30
answers = ["interactive shell", "file editor"]
print("The window that displays the >>> prompt is the %s." % answers[-2 + 5 * 7 * 9 * 0])
# 5. What does the following line print?
#print("Hello, world!")
# If PracticeQuestions.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
```
#### File: CrackingCodes/Ch06/PracticeQuestion.py
```python
from pythontutorials.books.CrackingCodes.Ch01.caesarHacker import hackCaesar
def main():
ciphertext = ["qeFIP?eGSeECNNS,",
"5coOMXXcoPSZIWoQI,",
"avnl1olyD4l'ylDohww6DhzDjhuDil,",
"z.GM?.cEQc. 70c.7KcKMKHA9AGFK,",
"?MFYp2pPJJUpZSIJWpRdpMFY,",
"ZqH8sl5HtqHTH4s3lyvH5zH5spH4t pHzqHlH3l5K",
"Zfbi,!tif!xpvme!qspcbcmz!fbu!nfA"] # ROFL
for line in ciphertext:
print(hackCaesar(line))
input("Press enter to continue\n")
# If PracticeQuestion.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
```
#### File: CrackingCodes/Ch07/helloFunction.py
```python
def hello(name: str) -> None:
"""Prints hello.
Prints hello to given name.
Args:
name: Name to say hello to.
Returns:
Prints hello to given name.
"""
print('Hello, ' + name)
def main():
print('Start.')
hello('Clarice')
print('Call the function again:')
hello('Dr. Lecter')
print('Done.')
if __name__ == '__main__':
main()
```
#### File: Ch07/PracticeQuestions/Question3.py
```python
def main():
print([0, 1, 2, 3, 4][2])
print([[1, 2], [3, 4]][0])
print([[1, 2], [3, 4]][0][1])
print(['hello'][0][1])
print([2, 4, 6, 8, 10][1:3])
print(list('Hello world!')) # Nifty?
print(list(range(10))[2]) # Huh
print(list(range(10))) # Oh, okay (added for clarity)
# If Question3.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
```
#### File: Ch07/PracticeQuestions/Question5.py
```python
def main():
word = "Bo"
number = 42
word += "Bo"
print(word)
number -= 2
print(number)
word *= 10
print(word)
number /= 4
print(number)
# If Question5.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
```
#### File: CrackingCodes/Ch11/PracticeQuestions.py
```python
def main():
# 1. What does the following code print?
spam = {'name': 'Al'}
print(spam['name'])
# 2. What does this code print?
spam = {'eggs': 'bacon'}
print('bacon' in spam)
# 3. What for loop code would print the values in the following spam
# dictionary?
spam = {'name': 'Zophie', 'species': 'cat', 'age': 8}
for key in spam:
print(spam[key])
# 4. What does the following line print?
print('Hello, world!'.split())
# 5. What will the following code print?
def spam(eggs=42):
print(eggs)
spam()
spam('Hello')
# 6. What percentage of words in this sentence are valid English words?
sentence = "Whether it's flobulllar in the mind to quarfalog the slings and arrows of outrageous guuuuuuuuur."
UPPERLETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
LETTERS_AND_SPACE = UPPERLETTERS + UPPERLETTERS.lower() + ' \t\n'
def loadDictionary():
dictionaryFile = open('dictionary.txt')
englishWords = {}
for word in dictionaryFile.read().split('\n'):
englishWords[word] = None
dictionaryFile.close()
return englishWords
ENGLISH_WORDS = loadDictionary()
# Not in dictionary. Too small?
ENGLISH_WORDS['IN'] = None
ENGLISH_WORDS['TO'] = None
ENGLISH_WORDS['OF'] = None
def getEnglishCount(message):
message = message.upper()
message = removeNonLetters(message)
possibleWords = message.split()
if not possibleWords: # An empty list is falsy, so this is True when no words remain
return 0.0 # No words at all, so return 0.0
matches = 0
for word in possibleWords:
if word in ENGLISH_WORDS:
matches += 1
print(matches, possibleWords)
return float(matches) / len(possibleWords)
def removeNonLetters(message):
lettersOnly = []
for symbol in message:
if symbol in LETTERS_AND_SPACE:
lettersOnly.append(symbol)
return ''.join(lettersOnly)
print(getEnglishCount(sentence) * 100)
# If PracticeQuestions.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
```
#### File: CrackingCodes/Ch13/cryptomath.py
```python
def gcd(a: int, b: int) -> int:
"""Greatest common divisor
Returns greatest common divisor of given inputs using Euclid's algorithm.
Args:
a: First integer input.
b: Second integer input.
Returns:
Integer representing GCD.
"""
# Return the GCD of a and b using Euclid's algorithm:
while a != 0:
a, b = b % a, a
return b
def findModInverse(a: int, m: int):
"""Modular inverse
Returns modular inverse of given inputs using Euclid's extended algorithm.
Args:
a: First integer input.
m: Second integer input.
Returns:
Modular inverse as an integer if it exists, None otherwise.
"""
# Return the modular inverse of a % m, which is
# the number x such that a * x % m = 1
if gcd(a, m) != 1:
return None # No mod inverse if a & m aren't relatively prime.
# Calculate using the extended Euclidean algorithm:
u1, u2, u3 = 1, 0, a
v1, v2, v3 = 0, 1, m
while v3 != 0:
q = u3 // v3 # Note that // is the integer division operator.
v1, v2, v3, u1, u2, u3 = (u1 - q * v1), (u2 - q * v2), (u3 - q * v3), v1, v2, v3
return u1 % m
```
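A quick, hand-checkable usage sketch for the two helpers above (run in the same module): gcd(24, 32) is 8, and the modular inverse of 7 mod 26 is 15 because 7 * 15 % 26 == 1.
```python
print(gcd(24, 32))            # 8
print(findModInverse(7, 26))  # 15, since 7 * 15 % 26 == 1
print(findModInverse(4, 26))  # None: gcd(4, 26) == 2, so no inverse exists
```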
#### File: CrackingCodes/Ch14/affineKeyTest.py
```python
from pythontutorials.books.CrackingCodes.Ch14.affineCipher import encryptMessage, SYMBOLS
from pythontutorials.books.CrackingCodes.Ch13.cryptomath import gcd
def main():
message = 'Make things as simple as possible, but not simpler.'
for keyA in range(2, 80):
key = keyA * len(SYMBOLS) + 1
if gcd(keyA, len(SYMBOLS)) == 1:
print(keyA, encryptMessage(key, message))
if __name__ == '__main__':
main()
```
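Only keyA values that are coprime with the symbol-set length yield a decryptable affine cipher, which is exactly what the gcd check above enforces. A small sketch (reusing the imports above) that collects the valid keyA values instead of printing ciphertexts:
```python
valid_keyA = [keyA for keyA in range(2, 80) if gcd(keyA, len(SYMBOLS)) == 1]
print(len(valid_keyA), 'valid keyA values out of', 80 - 2)
```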
#### File: CrackingCodes/Ch20/vigenereDictionaryHacker.py
```python
from pythontutorials.books.CrackingCodes.Ch11.detectEnglish import isEnglish
from pythontutorials.books.CrackingCodes.Ch18.vigenereCipher import decryptMessage
DICTIONARY_FILE = "/home/jose/PycharmProjects/python-tutorials/pythontutorials/books/CrackingCodes/Ch11/dictionary.txt"
def main():
from pyperclip import copy
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext: str):
"""Hack Vigenère Dictionary
Brute-forces the ciphertext by using every word in the dictionary file as a key. Checks whether the decrypted
message is English with the :func:`~books.CrackingCodes.Ch11.detectEnglish.isEnglish` function, and prompts the
user for confirmation by displaying the first 100 characters.
Args:
ciphertext: String containing Vigenère cipher encrypted message.
Returns:
Decrypted message, if confirmed, None otherwise.
"""
fo = open(DICTIONARY_FILE)
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
```
#### File: CrackingCodes/Ch22/PracticeQuestions.py
```python
from pythontutorials.books.CrackingCodes.Ch18.vigenereCipher import decryptMessage
def main():
# 1. How many prime numbers are there?
# Hint: Check page 322
message = "Iymdi ah rv urxxeqfi fjdjqv gu gzuqw clunijh." # Encrypted with key "PRIMES"
#print(decryptMessage(blank, blank)) # Fill in the blanks
# 2. What are integers that are not prime called?
# Hint: Check page 323
message = "Vbmggpcw wlvx njr bhv pctqh emi psyzxf czxtrwdxr fhaugrd." # Encrypted with key "NOTCALLEDEVENS"
#print(decryptMessage(blank, blank)) # Fill in the blanks
# 3. What are two algorithms for finding prime numbers?
# Hint: Check page 323
# Encrypted with key "ALGORITHMS"
message = "Tsk hyzxl mdgzxwkpfz gkeo ob kpbz ngov gfv: bkpmd dtbwjqhu, eaegk cw Mkhfgsenseml, hzv Rlhwe-Ubsxwr."
#print(decryptMessage(blank, blank)) # Fill in the blanks
# If PracticeQuestions.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
```
#### File: CrackingCodes/Ch22/primeNum.py
```python
import math, random
def isPrimeTrialDiv(num: int) -> bool:
"""Is prime trial division
Uses the `trial division`_ algorithm for testing if a given number is prime.
Args:
num: Integer to determine if prime.
Returns:
True if num is a prime number, otherwise False.
.. _trial division:
https://en.wikipedia.org/wiki/Trial_division
"""
# All numbers less than 2 are not prime:
if num < 2:
return False
# See if num is divisible by any number up to the square root of num:
for i in range(2, int(math.sqrt(num)) + 1):
if num % i == 0:
return False
return True
def primeSieve(sieveSize: int) -> list:
"""Prime sieve
Calculates prime numbers using the `Sieve of Eratosthenes`_ algorithm.
Args:
sieveSize: Largest number to check if prime starting from zero.
Returns:
List containing prime numbers from 0 to given number.
.. _Sieve of Eratosthenes:
https://en.wikipedia.org/wiki/Sieve_of_Eratosthenes
"""
sieve = [True] * sieveSize
sieve[0] = False # Zero and one are not prime numbers.
sieve[1] = False
# Create the sieve:
for i in range(2, int(math.sqrt(sieveSize)) + 1):
pointer = i * 2
while pointer < sieveSize:
sieve[pointer] = False
pointer += i
# Compile the list of primes:
primes = []
for i in range(sieveSize):
if sieve[i]:
primes.append(i)
return primes
def rabinMiller(num: int) -> bool:
"""Rabin-Miller primality test
Uses the `Rabin-Miller`_ primality test to check if a given number is prime.
Args:
num: Number to check if prime.
Returns:
True if num is prime, False otherwise.
Note:
* The Rabin-Miller primality test is probabilistic, so it can return false positives when given a strong pseudoprime.
.. _Rabin-Miller:
https://en.wikipedia.org/wiki/Miller%E2%80%93Rabin_primality_test
"""
if num % 2 == 0 or num < 2:
return False # Rabin-Miller doesn't work on even integers.
if num == 3:
return True
s = num - 1
t = 0
while s % 2 == 0:
# Keep halving s until it is odd (and use t
# to count how many times we halve s):
s = s // 2
t += 1
for trials in range(5): # Try to falsify num's primality 5 times.
a = random.randrange(2, num - 1)
v = pow(a, s, num)
if v != 1: # This test does not apply if v is 1.
i = 0
while v != (num - 1):
if i == t - 1:
return False
else:
i = i + 1
v = (v ** 2) % num
return True
# Most of the time we can quickly determine if num is not prime
# by dividing by the first few dozen prime numbers. This is quicker
# than rabinMiller(), but does not detect all composites.
LOW_PRIMES = primeSieve(100)
def isPrime(num: int) -> bool:
"""Is prime
This function checks divisibility by LOW_PRIMES before calling
:func:`~books.CrackingCodes.Ch22.primeNum.rabinMiller`.
Args:
num: Integer to check if prime.
Returns:
True if num is prime, False otherwise.
Note:
* If a number is divisible by a low prime number, it is not prime.
"""
if num < 2:
return False # 0, 1, and negative numbers are not prime.
if num in LOW_PRIMES:
return True # Low prime numbers are still prime numbers
# See if any of the low prime numbers can divide num:
for prime in LOW_PRIMES:
if num % prime == 0:
return False
# If all else fails, call rabinMiller() to determine if num is a prime:
return rabinMiller(num)
def generateLargePrime(keysize: int=1024) -> int:
"""Generate large prime number
Generates random numbers of given bit size until one is prime.
Args:
keysize: Number of bits prime number should be.
Returns:
Random prime number that is keysize bits in size.
Note:
* keysize defaults to 1024 bits.
"""
while True:
num = random.randrange(2**(keysize-1), 2**keysize)
if isPrime(num):
return num
```
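A short smoke test for the helpers above (same module); the first few primes are easy to verify by hand:
```python
print(primeSieve(20))          # [2, 3, 5, 7, 11, 13, 17, 19]
print(isPrime(97))             # True (97 is in LOW_PRIMES)
print(isPrime(1))              # False
print(generateLargePrime(32))  # some random 32-bit prime
```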
#### File: CrackingCodes/Ch23/PracticeQuestions.py
```python
def main():
# 1. What is the difference between a symmetric cipher and an asymmetric
# cipher?
# Hint: Check page 336
message = ".noitpyrced dna noitpyrcne rof yek emas eht esu taht srehpiC :cirtemmyS"
message2 = ".noitpyrced rof rehtona dna noitpyrcne rof yek eno esu taht srehpiC :cirtemmysA"
#print(blank[::-1]) # Fill in the blank
#print(blank[::-1])
# 2. Alice generates a public key and a private key. Unfortunately, she later
# loses her private key.
# a. Will other people be able to send her encrypted messages?
# b. Will she be able to decrypt messages previously sent to her?
# c. Will she be able to digitally sign documents?
# d. Will other people be able to verify her previously signed documents?
# Hint: Check pages 336 and 338 - 339
yesno = ["Yes", "No"]
print("a.: %s" % yesno[8 * 0 + 4 * 5 * 0])
print("b.: %s" % yesno[3 + 7 - 6 - 3])
print("c.: %s" % yesno[10 * 10 // 50 - 1])
print("d.: %s" % yesno[100 // 25 + 6 - 5 * 2])
# 3. What are authentication and confidentiality? How are they different?
# Hint: Check page 338
# Don't do this - imports should be at the top of the file
import pythontutorials.books.CrackingCodes.Ch01.caesarCipher
message = "L65spy5tnl5tzy:H13zzqH5sl5H8szH0z6'3pHnzxx6ytnl5tyrH8t5sHt4H8szH5sp0H4l0H5sp0Hl3pK" # Key 11
message2 = "O1zrupqz6umxu6 :Iwqq2uzsI6tqIyq55msqImI5qo4q6L" # Key 12
diff = "X99Tz6?52ABT6 TC52Ty!!8T?A!E612Tz! 3612 C6x96CH,TyDCTxDC52 C6zxC6! T6BT3A2.D2 C9HTyxB21T! TF5!T5xBTC52TA645CT82HW" # Key 23
#print(books.CrackingCodes.Ch01.caesarCipher.decryptMessage(blank, blank)) # Fill in the blanks
#print(books.CrackingCodes.Ch01.caesarCipher.decryptMessage(blank, blank))
#print(books.CrackingCodes.Ch01.caesarCipher.decryptMessage(blank, blank))
# 4. What is non-repudiation?
# Hint: Check page 339
# Don't do this - imports should be at the top of the file
import pythontutorials.books.CrackingCodes.Ch20.vigenereDictionaryHacker
message = "Klt axirtvhrv xwuw aofmcav awi kis tchufvtx d uelaotv adh'w je tjzr ks syqg anbvbimca wpam usfjevy db a eihri xxgh."
#print(books.CrackingCodes.Ch20.vigenereDictionaryHacker.hackVigenereDictionary(blank)) # Fill in the blank
# If PracticeQuestions.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main()
```
#### File: wikibook/Chapter 08/C8-Exercise1.py
```python
def square(L):
return L * L
def rectangle(width, height):
return width * height
def circle(radius):
return 3.14159 * radius ** 2
def options():
print()
print("Options:")
print("s = calculate the area of a square.")
print("c = calculate the area of a circle.")
print("r = calculate the area of a rectangle.")
print("q = quit")
print()
print("This program will calculate the area of a square, circle or rectangle.")
choice = "x"
options()
while choice != "q":
choice = input("Please enter your choice:")
if choice == "s":
L = float(input("Length of square side: "))
print("The area of this square is", square(L))
options()
elif choice == "c":
radius = float(input("Radius of the circle: "))
print("The area of the circle is", circle(radius))
options()
elif choice = "r":
width = float(input("Width of the rectangle: "))
height = float(input("Height of the rectangle: "))
print("The area of the rectangle is", rectangle(width, height))
options()
elif choice == "q":
print(" ", end="")
else:
print("Unrecognized option.")
options()
```
#### File: wikibook/Chapter 16/C16-1_shout.py
```python
def shout(string):
for character in string:
print("Gimme a " + character)
print("'" + character + "'")
shout("Lose")
def middle(string):
print("The middle character is:", string[len(string) // 2])
middle("abcdefg")
middle("The Python Programming Language")
middle("Atlanta")
```
#### File: CS101/Lesson 05 - How to Repeat/Q21-Biggest.py
```python
def biggest (n1, n2, n3):
if n1 > n2:
if n1 > n3:
return n1
else:
return n3
if n2 > n3:
return n2
return n3
print(biggest(3, 6, 9))
#>>> 9
print(biggest(6, 9, 3))
#>>> 9
print(biggest(9, 3, 6))
#>>> 9
print(biggest(3, 3, 9))
#>>> 9
print(biggest(9, 3, 9))
#>>> 9
```
#### File: CS101/Lesson 11 - How to Manage Data/Q26-Find Element.py
```python
def find_element(in_list, find):
for element in in_list:
if element == find:
return in_list.index(element)
return -1
print(find_element([1,2,3],3))
#>>> 2
print(find_element(['alpha','beta'],'gamma'))
#>>> -1
```
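Calling `in_list.index(element)` inside the loop rescans the list from the start, so the function does redundant work. A sketch of an equivalent single-pass version using enumerate:
```python
def find_element(in_list, find):
    for index, element in enumerate(in_list):
        if element == find:
            return index
    return -1

print(find_element([1, 2, 3], 3))  # 2
```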
#### File: CS101/Lesson 13 - Problem Set Optional/Q3-Mean of a List.py
```python
def list_mean(list_):
sum_ = 0
if not list_:
raise TypeError
for element in list_:
sum_ += element
return float(sum_) / len(list_)
print(list_mean([1,2,3,4]))
#>>> 2.5
print(list_mean([1,3,4,5,2]))
#>>> 3.0
print(list_mean([]))
#>>> ??? You decide. If you decide it should give an error, comment
# out the print line above to prevent your code from being graded as
# incorrect.
print(list_mean([2]))
#>>> 2.0
```
#### File: CS101/Lesson 14 - Problem Set Optional 2/Q4-Frequency Analysis.py
```python
def freq_analysis(message):
alphabet = [chr(i) for i in range(ord('a'), ord('z') + 1)]
freq_list = []
for letter in alphabet:
# DEBUG: variables and lists
# NOTE: Divide by zero exception during debug cycle. Move to end of for loop?
#print("""\n message: {} freq_list: {}
#\n letter: {} count: {}
#\n len: {} frequency: {}""".format(message, freq_list, letter, message.count(letter),
# len(message), len(message) / message.count(letter)))
if message.count(letter) == 0:
freq_list.append(0.0)
else:
freq_list.append(message.count(letter) / float(len(message)))
return freq_list
#Tests
print freq_analysis("abcd")
#>>> [0.25, 0.25, 0.25, 0.25, 0.0, ..., 0.0]
print freq_analysis("adca")
#>>> [0.5, 0.0, 0.25, 0.25, 0.0, ..., 0.0]
print freq_analysis('bewarethebunnies')
#>>> [0.0625, 0.125, 0.0, 0.0, ..., 0.0]
```
#### File: CS101/Lesson 27 - Cumulative Practice Problems/Q4-Remove Tags.py
```python
def remove_tags(string):
startpos = string.find('<')
while startpos != -1: # startpos is -1 if not found
endpos = string.find('>', startpos) # start endpos search from startpos
string = string[:startpos] + ' ' + string[endpos + 1:]
startpos = string.find('<')
return string.split()
print(remove_tags('''<h1>Title</h1><p>This is a
<a href="http://www.udacity.com">link</a>.<p>'''))
# >>> ['Title','This','is','a','link','.']
print(remove_tags('''<table cellpadding='3'>
<tr><td>Hello</td><td>World!</td></tr>
</table>'''))
# >>> ['Hello','World!']
print(remove_tags("<hello><goodbye>"))
# >>> []
print(remove_tags("This is plain text."))
# >>> ['This', 'is', 'plain', 'text.']
```
#### File: CS101/Lesson 28 - Challenging Practice Problems/Q3-Elementary Cellular Automaton.py
```python
def cellular_automaton(string, pattern, n):
rules = make_rules(pattern)
inlist = list(string)
tmplist = []
maxlength = len(inlist) - 1
#print rules # DEBUG
#print inlist # DEBUG
while n > 0:
for index in range(0, len(inlist)):
#print index # DEBUG
testslice = []
if len(inlist) == 1:
testslice = inlist[index] * 3
elif index == 0:
testslice = inlist[maxlength] + inlist[index] + inlist[index + 1]
elif index == maxlength:
testslice = inlist[index - 1] + inlist[index] + inlist[0]
else:
testslice = inlist[index - 1] + inlist[index] + inlist[index + 1]
#print testslice # DEBUG
tmplist.append(rules[''.join(testslice)])
#print tmplist # DEBUG
inlist = tmplist
tmplist = []
n -= 1
return ''.join(inlist)
def make_rules(patternnum):
patternvalues = [[128, 'xxx'], [64, 'xx.'], [32, 'x.x'],
[16, 'x..'], [8, '.xx'], [4, '.x.'],
[2, '..x'], [1, '...']]
if patternnum == 0:
return {'xxx': '.', 'xx.': '.', 'x.x': '.',
'x..': '.', '.xx': '.', '.x.': '.',
'..x': '.', '...': '.'}
rules = {}
while patternnum > 0:
for patternvalue in patternvalues:
if patternvalue[0] > patternnum:
rules[patternvalue[1]] = '.'
else:
rules[patternvalue[1]] = 'x'
patternnum -= patternvalue[0]
return rules
def test_make_rules():
patternvalues = ['xxx', 'xx.', 'x.x', 'x..', '.xx', '.x.',
'..x', '...']
for pattern in range(0, 256):
rules = make_rules(pattern)
for patternvalue in patternvalues:
# First, check if all patterns in rules
if patternvalue not in rules and pattern != 0:
print "Pattern number " + str(pattern) + " missing rule " + patternvalue
return False
# Next, check a few patterns
if pattern == 0:
correctrules = {'...': '.', 'x.x': '.', 'xxx': '.',
'.xx': '.', '..x': '.', '.x.': '.', 'xx.': '.', 'x..': '.'}
if rules[patternvalue] != correctrules[patternvalue]:
print("Pattern number 0 mismatched: pattern " + patternvalue + " incorrect")
return False
if pattern == 128:
correctrules = {'...': '.', 'x.x': '.', 'xxx': 'x',
'.xx': '.', '..x': '.', '.x.': '.', 'xx.': '.', 'x..': '.'}
if rules[patternvalue] != correctrules[patternvalue]:
print "Pattern number 128 mismatched: pattern " + patternvalue + " incorrect"
return False
if pattern == 256:
correctrules = {'...': 'x', 'x.x': '.', 'xxx': '.',
'.xx': '.', '..x': '.', '.x.': '.', 'xx.': '.', 'x..': '.'}
if rules[patternvalue] != correctrules[patternvalue]:
print "Pattern number 255 mismatched: pattern " + patternvalue + " incorrect"
return False
return True
# print(test_make_rules())
print(cellular_automaton('.x.x.x.x.', 17, 2))
# >>> xxxxxxx..
print(cellular_automaton('.x.x.x.x.', 249, 3))
# >>> .x..x.x.x
print(cellular_automaton('...x....', 125, 1))
# >>> xx.xxxxx
print(cellular_automaton('...x....', 125, 2))
# >>> .xxx....
print(cellular_automaton('...x....', 125, 3))
# >>> .x.xxxxx
print(cellular_automaton('...x....', 125, 4))
# >>> xxxx...x
print(cellular_automaton('...x....', 125, 5))
# >>> ...xxx.x
print(cellular_automaton('...x....', 125, 6))
# >>> xx.x.xxx
print(cellular_automaton('...x....', 125, 7))
# >>> .xxxxx..
print(cellular_automaton('...x....', 125, 8))
# >>> .x...xxx
print(cellular_automaton('...x....', 125, 9))
# >>> xxxx.x.x
print(cellular_automaton('...x....', 125, 10))
# >>> ...xxxxx
print(cellular_automaton('.', 21, 1))
# >>> x
print(cellular_automaton('.', 21, 2))
# >>> .
``` |
{
"source": "JoseALermaIII/webbot",
"score": 2
} |
#### File: JoseALermaIII/webbot/webbot.py
```python
from subprocess import Popen, PIPE
'''
Todo:
Webserver to serve RSS file
-> Custom frontend w/ JS parser
-> DOM parser?
Threads for webserver and bot
Generate JSON alongside RSS
Logging?
Make into custom website for moderating remotely?!
Config file
Always:
Clean up code
'''
def main():
# Run bot
Popen(['./venv/bin/python3', 'bot.py'])
# Run server
Popen(['./venv/bin/python3', 'webserver.py'], stdout=PIPE).communicate()
if __name__ == '__main__':
main()
``` |
{
"source": "josealfredo189/3D-Test-",
"score": 3
} |
#### File: josealfredo189/3D-Test-/Test_3d.py
```python
import numpy as np
import matplotlib.pyplot as plt
#---Global coordinates
xg=[]
yg=[]
zg=[]
#----Center
xc = 80
yc = 40
zc = 40
#----Local coordinates
x=[40, 30, 80, 0]
y=[60, 10, 60, 0]
z=[ 0, 0, 0, 0]
#Plot the triangle's edges
def plotPlaneLine(xg,yg,zg):
plt.axis([80,250,120,20])
plt.grid()
#Plot the plane: the triangle
plt.plot([xg[0],xg[1]],[yg[0],yg[1]],color='k')
plt.plot([xg[1],xg[2]],[yg[1],yg[2]],color='k')
plt.plot([xg[2],xg[0]],[yg[2],yg[0]],color='k')
#Plot the point
plt.scatter(xg[3],yg[3],s=20,color='r')
#----intersection of the triangles
plt.plot([xg[0],xg[3]],[yg[0],yg[3]],color='r',linestyle=':')
plt.plot([xg[1],xg[3]],[yg[1],yg[3]],color='r',linestyle=':')
plt.plot([xg[2],xg[3]],[yg[2],yg[3]],color='r',linestyle=':')
plt.gca().set_aspect('equal')#equalize the plot's aspect ratio
plt.show()#show the plot
def hitPoint(x,y,z):
#Distance from point 0 to 1
a = x[1]-x[0]
b = y[1]-y[0]
c = z[1]-z[0]
Q01 = np.sqrt(a*a+b*b+c*c)
#Distance from point 1 to 2
a = x[2]-x[1]
b = y[2]-y[1]
c = z[2]-z[1]
Q12 = np.sqrt(a*a+b*b+c*c)
#Distance from point 0 to 2
a = x[2]-x[0]
b = y[2]-y[0]
c = z[2]-z[0]
Q02 = np.sqrt(a*a+b*b+c*c)
#Distance from point 1 to 3
a = x[3]-x[1]
b = y[3]-y[1]
c = z[3]-z[1]
Q13 = np.sqrt(a*a+b*b+c*c)
#Distance from point 2 to 3
a = x[2]-x[3]
b = y[2]-y[3]
c = z[2]-z[3]
Q23 = np.sqrt(a*a+b*b+c*c)
#Distance from point 0 to 3
a = x[0]-x[3]
b = y[0]-y[3]
c = z[0]-z[3]
Q03 = np.sqrt(a*a+b*b+c*c)
#Compute the area of triangle A
#with Heron's formula: s=(a+b+c)/2, A=sqrt(s(s-a)(s-b)(s-c))
s = (Q01+Q12+Q02)/2
A = np.sqrt(s * (s-Q01) * (s-Q12) * (s-Q02))
#Compute the area of triangle A1
s1 = (Q01 + Q03 + Q13) /2
A1 = np.sqrt(s1*(s1-Q01) * (s1-Q03) * (s1-Q13))
#Compute the area of triangle A2
s2 = (Q02 + Q23 + Q03) /2
A2 = np.sqrt(s2 * (s2-Q02) * (s2-Q23) * (s2-Q03))
#Return the areas of the triangles
return A,A1,A2
#Plot the triangle and print a message
def plotSquareLinex(xc,yc,zc):
[A,A1,A2]=hitPoint(x,y,z)
print('A=',A)
print('A1=',A1)
print('A2=',A2)
print ('A1+A2',(A1+A2))
if((A1+A2)>A):
plt.text(150,60,'The HIT POINT is outside the limits')
plt.text(175,45,'A1+A2=',color='orange')
plt.text(200,45,(A1+A2),color='orange')
elif((A1+A2)<A):
plt.text(150,60,'The HIT POINT is inside the limits')
plt.text(175,45,'A1+A2=',color='r')
plt.text(200,45,(A1+A2),color='r')
plt.text(175,80,'A=',color='b')
plt.text(185,80,A,color='b')
plt.text(175,85,'A1=',color='purple')
plt.text(187,85,A1,color='purple')
plt.text(175,90,'A2=',color='k')
plt.text(187,90,A2,color='k')
plotPlaneLine(xg,yg,zg)
#Ask for the hit point x, y, or the control number to exit
while True:
enterx=input('Where is the hit point in "x"? Or enter your control number to exit (18390045)')
if enterx=='18390045':
break
else:
x[3]=int(enterx)
entery=input('Where is the hit point in "y"? Or enter your control number to exit (18390045)')
if entery=='18390045':
break
else:
y[3]=int(entery)
for i in range(len(x)):
xg.append(x[i]+xc)
yg.append(y[i]+yc)
zg.append(z[i]+zc)
plotSquareLinex(xc,yc,zc)
#Clear the global variables
j=0
while j< 4:
xg.remove(xg[0])
yg.remove(yg[0])
zg.remove(zg[0])
j+=1
``` |
{
"source": "josealobato/go-over",
"score": 2
} |
#### File: goodreads/complement_file/test_comp_file_modification_effect.py
```python
from typing import Dict
import json
import pytest
from ...func_test_tools import load_result, load_result_from_path
from ...constants import *
# Under test
from go_over.goodreads.processor import process
# Test
def test_comp_file_modification_effect_on_title(csv_one_book, complemetary_data_modified, results_path):
""" The output use the title in the complementary file """
# GIVEN a soucedata and a complementary file that differ on the title.
# WHEN we process the data.
process(csv_one_book, complemetary_data_modified, results_path, {"verbose": False})
# THEN the result files uses the complementary data title.
results = load_result("books_read_2022.json", results_path)
book = results['books'][0]
assert book["title"] == "A new title"
def test_comp_file_modification_effect_on_language(csv_one_book, complemetary_data_modified, results_path):
""" The output use the language in the complementary file """
# GIVEN a soucedata and a complementary file that differ on the language.
# WHEN we process the data.
process(csv_one_book, complemetary_data_modified, results_path, {"verbose": False})
# THEN the result files uses the complementary data language.
results = load_result("books_read_2022.json", results_path)
book = results['books'][0]
assert book["language"] == "JP"
def test_comp_file_modification_effect_on_tags(csv_one_book, complemetary_data_modified, results_path):
""" The output use the tags in the complementary file """
# GIVEN a soucedata and a complementary file that differ on the tags.
# WHEN we process the data.
process(csv_one_book, complemetary_data_modified, results_path, {"verbose": False})
# THEN the result files uses the complementary data tags.
results = load_result("books_read_2022.json", results_path)
book = results['books'][0]
assert book["tags"] == ["ai","be"]
def test_comp_file_modification_effect_on_format(csv_one_book, complemetary_data_modified, results_path):
""" The output use the format in the complementary file """
# GIVEN a soucedata and a complementary file that differ on the format.
# WHEN we process the data.
process(csv_one_book, complemetary_data_modified, results_path, {"verbose": False})
# THEN the result files uses the complementary data format.
results = load_result("books_read_2022.json", results_path)
book = results['books'][0]
assert book["format"] == "flimsybook"
def test_comp_file_modification_effect_on_review(csv_one_book, complemetary_data_modified, results_path):
""" The output use the review in the complementary file """
# GIVEN a soucedata and a complementary file that differ on the review.
# WHEN we process the data.
process(csv_one_book, complemetary_data_modified, results_path, {"verbose": False})
# THEN the result files uses the complementary data review.
results = load_result("books_read_2022.json", results_path)
book = results['books'][0]
assert book["my_review_url"] == "/a_review/"
```
#### File: func/goodreads/test_one_book_stats.py
```python
from typing import Dict
import pytest
from ..func_test_tools import load_result
# Under test
from go_over.goodreads.processor import process
# Tests
def test_process_one_book_stats(csv_one_book, json_empty, results_path):
""" Process and generate data for one book and test the stats. """
# GIVEN a CVS file with just one book and empty complement file.
source_file = csv_one_book
complement_file = json_empty
results_folder = results_path
# WHEN we process that file.
process(source_file, complement_file, results_folder, {"verbose": True})
# THEN the stats should be attached to the results
results = load_result("books_read_2022.json", results_folder)
stats = results['stats']
assert stats["read"] == 1
assert len(stats["languages"]) == 1
assert stats["languages"]['EN'] == 1
```
#### File: goodreads/to_read/test_to_read_export.py
```python
from typing import Dict
import json
import pytest
from ...func_test_tools import load_result, load_result_from_path, exist_in_path
from ...constants import *
# Fixtures
@pytest.fixture(scope='function', name="csv_one_read_book")
def csv_one_read_book_file(tmpdir_factory):
""" Create a CSV file with just one book. """
csv_text = '''Book Id,Title,Author,Author l-f,Additional Authors,ISBN,ISBN13,My Rating,Average Rating,Publisher,Binding,Number of Pages,Year Published,Original Publication Year,Date Read,Date Added,Bookshelves,Bookshelves with positions,Exclusive Shelf,My Review,Spoiler,Private Notes,Read Count,Recommended For,Recommended By,Owned Copies,Original Purchase Date,Original Purchase Location,Condition,Condition Description,BCID
35755822,Building Evolutionary Architectures: Support Constant Change,Neal Ford,"Ford, Neal","<NAME>, <NAME>","=""1491986360""","=""9781491986363""",0,3.76,O'Reilly Media,Paperback,190,2017,,,2021/12/16,,,read,,,,0,,,0,,,,,'''
file = tmpdir_factory.mktemp(SOURCE_DATA_PATH).join(CSV_FILE_NAME)
with file.open('w') as f:
f.write(csv_text)
return file
@pytest.fixture(scope='function', name="csv_one_to_read_book")
def csv_one_to_read_book_file(tmpdir_factory):
""" Create a CSV file with just one book. """
csv_text = '''Book Id,Title,Author,Author l-f,Additional Authors,ISBN,ISBN13,My Rating,Average Rating,Publisher,Binding,Number of Pages,Year Published,Original Publication Year,Date Read,Date Added,Bookshelves,Bookshelves with positions,Exclusive Shelf,My Review,Spoiler,Private Notes,Read Count,Recommended For,Recommended By,Owned Copies,Original Purchase Date,Original Purchase Location,Condition,Condition Description,BCID
35755822,Building Evolutionary Architectures: Support Constant Change,Neal Ford,"<NAME>","<NAME>, <NAME>","=""1491986360""","=""9781491986363""",0,3.76,O'Reilly Media,Paperback,190,2017,,,2021/12/16,to-read,to-read (#12),to-read,,,,0,,,0,,,,,'''
file = tmpdir_factory.mktemp(SOURCE_DATA_PATH).join(CSV_FILE_NAME)
with file.open('w') as f:
f.write(csv_text)
return file
@pytest.fixture(scope='function', name="csv_to_read_books_unsorted")
def csv_to_read_books_unsorted_file(tmpdir_factory):
""" Create a CSV file with just one book. """
csv_text = '''Book Id,Title,Author,Author l-f,Additional Authors,ISBN,ISBN13,My Rating,Average Rating,Publisher,Binding,Number of Pages,Year Published,Original Publication Year,Date Read,Date Added,Bookshelves,Bookshelves with positions,Exclusive Shelf,My Review,Spoiler,Private Notes,Read Count,Recommended For,Recommended By,Owned Copies,Original Purchase Date,Original Purchase Location,Condition,Condition Description,BCID
36398423,"Sarah (La Brigade des Cauchemars, #1)",Franck Thilliez,"<NAME>","<NAME>, DRAC","=""""","=""9782822221603""",4,3.81,Jungle,Hardcover,56,2017,2017,2021/10/16,2021/06/20,,,read,,,,1,,,0,,,,,
59207618,A Radical Enterprise: Pioneering the Future of High-Performing Organizations,<NAME>,"Parker, <NAME>",,"=""1950508021""","=""9781950508020""",0,4.00,It Revolution Press,ebook,192,2022,,,2022/02/18,to-read,to-read (#18),to-read,,,,0,,,0,,,,,
4268826,"Growing Object-Oriented Software, Guided by Tests",Steve Freeman,"<NAME>",<NAME>,"=""0321503627""","=""9780321503626""",0,4.19,Addison-Wesley Professional,Paperback,345,2009,2009,,2022/02/18,to-read,to-read (#17),to-read,,,,0,,,0,,,,,
21343,The Five Dysfunctions of a Team: A Leadership Fable,<NAME>,"<NAME>",,"=""0787960756""","=""9780787960759""",0,4.08,Jossey-Bass,Hardcover,227,2002,2002,,2022/01/13,to-read,to-read (#8),to-read,,,,0,,,0,,,,,
35755822,Building Evolutionary Architectures: Support Constant Change,Neal Ford,"Ford, Neal","<NAME>, <NAME>","=""1491986360""","=""9781491986363""",0,3.76,O'Reilly Media,Paperback,190,2017,,,2021/12/16,to-read,to-read (#12),to-read,,,,0,,,0,,,,,'''
file = tmpdir_factory.mktemp(SOURCE_DATA_PATH).join(CSV_FILE_NAME)
with file.open('w') as f:
f.write(csv_text)
return file
# Under test
from go_over.goodreads.processor import process
# Test
def test_not_to_read(csv_one_read_book, json_none, results_path):
""" No to read file is created if there is not book to read. """
# GIVEN a CSV source with not books to read
# WHEN processing.
process(csv_one_read_book, json_none, results_path, {})
# THEN not "to read" file will be generated.
assert not exist_in_path("books_to_read.json", results_path)
def test_one_to_read(csv_one_to_read_book, json_none, results_path):
""" No want to read file is created if there is not book to read. """
# GIVEN a CSV source with not books to read
# WHEN processing.
process(csv_one_to_read_book, json_none, results_path, {})
# THEN not "want to read" file will be generated.
assert exist_in_path("books_to_read.json", results_path)
results = load_result("books_to_read.json", results_path)
book = results['books'][0]
assert book["id"] == '35755822'
def test_to_read_sort(csv_to_read_books_unsorted, json_none, results_path):
""" to read books are sorted by possition. """
# GIVEN a CSV source with 4 books to read
# WHEN processing.
process(csv_to_read_books_unsorted, json_none, results_path, {})
# THEN the to-read books will be sorted by position.
results = load_result("books_to_read.json", results_path)
books = results['books']
assert books[0]["id"] == '21343'
assert books[1]["id"] == '35755822'
assert books[2]["id"] == '4268826'
assert books[3]["id"] == '59207618'
def test_to_read_does_not_include_read(csv_to_read_books_unsorted, json_none, results_path):
""" to read books contains only to read books. """
# GIVEN a CSV source with 4 books to read and 1 read
# WHEN processing.
process(csv_to_read_books_unsorted, json_none, results_path, {})
# THEN only to read books are listed in the to read file.
results = load_result("books_to_read.json", results_path)
books = results['books']
assert len(books) == 4
```
#### File: tests/unit/test_book_position.py
```python
from datetime import datetime
import pytest
from go_over.goodreads import Book, BookRead, Bookshelf
# pylint: disable=C0301
# Line too long
BOOKS = [
{"Book Id": "00", "Title": "Book 0", "Author": "Cervantes", "Exclusive Shelf": "read", "Date Read": "", "My Rating": "3","Bookshelves with positions": "to-read (#16)"},
{"Book Id": "01", "Title": "Book 0", "Author": "Cervantes", "Exclusive Shelf": "read", "Date Read": "2019/11/23", "My Rating": "3", "Bookshelves with positions": "to-read (#1)"},
{"Book Id": "10", "Title": "Book 2", "Author": "Cervantes", "Exclusive Shelf": "read", "Date Read": "2020/11/23", "My Rating": "3"},
]
# Last read
def test_book_with_position_on_known_bookshelf():
book = Book(**BOOKS[0])
assert book.to_read_position == 16
def test_book_with_position_on_unknown_bookshelf():
book = Book(**BOOKS[1])
assert book.to_read_position == 1
def test_book_without_position():
book = Book(**BOOKS[2])
assert book.to_read_position == 0
```
#### File: tests/unit/test_shelf_all_reads.py
```python
import pytest
from go_over.goodreads import Book, BookRead, Bookshelf
# pylint: disable=C0301
# Line too long
BOOKS_MIX = [
# unread
{"Book Id": "00", "Title": "Book 0", "Author": "Cervantes", "Exclusive Shelf": "currently-reading", "Date Read": "", "My Rating": "3"},
{"Book Id": "01", "Title": "Book 0", "Author": "Cervantes", "Exclusive Shelf": "read", "Date Read": "", "My Rating": "3"},
{"Book Id": "02", "Title": "Book 0", "Author": "Cervantes", "Exclusive Shelf": "read", "Date Read": "", "My Rating": "3"}
]
# Read books a year
<EMAIL>()
def test_getting_all_reads():
# In a shelf with multiple books with multiple reads get them all.
# Prepare
shelf = Bookshelf()
shelf.books = [Book(**b) for b in BOOKS_MIX]
shelf.books[1].add_reads(["2019/01/01", "2020/01/01"])
shelf.books[2].add_reads(["2018/01/01"])
# Execute
result = shelf.all_reads
# Verify
assert len(result) == 3
print(result)
for r in result:
assert isinstance(r, BookRead)
``` |
{
"source": "JoseAltamir1/AtlassianAPI",
"score": 2
} |
#### File: src/utils/Utilities.py
```python
import ConfigParser
def singleton(cls):
instances = {}
def getInstance(*args, **kwargs):
if cls not in instances:
instances[cls] = cls(*args, **kwargs)
return instances[cls]
return getInstance  # return the wrapper so the decorator actually works
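# Hypothetical usage sketch (not part of the original file): decorating a
# class makes every instantiation return the same cached object.
# @singleton
# class Config(object):
#     pass
# assert Config() is Config()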
def loadIni():
config = ConfigParser.ConfigParser()
config.read("res\\AtlassianConfig.ini")
return config
``` |
{
"source": "josealvarez97/Scientific-Computing",
"score": 4
} |
#### File: Scientific-Computing/Computing integrals/test_trapezoidal.py
```python
from trapezoidal import trapezoidal
def test_trapezoidal_one_exact_result():
"""Compare one hand-computed result."""
from math import exp
v = lambda t: 3*(t**2)*exp(t**3)
n = 2
computed = trapezoidal(v, 0, 1, n)
expected = 2.463642041244344
error = abs(expected - computed)
tol = 1E-14
success = error < tol
msg = 'error=%g > tol=%g' % (error, tol)
assert success, msg
def test_trapezoidal_linear():
"""Check that linear functions are integrated exactly."""
f = lambda x: 6*x - 4
F = lambda x: 3*x**2 - 4*x # Anti-derivative
a = 1.2; b = 4.4
expected = F(b) - F(a)
tol = 1E-14
for n in 2, 20, 21:
computed = trapezoidal(f, a, b, n)
error = abs(expected - computed)
success = error < tol # Mathematically exact, but floating-point rounding requires a tolerance
msg = 'n=%d, err=%g' % (n, error)
assert success, msg
test_trapezoidal_one_exact_result()
test_trapezoidal_linear()
```
#### File: diffeq-cloud-functions/numerical-integration/trapezoidal.py
```python
def trapezoidal(f, a, b, n):
h = float(b-a)/n
result = 0.5*f(a) + 0.5*f(b)
for i in range(1, n):
result += f(a + i*h)
result *= h
return result
def application():
from math import exp
v = lambda t: 3*(t**2)*exp(t**3)
n = int(input('n: '))
numerical = trapezoidal(v, 0, 1, n)
# Compare with exact result
V = lambda t: exp(t**3)
exact = V(1) - V(0)
error = exact - numerical
print('n=%d: %.16f, error: %g' % (n, numerical, error))
if __name__ == '__main__':
application()
```
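The function above implements the composite trapezoidal rule, integral from a to b of f(x) dx ~= h * (f(a)/2 + sum of f(a + i*h) + f(b)/2), whose error shrinks as O(h^2). A quick sketch (same module) that doubles n and watches the error drop by roughly a factor of four each time:
```python
from math import exp

v = lambda t: 3 * (t ** 2) * exp(t ** 3)
exact = exp(1) - exp(0)
for n in (10, 20, 40, 80):
    err = abs(exact - trapezoidal(v, 0, 1, n))
    print(n, err)  # error falls ~4x per doubling of n, i.e. O(h^2)
```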
#### File: diffeq-cloud-functions/pde-heat-equation/main.py
```python
from flask import send_file
from heat_equation import HeatEquationSolver
from zipfile import ZipFile
import os
from os.path import basename
from io import BytesIO
from math import *
def parse_parameter(request, param):
request_json = request.get_json()
print("param", param)
print("request json", request_json)
if request.args and param in request.args:
return request.args.get(param)
elif request_json and param in request_json:
return request_json[param]
else:
raise ValueError(f"Body is invalid, or missing '{param}' property")
def parse_function_parameter(request, param):
print("parse_function_parameter")
request_json = request.get_json()
if request.args and param in request.args:
exec(param + "=" + request.args.get(param), globals())
# f = locals()['f']
elif request_json and param in request_json:
exec(param + "="+ request_json[param], globals())
# f = locals()['f']
else:
raise ValueError(f"Body is invalid, or missing '{param}' property")
print("success with parse_function_parameter")
return globals()[param]
def heat_equation(request):
# Parse parameters
request_json = request.get_json()
heat_x_0 = parse_parameter(request, 'heat_x_0')
heat_x_max = parse_parameter(request, 'heat_x_max')
alpha = parse_parameter(request, 'alpha')
Nx = parse_parameter(request, 'Nx')
x_max = parse_parameter(request, 'x_max')
Mt = parse_parameter(request, 'Mt')
t_max = parse_parameter(request, 't_max')
f_0 = parse_function_parameter(request, 'f_0')
# L = 1
# f_0 = lambda x: sin(pi*x/L)
# phi_0 = phi_L = 0
# alpha=0.1
# Run simulation
solver = HeatEquationSolver(f_0=f_0,
heat_x_0=heat_x_0,
heat_x_max=heat_x_max,
alpha=alpha,
Nx=Nx, x_max=x_max,
Mt=Mt, t_max=t_max,
save_plots=True)
u_solution = solver.get_solution()
solver.plot_stacked(u_solution)
# Create a ZipFile object
memory_file = BytesIO()
with ZipFile(memory_file, 'w') as zipObj:
for folderName, subfolders, filenames in os.walk("/tmp/"):
for filename in filenames:
# Create complete filepath of file in directory
filePath = os.path.join(folderName, filename)
# Add file to zip
zipObj.write(filePath, basename(filePath))
print("Wrote zip file with results successfully.")
memory_file.seek(0)
print("about to send memory_file")
return send_file(memory_file, attachment_filename='heat_equation_results.zip', as_attachment=True)
```
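A hedged client-side sketch of how this function might be invoked once deployed. The URL is a placeholder, the parameter values are illustrative, and `f_0` is sent as a Python lambda expression string because `parse_function_parameter` exec's it:
```python
import requests

payload = {
    "heat_x_0": 0, "heat_x_max": 0, "alpha": 0.1,
    "Nx": 50, "x_max": 1.0, "Mt": 200, "t_max": 1.0,
    "f_0": "lambda x: sin(pi * x)",  # exec'd as f_0=<expr> by the handler
}
# Placeholder endpoint -- substitute the deployed Cloud Function URL.
resp = requests.post("https://REGION-PROJECT.cloudfunctions.net/heat_equation",
                     json=payload)
with open("heat_equation_results.zip", "wb") as f:
    f.write(resp.content)
```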
#### File: optimization-cloud-functions/energy-management/main.py
```python
from energy_management import optimize_static_network
from flask import jsonify
def static_energy_network(request):
request_json = request.get_json()
result = optimize_static_network(request_json)
return jsonify(
result=result)
```
#### File: Scientific-Computing/quantum-cloud-functions/karmarkar_karp.py
```python
from copy import deepcopy
def replace_element(array, target, new_elem):
for i in range(len(array)):
if array[i] == target:
array[i] = new_elem
break
def karmarkar_karp(number_list):
'''
largest_differencing_method
To-do:
* It's a mistake to sort the list more than once, it should be possible to do all operations in the same list.
'''
number_list.sort()
L = number_list
Ls = [L]
A = []
B = []
diffs = []
optimal_value = None
while True:
# Order the numbers
L.sort()
# Replace the largest and second-largest by their difference
diff = L[-1] - L[-2]
diffs.append(diff)
L = L[0:len(L)-2] + [diff]
Ls.append(deepcopy(L))
# If two or more numbers remain, return to step 1
if len(L) < 2:
# I don't really care about saving the last one into a list...
# But I do
A = L
optimal_value = L[0]
break
print(Ls)
# Using backtracking, compute the partition
for i in range(2, len(Ls)+1):
diff = diffs[-i+1]
print("diff", diff)
Ls[-i].sort()
if diff in A:
print(A)
print(Ls[-i])
# A = A[0:-1] + [Ls[-i][-1]]
replace_element(A, diff, Ls[-i][-1])
B.append(Ls[-i][-2])
else:
# B = B[0:-1] + [Ls[-i][-1]]
replace_element(B, diff, Ls[-i][-1])
A.append(Ls[-i][-2])
print(A, B)
return A, B, optimal_value
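# Worked trace for [4, 5, 6, 7, 8] (what the loop above produces):
# [4,5,6,7,8] -> 8-7=1 -> [1,4,5,6] -> 6-5=1 -> [1,1,4] -> 4-1=3 -> [1,3] -> 3-1=2 -> [2]
# so optimal_value = 2. Largest differencing is a heuristic: the true optimum here
# is 0 (4+5+6 = 7+8 = 15), which illustrates why the result is only approximate.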
def application():
# partition_weights = [1, 5, 9, 21, 35, 5, 3, 5, 10, 11]
partition_weights = [4,5,6,8,7]
ship_A, ship_B, ship_diff = karmarkar_karp(partition_weights)
print("\nVerify result")
print("sum(ship_A) - sum(ship_B)", sum(ship_A) - sum(ship_B))
print("ship_diff", ship_diff)
print(ship_A)
print(ship_B)
if __name__ == '__main__':
application()
```
#### File: Scientific-Computing/quantum-cloud-functions/main.py
```python
from flask import jsonify
import time
from number_partition import create_problem_for_number_partition, create_simplified_problem_for_number_partition, solve_number_partition
from karmarkar_karp import karmarkar_karp
def cloud_function(request):
""" Responds to any HTTP request.
Args:
request (flask.Request): HTTP request object.
Returns:
The response text or any set of values that can be turned into a
Response object using
`make_response <http://flask.pocoo.org/docs/1.0/api/#flask.Flask.make_response>`.
https://cloud.google.com/functions/docs/quickstart-python
"""
request_json = request.get_json()
partition_weights = None
method = None
if not request_json:
return jsonify(
result="Bad request.")
if 'partition_weights' in request_json:
partition_weights = request_json['partition_weights']
if 'method' in request_json:
method = request_json['method']
result = None
formatted_result = None
if method == 'qubo' or method is None:
# problem = create_problem_for_number_partition(partition_weights)
problem = create_simplified_problem_for_number_partition(partition_weights)
result = solve_number_partition(problem)
elif method == 'karmarkar_karp':
result = karmarkar_karp(partition_weights)
return jsonify(
result=result)
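# Example request body (hypothetical):
# {"partition_weights": [4, 5, 6, 8, 7], "method": "karmarkar_karp"}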
``` |
{
"source": "JoseAmador95/Python-CayenneLPP",
"score": 4
} |
#### File: Python-CayenneLPP/python_cayennelpp/methods.py
```python
def hex_to_int(hex_string, signed=True):
"""Returns hex_string converted to int. Method can work with signed 2's complement any length.
:param hex_string: hex string. Example 'DEADBEEF' or 'deadbeef'.
:param signed: boolean. True or False, indicating if hex signed. Default True
:return: int representation of hex_string
"""
if signed:
# get total number of bits to be able to extract MSB. If MSB=1 number is signed
bits = len(bytearray.fromhex(hex_string)) * 8
val = int('0x' + hex_string, 16)
# get MSB; if MSB = 1 the number is negative - take 2's complement
if (val & (1 << (bits - 1))) != 0: # if sign bit is set e.g., 8bit: 128-255
val = val - (1 << bits) # compute negative value
return val
else:
return int(hex_string, 16)
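# Examples: hex_to_int('04D2') == 1234; hex_to_int('FB2E') == -1234 (MSB set,
# so 0xFB2E - 0x10000); hex_to_int('FF', signed=False) == 255.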
def digital_input_output_presence_illuminance(data):
"""Digital Output/Input/Presence/Illuminance | Data Resolution per bit = 1, Unsigned
All these values are the same in decoding.
:param data: hex string of sensor value
:return: int decoded value
"""
return hex_to_int(data, False)
def analog_input_output(data):
"""Analog Input/Output | Data Resolution per bit = 0.01 Signed
:param data: hex string of sensor value
:return: int decoded value
"""
return hex_to_int(data) / 100.0
def temperature(data):
"""Temperature | Data Resolution per bit = 0.1 degC Signed MSB
:param data: hex string of sensor value
:return: int decoded value
"""
return hex_to_int(data) / 10.0
def humidity(data):
"""Humidity | Data Resolution per bit = 0.5 % Unsigned
:param data: hex string of sensor value
:return: int decoded value
"""
return hex_to_int(data, False) / 2.0
def accelerometer(data):
"""Accelerometer | Data Resolution per bit = 0.001 G Signed MSB per axis
Data Size: 6 bytes. x axis value = 2 bytes, y axis value = 2 bytes, z axis value = 2 bytes.
Example: 04 D2 FB 2E 00 00 --> 04D2 - x, FB2E - y, 0000 - z.
:param data: hex string of sensor value
:return: dictionary of x,y,z axis as keys and their values
"""
return {'x': hex_to_int(data[:4]) / 1000.0, 'y': hex_to_int(data[4:8]) / 1000.0, 'z': hex_to_int(data[8:]) / 1000.0}
def barometer(data):
"""Barometer | Data Resolution per bit = 0.1 hPa Unsigned MSB
:param data: hex string of sensor value
:return: int decoded value
"""
return hex_to_int(data, False) / 10.0
def gyrometer(data):
"""Gyrometer | Data Resolution per bit = 0.01 deg/s Signed MSB per axis
Data Size: 6 bytes. x axis value = 2 bytes, y axis value = 2 bytes, z axis value = 2 bytes.
Example: 04 D2 FB 2E 00 00 --> 04D2 - x, FB2E - y, 0000 - z.
:param data: hex string of sensor value
:return: dictionary of x,y,z axis as keys and their values
"""
return {'x': hex_to_int(data[:4]) / 100.0, 'y': hex_to_int(data[4:8]) / 100.0, 'z': hex_to_int(data[8:]) / 100.0}
def gps_location(data):
"""GPS Location | Data Resolution per bit below
* Latitude : 0.0001 deg Signed MSB
* Longitude : 0.0001 deg Signed MSB
* Altitude : 0.01 meter Signed MSB
:param data: hex string of sensor value
:return: dictionary of lat,long,alt as key and their values
"""
return {'lat': hex_to_int(data[:6]) / 10000.0, 'long': hex_to_int(data[6:12]) / 10000.0, 'alt': hex_to_int(data[12:]) / 100.0}
hex_library = {
"00": {
"name": "Digital Input",
"size": 2,
"action": digital_input_output_presence_illuminance
},
"01": {
"name": "Digital Output",
"size": 2,
"action": digital_input_output_presence_illuminance
},
"02": {
"name": "Analog Input",
"size": 4,
"action": analog_input_output
},
"03": {
"name": "Analog Output",
"size": 4,
"action": analog_input_output
},
"65": {
"name": "Illuminance Sensor",
"size": 4,
"action": digital_input_output_presence_illuminance
},
"66": {
"name": "Presence Sensor",
"size": 2,
"action": digital_input_output_presence_illuminance
},
"67": {
"name": "Temperature Sensor",
"size": 4,
"action": temperature
},
"68": {
"name": "Humidity Sensor",
"size": 2,
"action": humidity
},
"71": {
"name": "Accelerometer",
"size": 12,
"action": accelerometer
},
"73": {
"name": "Barometer",
"size": 4,
"action": barometer
},
"86": {
"name": "Gyrometer",
"size": 12,
"action": gyrometer
},
"88": {
"name": "GPS Location",
"size": 18,
"action": gps_location
}
}
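# --- Hypothetical usage sketch (not part of the original module) ---
# A CayenneLPP payload is a hex string of [channel][type][data] fields; the type
# byte selects an entry in hex_library, whose "size" gives the data width in hex
# chars and whose "action" decodes it. E.g. '0367010A' -> channel '03',
# Temperature Sensor, 0x010A / 10.0 = 26.6 degC. Assumes well-formed payloads
# whose type bytes all appear in hex_library.
def decode_payload(payload):
    decoded, i = [], 0
    while i < len(payload):
        channel, data_type = payload[i:i + 2], payload[i + 2:i + 4]
        entry = hex_library[data_type]
        value = entry["action"](payload[i + 4:i + 4 + entry["size"]])
        decoded.append({"channel": channel, "type": entry["name"], "value": value})
        i += 4 + entry["size"]
    return decoded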
``` |
{
"source": "joseamarcucci/PyCollab-1",
"score": 2
} |
#### File: PyCollab-1/controladores/JotControlador.py
```python
import jwt
import requests
import datetime
import time
import json
from cachetools import TTLCache
import ssl
import sys
class JotControlador():
def __init__(self,domain, key, secret,cert):
self.domain = domain
self.key = key
self.secret = secret
self.cert = cert
exp = datetime.datetime.utcnow() + datetime.timedelta(minutes=5.0)
header = {
"alg":"RS256",
"typ":"JWT"
}
claims = {
"iss": self.key,
"sub": self.key,
"exp": exp
}
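# Note: the RS256 `header` dict above is built but never passed to jwt.encode;
# with PyJWT, encode(claims, key) defaults to HS256. Honouring the declared header
# would require jwt.encode(claims, key, algorithm="RS256", headers=header) and an
# RSA private key instead of a shared secret.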
self.assertion = jwt.encode(claims,self.secret)
self.grant_type = "urn:ietf:params:oauth:grant-type:jwt-bearer"
self.payload = {
"grant_type": self.grant_type,
"assertion": self.assertion
}
self.verify_cert = cert
self.jcache = None
def getKey(self):
return self.key
def getSecret(self):
return self.secret
def setJot(self):
# Create the request with the header, payload and signature
endpoint = 'https://' + self.domain + '/token'
if self.jcache != None:
try:
token = self.jcache["jwtoken"]
return token
except KeyError:
pass
r = requests.post(endpoint, data=self.payload, auth=(self.key, self.secret), verify=self.cert)
if r.status_code == 200:
json_valores = json.loads(r.text)
self.jcache = TTLCache(maxsize=1, ttl=json_valores['expires_in'])
self.jcache['jwtoken'] = json_valores['access_token']
elif r.status_code == 401:
print("Your Blackboard Collaborate credentials are not valid, check it with <EMAIL>")
elif r.status_code == 400:
print("Your json Config.py is not valid")
else:
print("[auth:jotToken()] ERROR: " + str(r))
def getJot(self):
try:
if self.jcache != None:
token = self.jcache['jwtoken']
return token
else:
sys.exit()
except KeyError:
self.setJot()
return self.jcache['jwtoken']
```
#### File: joseamarcucci/PyCollab-1/Utilidades.py
```python
import datetime
import requests
from tqdm import tqdm
from tqdm import trange
from webService import WebService
import csv
import sys,getopt
import json
from time import sleep
webService = WebService()
def listaGrabaciones(recordings):
recordinglist = []
x=0
try:
number_of_recordings = (len(recordings['results']))
if number_of_recordings <= 0:
return None
while x < number_of_recordings:
recording_id = recordings['results'][x]['id']
rec_data = webService.get_recording_data(recording_id)
if rec_data != None:
if 'mediaDownloadUrl' in rec_data:
size = recording_storageSize(rec_data['mediaDownloadUrl'])
elif 'storageSize' in recordings['results'][x]:
size = recordings['results'][x]['storageSize']
else:
size = recording_storageSize(rec_data['extStreams'][0]['streamUrl'])
recordinglist.append({"recording_id" : recordings['results'][x]['id'], "recording_name" : recordings['results'][x]['name'], "duration":recordings['results'][x]['duration'], "storageSize":size,"created": recordings['results'][x]['created']})
else:
recordinglist.append({"recording_id" : recordings['results'][x]['id'],"recording_name" : recordings['results'][x]['name'],'msg':403})
x += 1
return recordinglist
except TypeError:
return None
def listaGrabacionesOnlyData(recordings):
recordinglist = []
x=0
try:
number_of_recordings = (len(recordings['results']))
if number_of_recordings <= 0:
return None
while x < number_of_recordings:
recording_id = recordings['results'][x]['id']
rec_data = webService.get_recording_data_4url(recording_id)
if rec_data != None:
if 'mediaDownloadUrl' in rec_data:
size = recording_storageSize(rec_data['mediaDownloadUrl'])
elif 'storageSize' in recordings['results'][x]:
size = recordings['results'][x]['storageSize']
else:
size = recording_storageSize(rec_data['extStreams'][0]['streamUrl'])
recordinglist.append({"recording_id" : recordings['results'][x]['id'], "recording_name" : recordings['results'][x]['name'], "duration":recordings['results'][x]['duration'], "storageSize":size,"created": recordings['results'][x]['created']})
else:
recordinglist.append({"recording_id" : recordings['results'][x]['id'],"recording_name" : recordings['results'][x]['name'],'msg':403})
x += 1
return recordinglist
except TypeError:
return None
def listaGrabacion(recording_info):
if recording_info == None:
return None
else:
if 'storageSize' in recording_info and 'created' in recording_info:
recording_data = {"recording_id" : recording_info['id'], "recording_name" : recording_info['name'], "duration":recording_info['duration'], "storageSize":recording_info['storageSize'],"created": recording_info['created']}
else:
rec_data = webService.get_recording_data(recording_info['results'][0]['id'])
if 'mediaDownloadUrl' in rec_data:
size = recording_storageSize(rec_data['mediaDownloadUrl'])
else:
size = recording_storageSize(rec_data['extStreams'][0]['streamUrl'])
recording_data = {"recording_id" : recording_info['id'], "recording_name" : recording_info['name'], "duration":recording_info['duration'], "storageSize":size,"created": recording_info['created']}
return recording_data
def listaGrabacionCollabData(recording_info):
if recording_info == None:
return None
else:
if 'mediaDownloadUrl' in recording_info:
size = recording_storageSize(recording_info['mediaDownloadUrl'])
chats = recording_info['chats']
downloadUrl = None
if len(chats) > 0:
chat = recording_info['chats'][0]['url']
else:
chat = None
recording_data = {'downloadUrl':recording_info['mediaDownloadUrl'], 'recording_name':recording_info['name'],'duration':recording_info['duration'],'created':recording_info['created'],'size':size, 'chat':chat}
else:
try:
downloadUrl = recording_info['extStreams'][0]['streamUrl']
size = recording_storageSize(downloadUrl)
try:
chats = recording_info['chats']
if len(chats) > 0:
chat = recording_info['chats'][0]['url']
else:
chat = None
except KeyError:
chat = None
except KeyError:
downloadUrl = None
size = 0
chat = None
if downloadUrl != None:
recording_data = {'downloadUrl':downloadUrl, 'recording_name':recording_info['name'],'duration':recording_info['duration'],'created':recording_info['created'],'size':size,'chat':chat}
else:
recording_data = {'downloadUrl': None, 'recording_name':recording_info['name'],'duration':recording_info['duration'],'created':recording_info['created'],'size':size,'chat':chat}
return recording_data
def recording_id(url:str):
splitedURL = url.split('/')
ultimo = splitedURL[len(splitedURL)-1]
return ultimo
def readCollabReport(fileName:str):
recording_ids = []
with open(fileName,encoding='utf-8') as f:
columnas = f.readline()
if 'RecordingLink' in columnas:
with open(fileName, newline='', encoding='utf-8') as nline:
registers = csv.DictReader(nline)
for register in registers:
recording = recording_id(register['RecordingLink'])
sessionOwner = register['SessionOwner']
sessionName = register['SessionName']
sessionIdentifier = register['SessionIdentifier']
recName = register['RecordingName']
recording_ids.append({'recording':recording, 'sessionOwner':sessionOwner,'recName':recName,'sessionName':sessionName, 'sessionId':sessionIdentifier})
return recording_ids
else:
return None
def recording_storageSize(url:str):
r = requests.get(url, stream=True,headers={'Accept-Encoding': None})
size = int(r.headers.get('content-length',0))
return size
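# Note: passing 'Accept-Encoding': None strips requests' default gzip header, so the
# Content-Length reported by the server is the uncompressed size of the recording.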
def descargarGrabacion(url:str, fname:str):
resp = requests.get(url,stream=True)
total = int(resp.headers.get('content-length',0))
progress_bar = tqdm(total=total, unit='iB', unit_scale=True,unit_divisor=1024)
with open(fname,'wb') as file:
for data in resp.iter_content(chunk_size=1024):
size = file.write(data)
progress_bar.update(size)
progress_bar.close()
def crearArchivoChat(url:str,fname:str):
chatFile = requests.get(url, stream=True)
if chatFile.status_code == 200:
try:
jsonInfo = json.loads(chatFile.text)
#CSV
filename = fname + '.csv'
header = ["Participant id", "Student Name", "Message"]
try:
file = open(filename, 'w', encoding="utf-8")
writer = csv.writer(file)
writer.writerow(header)
for jsonRow in jsonInfo:
writer.writerow([jsonRow['id'],jsonRow['userName'], jsonRow['body']])
file.close()
except OSError as oserror:
if oserror.errno == 36:
print("Long file name")
pass
total = int(chatFile.headers.get('content-length',0))
with tqdm(total=total) as progress_bar:
for size in trange(total):
progress_bar.update(size)
progress_bar.close()
except json.decoder.JSONDecodeError:
print('Chat file is empty')
pass
else:
print("Chat URL is not valid:", str(chatFile))
def downloadrecording(recording_list, name, course_uuid):
for recording in recording_list:
recording_data = webService.get_recording_data(recording['recording_id'])
if recording_data != None:
filename = course_uuid + '-' + recording['recording_id'] + '-' + checkLongFilenameVideo(' ', recording['recording_name'])
chatFileName = 'Chat-' + course_uuid + '-' + recording['recording_id'] + '-' + checkLongFilenameChat(' ',recording['recording_name'])
fullpath = './downloads/'
print(fullpath + filename)
descargarGrabacion(recording_data['extStreams'][0]['streamUrl'],fullpath + filename)
if len(recording_data['chats']) == 0:
print("No chat on the recording")
else:
print("Downloaling chat")
downloadChats(recording_data['chats'][0],fullpath + chatFileName)
def downloadOneRecording(recording, course_id):
if recording != 403:
recording_data = webService.get_recording_data(recording['recording_id'])
if recording_data != None:
filename = course_id + '-' + recording['recording_id'] + '-' + checkLongFilenameVideo(course_id, recording['recording_name'])
chatFileName = 'Chat-' + course_id + '-' + recording['recording_id'] + '-' + checkLongFilenameChat(course_id,recording['recording_name'])
fullpath = './downloads/'
print(fullpath + filename)
descargarGrabacion(recording_data['extStreams'][0]['streamUrl'],fullpath + filename)
if len(recording_data['chats']) == 0:
print("No chat on the recording")
else:
print("Downloaling chat")
downloadChats(recording_data['chats'][0],fullpath + chatFileName)
return True
else:
return None
def downloadOneRecordingURL(recording,recording_url, course_id):
recording_data = webService.get_recording_data_4url(recording['recording_id'])
if recording_data != None:
filename = course_id + '-' + recording['recording_id'] + '-' + checkLongFilenameVideo(course_id, recording['recording_name'])
#chatFileName = 'Chat-' + course_id + '-' + recording['recording_id'] + '-' + checkLongFilenameChat(course_id,recording['recording_name'])
fullpath = './downloads/'
print(fullpath + filename)
descargarGrabacion(recording_url,fullpath + filename)
'''
if len(recording_data['chats']) == 0:
print("No chat on the recording")
else:
print("Downloaling chat")
downloadChats(recording_data['chats'][0],fullpath + chatFileName)
'''
return True
else:
return None
def downloadOneRecordingOnly(recording):
recording_data = webService.get_recording_data(recording['recording_id'])
if recording_data != None:
filename = recording['recording_id'] + '-' + checkLongFilenameVideo(' ', recording['recording_name'])
chatFileName = 'Chat-' + recording['recording_id'] + '-' + checkLongFilenameChat(' ',recording['recording_name'])
fullpath = './downloads/'
print(fullpath + filename)
descargarGrabacion(recording_data['extStreams'][0]['streamUrl'],fullpath + filename)
if len(recording_data['chats']) == 0:
print("No chat on the recording")
else:
print("Downloaling chat")
downloadChats(recording_data['chats'][0],fullpath + chatFileName)
return True
else:
return None
def downloadRecordingsUUID(recording_lista):
if recording_lista != None:
if 'recordingId' in recording_lista:
filename = recording_lista['recordingId'] + '-' + checkLongFilenameVideo('', recording_lista['recording_name'])
else:
filename = checkLongFilenameVideo(recording_lista['recordingId'], recording_lista['recording_name'])
chatFileName = 'Chat-' + filename
fullpath = './downloads/'
print(fullpath + filename)
descargarGrabacion(recording_lista['downloadUrl'],fullpath + filename)
if recording_lista['chat'] == None:
print("No chat on the recording")
else:
print("Downloaling chat")
downloadChatsFromURL(recording_lista['chat'],fullpath + chatFileName)
else:
print("No data from Recording ID on Collaborate")
def downloadFromURL(recording_id):
url = webService.getRecordingURL(recording_id)
return url
def downloadChats(chat_data,name):
chat_url = chat_data['url']
crearArchivoChat(chat_url,name)
def downloadChatsFromURL(chat_url,name):
crearArchivoChat(chat_url,name)
def deleteRecording(recording_id):
delete_info = webService.delete_recording(recording_id)
if delete_info != None:
return True
else:
return delete_info
def checkLongFilenameVideo(courseId:str, fname:str):
filename = fname.replace(':', ' ').replace('/', ' ').replace('”', '').replace('“', '').replace(',', '').replace('?', '').replace('|', '').replace('"', '') + '.mp4'
if len(filename) > 160:
return courseId + '.mp4'
else:
return filename
def checkLongFilenameChat(courseId:str, fname:str):
filename = fname.replace(':', ' ').replace('/', ' ').replace('”', '').replace('“', '').replace(',', '').replace('?', '').replace('|', '').replace('"', '')
if len(filename) > 160:
return " "
else:
return filename
def crearReporte(reporte):
filename = './reports/Collab_Download_RecordingReport.csv'
header = ["sessionOwner","Recording ID", "Recording Name", "Duration", "Storage Size (MB)", "Created Date"]
file = open(filename, 'w',newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(header)
for x in range(len(reporte)):
registro = reporte[x]
sessionOwner = registro[0]
recording_id = registro[1]
recording_name = registro[2]
duration = calcularTiempo(int(registro[3]/1000))
storage = str(round(float(registro[4])/1000000, 2))
created = convertirFecha(registro[5])
writer.writerow([sessionOwner,recording_id,recording_name,duration,storage,created])
file.close()
return "Report: Collab_Download_RecordingReport.csv created!"
def crearReporteMoodle(reporte):
filename = './reports/Collab_Moodle_Session_RecordingReport.csv'
header = ["Recording ID", "Recording Name", "Duration", "Storage Size (MB)", "Created Date"]
file = open(filename, 'w',newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(header)
for x in range(len(reporte)):
registro = reporte[x]
recording_id = registro[0]
recording_name = registro[1]
duration = calcularTiempo(int(registro[2]/1000))
storage = str(round(float(registro[3])/1000000, 2))
created = convertirFecha(registro[4])
writer.writerow([recording_id,recording_name,duration,storage,created])
file.close()
return "Report: Collab_Moodle_Session_RecordingReport.csv created!"
def crearReporteCollabDownload(reporte):
filename = './reports/Collab_Download_RecordingReport.csv'
header = ["Course ID/UUID","Recording ID", "Recording Name", "Duration", "Storage Size (MB)", "Created Date"]
file = open(filename, 'w',newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(header)
for x in range(len(reporte)):
registro = reporte[x]
course_id = registro[0]
recording_id = registro[1]
recording_name = registro[2]
duration = calcularTiempo(int(registro[3]/1000))
storage = str(round(float(registro[4])/1000000, 2))
created = convertirFecha(registro[5])
writer.writerow([course_id,recording_id,recording_name,duration,storage,created])
file.close()
return "Report: Collab_Download_RecordingReport.csv created!"
def crearReporteCollabRecIdDownload(reporte):
filename = './reports/Collab_Download_RecordingsId_Report.csv'
header = ["Recording ID", "Recording Name", "Duration", "Storage Size (MB)", "Created Date"]
file = open(filename, 'w',newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(header)
for x in range(len(reporte)):
registro = reporte[x]
recording_id = registro[0]
recording_name = registro[1]
duration = calcularTiempo(int(registro[2]/1000))
storage = str(round(float(registro[3])/1000000, 2))
created = convertirFecha(registro[4])
writer.writerow([recording_id,recording_name,duration,storage,created])
file.close()
return "Report: Collab_Download_RecordingsId_Report.csv created!"
def crearReporte_403(reporte):
filename = './reports/Collab_Download_RecordingReport_403.csv'
header = ["Course ID","Recording ID", "Recording Name", "Detail"]
file = open(filename, 'w',newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(header)
for x in range(len(reporte)):
registro = reporte[x]
course_id = registro[0]
recording_id = registro[1]
recording_name = registro[2]
detail = registro[3]
writer.writerow([course_id,recording_id,recording_name,detail])
file.close()
return "Report: Collab_Download_RecordingReport_403.csv created!"
def crearReporteCollab(reporte):
filename = './reports/Collab_Report_from_Course.csv'
headers = [ 'Course ID', 'Course Name','Course UUID', 'Recording ID', 'Recording Name','Duration', 'Storage Size (MB)', 'Created Date']
file = open(filename, 'w', newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(headers)
for x in range(len(reporte)):
registro = reporte[x]
course_id = registro[0]
couse_name = registro[1]
course_uuid = registro[2]
recording_id = registro[3]
recording_name = registro[4]
duration = calcularTiempo(int(registro[5]/1000))
storageSize = str(round(float(registro[6])/1000000, 2))
if registro[7]== 'not defined':
created = 'not defined'
else:
created = convertirFecha(registro[7])
writer.writerow([course_id,couse_name,course_uuid,recording_id,recording_name,duration,storageSize,created])
file.close()
return "Report: Collab_Report_from_Course.csv created!"
def crearReporteCollabRecordings(reporte):
filename = './reports/Collab_Report_Recordings.csv'
headers = ['sessionOwner','Recording ID', 'Recording Name','Duration', 'Storage Size (MB)', 'Created Date']
file = open(filename, 'w',newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(headers)
for x in range(len(reporte)):
registro = reporte[x]
session_ownner = registro[0]
recording_id = registro[1]
recording_name = registro[2]
duration = calcularTiempo(int(registro[3]/1000))
storageSize = str(round(float(registro[4])/1000000, 2))
created = convertirFecha(registro[5])
writer.writerow([session_ownner,recording_id,recording_name,duration,storageSize,created])
file.close()
def crearReporteCollab_403(reporte):
filename = './reports/Collab_Report_from_Course_error.csv'
headers = [ 'Course ID', 'Course Name','Course UUID', 'Recording ID', 'Recording Name','Detail']
file = open(filename, 'w',newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(headers)
for x in range(len(reporte)):
registro = reporte[x]
course_id = registro[0]
couse_name = registro[1]
course_uuid = registro[2]
recording_id = registro[3]
recording_name = registro[4]
detail = registro[5]
writer.writerow([course_id,couse_name,course_uuid,recording_id,recording_name,detail])
file.close()
return "Report: Collab_Report_from_Course_error.csv created!"
def crearReporte_Recordings_403(reporte):
filename = './reports/Collab_Report_Recordings_error.csv'
headers = [ 'OwnerSession', 'Session Name','Session ID', 'Recording ID', 'Recording Name','Detail']
file = open(filename, 'w',newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(headers)
for x in range(len(reporte)):
registro = reporte[x]
course_id = registro[0]
couse_name = registro[1]
course_uuid = registro[2]
recording_id = registro[3]
recording_name = registro[4]
detail = registro[5]
writer.writerow([course_id,couse_name,course_uuid,recording_id,recording_name,detail])
file.close()
return "Report: Collab_Download_RecordingReport_error.csv created!"
def crearReporteDelete(reporte):
filename = './reports/Collab_Delete_Recordings.csv'
headers = [ 'Recording Id', 'Status']
file = open(filename, 'w',newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(headers)
for x in range(len(reporte)):
registro = reporte[x]
recording_id = registro[0]
status = registro[1]
writer.writerow([recording_id,status])
file.close()
return "Report: Collab_Delete_Recordings.csv created!"
def leerCursos(filename):
cursos = []
with open(filename,encoding='utf-8') as reader:
for linea in reader:
contenido = linea.rstrip()
cursos.append(str(contenido))
reader.close()
return cursos
def leerUUID(filename):
uuids = []
with open(filename,encoding='utf-8') as reader:
for linea in reader:
contenido = linea.rstrip()
uuids.append(str(contenido))
reader.close()
return uuids
def leerRecUUID(filename):
uuids = []
with open(filename,encoding='utf-8') as reader:
for linea in reader:
contenido = linea.rstrip()
uuids.append(str(contenido))
reader.close()
return uuids
def main(argv):
archivoCursos = ''
archivoUUID = ''
semanas = 0
try:
opts,args = getopt.getopt(argv,"hf:e:w:", ["cfile=","ext=","weeks="])
except getopt.GetoptError:
print('Collab.py -f <LearnFileName_COURSE_ID.txt> -w <numberOfWeekBehindToSearch>')
print('Collab.py -e <LearnFileName_COURSE_UUID> -w <numberOfWeekBehindToSearch>')
sys.exit(2)
for opt,arg in opts:
if opt == '-h':
print('Collab.py -f <LearnFileName_COURSE_ID.txt> -w <numberOfWeekBehindToSearch>')
print('Collab.py -e <LearnFileName_COURSE_UUID> -w <numberOfWeekBehindToSearch>')
sys.exit()
elif opt in ('-f', '--cfile'):
archivoCursos = arg
elif opt in ('-w', '--weeks'):
semanas = int(arg)
elif opt in ('-e', '--ext'):
archivoUUID = arg
return [archivoCursos, archivoUUID, semanas]
def mainMoodle(argv):
moodleSessionID = ''
moodleLTI = ''
semanas = 0
try:
opts,args = getopt.getopt(argv,"hs:l:w:", ["session=","lti=","weeks="])
except getopt.GetoptError:
print("The correct params are:")
print('CollabMoodle.py -s <MoodlePlugInFileName_SESSION_ID.txt> -w <numberOfWeekBehindToSearch>')
print('CollabMoodle.py -l <MoodleFileName_LTI.txt> -w <numberOfWeekBehindToSearch>')
sys.exit(2)
for opt,arg in opts:
if opt == '-h':
print('CollabMoodle.py -s <MoodlePlugInFileName_SESSION_ID.txt> -w <numberOfWeekBehindToSearch>')
print('CollabMoodle.py -l <MoodleFileName_LTI.txt> -w <numberOfWeekBehindToSearch>')
sys.exit()
elif opt in ('-s', '--session'):
moodleSessionID = arg
elif opt in ('-l', '--lti'):
moodleLTI = arg
elif opt in ('-w', '--weeks'):
semanas = int(arg)
return [moodleSessionID, moodleLTI, semanas]
def mainReport(argv):
archivoCursos = ''
semanas = 0
try:
opts,args = getopt.getopt(argv,"hf:w:", ["cfile=","weeks="])
except getopt.GetoptError:
print('CollabReport.py -f <LearnFileName_COURSE_ID.txt> -w <numberOfWeekBehindToSearch>')
sys.exit(2)
for opt,arg in opts:
if opt == '-h':
print('CollabReport.py -f <LearnFileName_COURSE_ID.txt> -w <numberOfWeekBehindToSearch>')
sys.exit()
elif opt in ('-f', '--cfile'):
archivoCursos = arg
elif opt in ('-w', '--weeks'):
semanas = int(arg)
return [archivoCursos, semanas]
def mainRecordings(argv):
recordingsFile = ''
try:
opts,args = getopt.getopt(argv,"hf:", ["recordings="])
except getopt.GetoptError:
print("The correct params are:")
print('CollabRecordings.py -f <RecordingsReport.csv>')
sys.exit(2)
for opt,arg in opts:
if opt == '-h':
print('CollabRecordings.py -f <RecordingsReport.csv>')
sys.exit()
elif opt in ('-f', '--recordings'):
recordingsFile = arg
return [recordingsFile]
def mainMinutes(argv):
attendanceFile = ''
try:
opts,args = getopt.getopt(argv,"hf:", ["attendance="])
except getopt.GetoptError:
print("The correct params are:")
print('CollabMinutes.py -f <AttendanceReport.csv>')
sys.exit(2)
for opt,arg in opts:
if opt == '-h':
print('CollabMinutes.py -f <AttendanceReport.csv>')
sys.exit()
elif opt in ('-f', '--attendance'):
attendanceFile = arg
return [attendanceFile]
def mainStorage(argv):
recordingFile = ''
try:
opts,args = getopt.getopt(argv,"hf:", ["recordings="])
except getopt.GetoptError:
print("The correct params are:")
print('CollabStorage.py -f <RecordingReport.csv>')
sys.exit(2)
for opt,arg in opts:
if opt == '-h':
print('CollabStorage.py -f <RecordingReport.csv>')
sys.exit()
elif opt in ('-f', '--recordings'):
recordingFile = arg
return [recordingFile]
def mainRecfromid(argv):
attendanceFile = ''
try:
opts,args = getopt.getopt(argv,"hf:", ["list="])
except getopt.GetoptError:
print("The correct params are:")
print('CollabRecfromid.py -f <recordingids_list.txt>')
sys.exit(2)
for opt,arg in opts:
if opt == '-h':
print('CollabRecfromid.py -f <recordingids_list.txt>')
sys.exit()
elif opt in ('-f', '--list'):
attendanceFile = arg
return [attendanceFile]
def mainDelete(argv):
attendanceFile = ''
try:
opts,args = getopt.getopt(argv,"hf:", ["list="])
except getopt.GetoptError:
print("The correct params are:")
print('CollabDeleteRecordings.py -f <recordingids_list.txt>')
sys.exit(2)
for opt,arg in opts:
if opt == '-h':
print('CollabDeleteRecordings.py -f <recordingids_list.txt>')
sys.exit()
elif opt in ('-f', '--list'):
attendanceFile = arg
return [attendanceFile]
def calcularTiempo(s):
m, s = divmod(s,60)
h,m = divmod(m,60)
d, h = divmod(h,24)
tiempoEnSesion = datetime.time(h,m,s)
return tiempoEnSesion.strftime('%H:%M:%S')
def convertirFecha(fecha):
objetoFecha = datetime.datetime.strptime(fecha,'%Y-%m-%dT%H:%M:%S.%fZ')
return objetoFecha.strftime('%b %d,%Y')
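# Examples: calcularTiempo(3725) -> '01:02:05';
# convertirFecha('2021-03-05T10:00:00.000Z') -> 'Mar 05,2021'.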
def semanasAtiempo(weeks):
tiempo = datetime.datetime.now() - datetime.timedelta(weeks=int(weeks))
tiempo = tiempo.strftime('%Y-%m-%dT%H:%M:%SZ')
return tiempo
def collabTimeToMinutes(stime:str):
#ctime = datetime.datetime.strptime(stime,'%H:%M:%S').time()
ctime = stime.split(':')
hours = float(ctime[0])*60
minutes = float(ctime[1])
seconds = float(ctime[2])/60
return round(hours + minutes + seconds)
def collabMinutes(fileName:str):
minutes = 0
with open(fileName, encoding='utf-8') as f:
columnas = f.readline()
if 'AttendeeTotalTimeInSession' in columnas:
with open(fileName, newline='', encoding='utf-8') as nline:
registers = csv.DictReader(nline)
for register in registers:
minutes += collabTimeToMinutes(register['AttendeeTotalTimeInSession'])
fminutes = round(minutes)
return '{:,}'.format(fminutes)
else:
return None
def collabMinutesLoginGroups(fileName:str,login_gropus):
minutesByLoginGroup = convertList2Dic(login_gropus)
with open(fileName, newline='', encoding='utf-8') as nline:
registers = csv.DictReader(nline)
for register in registers:
if register['SessionOwner'] in login_gropus:
data = str2Minutes(register['RecordingDuration'])
if data != None:
minutesByLoginGroup[register['SessionOwner']] += data
#minutesByLoginGroup[register['SessionOwner']] += 1
nline.close()
return minutesByLoginGroup
def convertList2Dic(aList):
fDict = {aList[idx]:0 for idx in range(0,len(aList))}
return fDict
def str2Minutes(t):
s = str(t)
total = 0
times = s.split(':')
if len(times) == 1 and times[0] == '':
pass
else:
horas2min = float(times[0]) * 60
mins = float(times[1])
seg2min = float(times[2]) / 60
total = round(horas2min + mins + seg2min)
return total
def collabLoginGroup(fileName:str):
loginGroup = []
sessionOwners = []
with open(fileName, encoding='utf-8') as f:
columnas = f.readline()
if 'SessionOwner' in columnas:
with open(fileName, newline='', encoding='utf-8') as nline:
registers = csv.DictReader(nline)
for register in registers:
sessionOwners.append(register['SessionOwner'])
loginGroup = list(set(sessionOwners))
return loginGroup
else:
return None
def collabStorage(fileName:str, ltiAccount:str):
storage = 0.0
with open(fileName, encoding='utf-8') as f:
columnas = f.readline()
if 'RecordingDuration' in columnas:
with open(fileName, newline='', encoding='utf-8') as nline:
registros = csv.DictReader(nline)
for registro in registros:
#minutes += collabTimeToMinutes(registro['StorageUsageGigabytes'])
if registro['SessionOwner'] == ltiAccount:
storage += float(registro['StorageUsageGigabytes'])
fstorage = round(storage)
return '{:,}'.format(fstorage)
else:
return None
def listRecordingids(filename):
recids = []
with open(filename,encoding='utf-8') as reader:
for linea in reader:
contenido = linea.rstrip()
recids.append(str(contenido))
reader.close()
return recids
``` |
{
"source": "joseamarcucci/pymysql-example",
"score": 2
} |
#### File: joseamarcucci/pymysql-example/app.py
```python
from mysql.connector import (connection)
from mysql.connector import errors
# _strioerror is an internal helper of mysql-connector-python's network module,
# referenced by the patched open_connection body below
from mysql.connector.network import MySQLTCPSocket, _strioerror
import os
import re
import socket
import socks
try:
QG_ENVVAR = os.environ['QUOTAGUARDSTATIC_URL']
except KeyError:
try:
QG_ENVVAR = os.environ['QUOTAGUARDSHIELD_URL']
except KeyError:
print("Missing QUOTAGUARDSTATIC_URL and QUOTAGUARDSHIELD_URL. Exiting")
exit(1)
QG_PORT = 1080
QG_USER, QG_PASS, QG_HOST = re.split(r"[:@\/]", QG_ENVVAR)[3:-1]
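# Example (hypothetical): QUOTAGUARDSTATIC_URL = "socks5://user:pass@host:1080".
# re.split(r"[:@\/]", ...) yields ['socks5', '', '', 'user', 'pass', 'host', '1080'],
# so the [3:-1] slice picks out (user, pass, host).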
PATCH = True
def monkey_patch_open_connection(self):
"""Open the TCP/IP connection to the MySQL server
"""
# Get address information
addrinfo = [None] * 5
try:
addrinfos = socket.getaddrinfo(self.server_host,
self.server_port,
0, socket.SOCK_STREAM,
socket.SOL_TCP)
# If multiple results we favor IPv4, unless IPv6 was forced.
for info in addrinfos:
if self.force_ipv6 and info[0] == socket.AF_INET6:
addrinfo = info
break
elif info[0] == socket.AF_INET:
addrinfo = info
break
if self.force_ipv6 and addrinfo[0] is None:
raise errors.InterfaceError(
"No IPv6 address found for {0}".format(self.server_host))
if addrinfo[0] is None:
addrinfo = addrinfos[0]
except IOError as err:
raise errors.InterfaceError(
errno=2003, values=(self.get_address(), _strioerror(err)))
else:
(self._family, socktype, proto, _, sockaddr) = addrinfo
# Instantiate the socket and connect
try:
self.sock = socks.socksocket(self._family, socktype, proto) #socket.socket(self._family, socktype, proto)
self.sock.set_proxy(socks.SOCKS5, QG_HOST, QG_PORT, True, QG_USER, QG_PASS)
self.sock.settimeout(self._connection_timeout)
self.sock.connect(sockaddr)
except IOError as err:
raise errors.InterfaceError(
errno=2003, values=(self.get_address(), _strioerror(err)))
except Exception as err:
raise errors.OperationalError(str(err))
# link in the monkey patch
if PATCH:
MySQLTCPSocket.open_connection = monkey_patch_open_connection
if __name__ == "__main__":
try:
DB_ENVVAR = os.environ['DATABASE']
DB_USER, DB_PASS, DB_HOST, DB_PORT, DB_NAME = re.split(r"[:@\/]", DB_ENVVAR)[3:]
except KeyError:
print("Missing DATABASE environment variable")
exit(1)
print("Connecting {}:{} to {} on {}:{}".format(DB_USER, DB_PASS, DB_NAME, DB_HOST, DB_PORT))
# test the connection
cnx = connection.MySQLConnection(user=DB_USER, password=<PASSWORD>, host=DB_HOST, database=DB_NAME, port=DB_PORT)
cursor = cnx.cursor()
query = "SELECT SUBSTRING_INDEX(USER(),'@',-1)"
cursor.execute(query)
for (ip) in cursor:
try:
octet = "(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)"
match = "({}[-.]{}[-.]{}[-.]{})\.".format(octet, octet, octet, octet)
found = re.sub(r"-", ".", re.search(match, ip[0]).group(1))
#validate IP
match = "({}\.{}\.{}\.{})".format(octet, octet, octet, octet)
found = re.search(match, found).group(1)
print("Connected via {}".format(found))
except AttributeError:
print("Connected, but unable to determine IP address: {}".format(ip[0]))
cursor.close()
cnx.close()
``` |
{
"source": "joseamolina/Machine_Learning_Techniques",
"score": 3
} |
#### File: joseamolina/Machine_Learning_Techniques/KNN_Jose.py
```python
import numpy as np
import pandas as pd
from math import sqrt
class KnnCancerClassifier:
# Initialize class variables
def __init__(self, file_name):
# Read from data set to a numpy array
self.data_set_cancer = np.genfromtxt(file_name, delimiter=',')
self.k = None
self.entries_data = np.shape(self.data_set_cancer)[0]
self.len_data = np.shape(self.data_set_cancer[0])[0]
self.testing_data_set = None
self.len_test_data = None
self.applier_distance = None
self.predictor_normal = True
# This method switches the k value for normal knn depending on passing parameter.
def _set_k(self, k_measure: "string") -> "None":
switcher = {
'static': 3,
'squared': int(np.round(np.sqrt(self.len_data))),
'n-fold': int(np.round(self.len_data / self.len_test_data)) + 1
}
self.k = switcher.get(k_measure)
# This method switches the distance calculating parameter depending on passing parameter.
def _set_distance(self, distance: "string") -> "None":
switcher = {
'euclidean': self._distance_euclidean,
'manhattan': self._distance_manhattan,
'chebyshev': self._distance_chebyshev,
'canberra': self._distance_canberra,
'braycurtis': self._distance_braycurtis
}
self.applier_distance = switcher.get(distance)
# Calculates the euclidean distance between 2 vectors
def _distance_euclidean(self, c: "Int", target) -> "Double":
# Parametrize data
target_new = list(map(lambda r: self._parametize_data(r[1], r[0]), enumerate(target[:self.len_data - 1])))
entry_new = list(map(lambda r: self._parametize_data(r[1], r[0]), enumerate(self.data_set_cancer[c, :self.len_data - 1])))
return np.sqrt(np.sum(np.square(np.subtract(target_new, entry_new))))
# Calculates the manhattan distance between 2 vectors
def _distance_manhattan(self, c: "Int", target) -> "Double":
# Parametrize data
target_new = list(map(lambda r: self._parametize_data(r[1], r[0]), enumerate(target[:self.len_data - 1])))
entry_new = list(
map(lambda r: self._parametize_data(r[1], r[0]), enumerate(self.data_set_cancer[c, :self.len_data - 1])))
return np.sum(np.abs(np.subtract(target_new, entry_new)))
# Calculates the chebyshev distance between 2 vectors
def _distance_chebyshev(self, c: "Int", target) -> "Double":
# Parametrize data
target_new = list(map(lambda r: self._parametize_data(r[1], r[0]), enumerate(target[:self.len_data - 1])))
entry_new = list(
map(lambda r: self._parametize_data(r[1], r[0]), enumerate(self.data_set_cancer[c, :self.len_data - 1])))
return np.max(np.abs(np.subtract(target_new, entry_new)))
# Calculates the canberra distance between 2 vectors
def _distance_canberra(self, c: "Int", target) -> "Double":
return sum(abs(self._parametize_data(target[i], i) - self._parametize_data(self.data_set_cancer[c, i], i)) / (abs(
self._parametize_data(target[i], i)) + abs(self._parametize_data(self.data_set_cancer[c, i], i))) for i in
range(self.len_data - 1))
# Calculates the braycurtis distance between 2 vectors
def _distance_braycurtis(self, c: "Int", target) -> "Double":
return sum(abs(self._parametize_data(target[i], i) - self._parametize_data(self.data_set_cancer[c, i], i)) for i in range(self.len_data - 1)) / (sum(
self._parametize_data(target[k], k) for k in range(self.len_data - 1)) + sum(self._parametize_data(
self.data_set_cancer[c, y], y) for y in range(self.len_data - 1)))
# This method says if a prediction of a target query is valid or not. Normal distance
def _calculateDistances(self, target_ob):
# Create a Series data structure containing indexes to be measured
distances = pd.Series(range(self.entries_data))
# For each content of the series, calculate the distance between the target query and the indexed point
distances_computed = distances.apply(lambda x: self.applier_distance(x, target_ob))
# To sort the indexes according to the values contained (distances between target and each entry)
sorted_distances = np.argsort(distances_computed)
# Get the n first entries and count them
counts = np.bincount([int(self.data_set_cancer[i, 5]) for i in sorted_distances[:self.k]])
# Return T if the prediction is valid
return counts.argmax() == target_ob[5]
# This method says if a prediction of a target query is valid or not. Weighted distance
def _predict_weighted(self, target_ob, n):
# Create a Series data structure containing indexes to be measured
distances = pd.Series(range(self.entries_data))
# For each entry, create a Tuple (class, 1/distance); being distance the distance between the entry
# and the target
distances_computed = distances.apply(lambda x: (self.data_set_cancer[x, 5], 1/(self.applier_distance(x, target_ob))**n))
# To sum for each class the frequency
sum_0 = 0
sum_1 = 0
for i in distances_computed:
if i[0] == 0:
sum_0 += i[1]
else:
sum_1 += i[1]
# To guess what class wins
class_pred = 0
if sum_1 >= sum_0:
class_pred = 1
# Return T if the prediction is valid
return class_pred == target_ob[5]
def test_accuracy(self, file, k_set, distance):
# Read instances of testing data set
self.testing_data_set = np.genfromtxt(file, delimiter=',')
# Get references of testing
self.len_test_data = np.shape(self.testing_data_set)[0]
self._set_k(k_set)
self._set_distance(distance)
# Create set containing the distances calculated for each testing entry
prediction_applied = pd.Series(range(self.len_test_data))
list_bool = prediction_applied.apply(lambda x: self._calculateDistances(self.testing_data_set[x]))
# Print results
print("Accuracy of the model: {0}%".format((list_bool.value_counts().get(True) * 100) / self.len_test_data))
print("Parameters: Distance: {0}, K format: {1}\n\n".format(distance, k_set))
def test_accuracy_weighted(self, file, distance, n):
self.testing_data_set = np.genfromtxt(file, delimiter=',')
self.len_test_data = np.shape(self.testing_data_set)[0]
self._set_distance(distance)
prediction_applied = pd.Series(range(self.len_test_data))
list_bool = prediction_applied.apply(lambda x: self._predict_weighted(self.testing_data_set[x], n))
print("Accuracy of the model: {0}%".format((list_bool.value_counts().get(True) * 100) / self.len_test_data))
print("Parameters: Distance: {0} and n= {1}\n\n".format(distance, n))
# This method normalizes the data according to its parameters range
@staticmethod
def _parametize_data(obj, index):
switcher = {
0: (obj - 1) / (5 - 1),
1: (obj - 1) / (120 - 1),
2: (obj - 1) / (4 - 1),
3: (obj - 1) / (5 - 1),
4: (obj - 1) / (4 - 1)
}
return switcher.get(index)
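# Each feature is min-max scaled with a hard-coded range, e.g. feature 1 spans
# [1, 120], so obj -> (obj - 1) / (120 - 1) maps it into [0, 1].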
# Initialize instance
dert = KnnCancerClassifier('cancer/trainingData2.csv')
# Battery of normal classification
for k in ['static', 'squared', 'n-fold']:
for tech in ['euclidean', 'manhattan', 'chebyshev', 'canberra', 'braycurtis']:
dert.test_accuracy('cancer/testData2.csv', k, tech)
# Battery of weighted classification
for i in range(1,4):
for tech in ['euclidean', 'manhattan', 'chebyshev', 'canberra', 'braycurtis']:
dert.test_accuracy_weighted('cancer/testData2.csv', tech, i)
```
#### File: joseamolina/Machine_Learning_Techniques/main.py
```python
from math import log
class Model:
def __init__(self, name_positive, name_negative):
self.negative = {}
self.neg_P = {}
self.positive = {}
self.pos_P = {}
self.set_words = set()
self.reader_neg = open('dataFiles/train/trainNeg.txt', 'r', encoding="ISO-8859-1")
self.reader_pos = open('dataFiles/train/trainPos.txt', 'r', encoding="ISO-8859-1")
self.count_dic_neg = 0
self.count_dic_pos = 0
self.V = 0
self._train()
def _train(self):
for i in self.reader_neg:
lista = i.split()
for x in lista:
if x[0] != '@':
self.set_words.add(x)
value = self.negative.get(x)
if value == None:
self.negative[x] = 1
self.neg_P[x] = None
else:
self.negative[x] = value + 1
for i in self.reader_pos:
lista = i.split()
for x in lista:
if x[0] != '@':
self.set_words.add(x)
value = self.positive.get(x)
if value == None:
self.positive[x] = 1
self.pos_P[x] = None
else:
self.positive[x] = value + 1
self.V = len(self.set_words)
self.count_dic_neg = len(self.negative)
self.count_dic_pos = len(self.positive)
#prob_neg = count_dic_neg / (count_dic_neg + count_dic_pos)
for i in self.negative:
self.neg_P[i] = log((self.negative[i] + 1)/(self.count_dic_neg + self.V))
#prob_pos = count_dic_pos / (count_dic_neg + count_dic_pos)
for i in self.positive:
self.pos_P[i] = log((self.positive[i] + 1)/(self.count_dic_pos + self.V))
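# Laplace (add-one) smoothing: P(w|class) = (count(w) + 1) / (N_class + V), stored
# as log-probabilities so per-word scores can be summed when classifying a tweet.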
def _check_tweet(self, tweet):
tweet = tweet.split()
summer_pos = log(len(self.positive) / (len(self.positive) + len(self.negative)))
summer_neg = log(len(self.negative) / (len(self.positive) + len(self.negative)))
for x in range(1, len(tweet)):
if not self.pos_P.get(tweet[x]):
var = log(1 / (self.count_dic_pos + self.V))
else:
var = self.pos_P[tweet[x]]
summer_pos += var
if not self.neg_P.get(tweet[x]):
var = log(1 / (self.count_dic_neg + self.V))
else:
var = self.neg_P[tweet[x]]
summer_neg += var
return 1 if summer_pos > summer_neg else -1
def check_accuracy(self, testPos, testNeg):
reader_neg_test = open(testNeg, 'r', encoding="ISO-8859-1")
reader_pos_test = open(testPos, 'r', encoding="ISO-8859-1")
tester_neg = reader_neg_test.read()
lista_neg = tester_neg.split("@")
list_tweets_neg = list(map(lambda k: k.replace("\n", "").replace("\"", "").replace(".", ""), lista_neg))
tester_pos = reader_pos_test.read()
lista_pos = tester_pos.split("@")
list_tweets_pos = list(map(lambda k: k.replace("\n", "").replace("\"", "").replace(".", ""), lista_pos))
n_pos = 0
n_neg = 0
for i in list_tweets_neg:
rt = self._check_tweet(i)
if rt < 0:
n_neg += 1
else:
n_pos += 1
print('Test accuracy: {0}% correct predictions on the negative dataset'.format((100 * n_neg) / (n_neg + n_pos)))
n_pos = 0
n_neg = 0
for i in list_tweets_pos:
rt = self._check_tweet(i)
if rt < 0:
n_neg += 1
else:
n_pos += 1
print(len(self.set_words))
print('Test accuracy: {0}% correct predictions on the positive dataset'.format((100 * n_pos) / (n_neg + n_pos)))
``` |
{
"source": "JoseAndresMR/ABC",
"score": 3
} |
#### File: brain/util/nn_model.py
```python
import numpy as np
import os
import json
import random
import torch
import torch.nn as nn
import torch.nn.functional as F
def hidden_init(layer):
""" Initialize the hidden layer weights with random noise. """
fan_in = layer.weight.data.size()[0]
lim = 1. / np.sqrt(fan_in)
return -lim, lim
class NnModel(nn.Module):
def __init__(self, config, inputs_size, seed):
"""Initialize parameters and build model. Initialize the torch functions contained in the defintion.
Args:
config (dict): model definiton.
inputs_size (dict): Contains the sizes of input and output.
state_size (int): Dimension of each state.
action_size (int): Dimension of each action.
seed (int): Random seed. """
super(NnModel, self).__init__()
self.config = config
self.seed = torch.manual_seed(seed)
if type(self.config) == str:
try:
with open(self.config, 'r') as j:
self.config = json.load(j)
except FileNotFoundError:
with open(os.path.join(os.path.dirname(__file__), "..", "configs", 'predefined_models.json'), 'r') as j:
self.config = json.load(j)[self.config]
self.layers = nn.ModuleList()
for i, layer_config in enumerate(self.config["layers"]):
# if i == 0:
# input_size = state_size
# else:
# input_size = self.config[model_type]["layers"][i-1]["size"]
if "size" in layer_config.keys() and type(layer_config["size"]) == str:
layer_config["size"] = inputs_size[layer_config["size"]]
if layer_config["type"] == "BatchNorm1d":
self.layers.append(nn.BatchNorm1d(layer_config["size"]))
if layer_config["type"] == "linear":
input_size = self.config["layers"][i-1]["size"]
if "concat" in layer_config.keys():
for input in layer_config["concat"]:
input_size += inputs_size[input]
self.layers.append(nn.Linear(input_size, layer_config["size"]))
if layer_config["type"] == "conv2d":
in_channels = self.config["layers"][i]["in_channels"]
out_channels = self.config["layers"][i]["out_channels"]
kernel_size = tuple(self.config["layers"][i]["kernel_size"])
stride = tuple(self.config["layers"][i]["stride"])
padding = tuple(self.config["layers"][i]["padding"])
# size = ((in_channels - kernel_size + 2*padding)/stride) + 1
self.layers.append(nn.Conv2d(
in_channels, out_channels, kernel_size, stride=stride, padding=padding))
if layer_config["type"] == "rnn":
input_size = self.config["layers"][i-1]["size"]
hidden_size = self.config["layers"][i]["hidden_size"]
num_layers = self.config["layers"][i]["num_layers"]
nonlinearity = self.config["layers"][i]["nonlinearity"]
self.config["layers"][i]["size"] = hidden_size
self.layers.append(nn.RNN(input_size=input_size, hidden_size=hidden_size,
num_layers=num_layers, nonlinearity=nonlinearity,
batch_first=True))
if layer_config["type"] == "maxpool2d":
kernel_size = self.config["layers"][i]["kernel_size"]
stride = self.config["layers"][i]["stride"]
self.layers.append(nn.MaxPool2d(kernel_size, stride))
if layer_config["type"] == "softmax":
self.layers.append(nn.Softmax(dim=1))
if layer_config["type"] == "clamp": # TODO
self.layers.append(torch.clamp(min=1))
self.reset_parameters()
def reset_parameters(self):
""" Randomly itinialize the weights of the linear layers. """
for i, layer_config in enumerate(self.config["layers"]):
if layer_config["type"] == "linear":
self.layers[i].weight.data.uniform_(
*hidden_init(self.layers[i]))
def forward(self, inputs):
""" Process all the sequential operations that define the current model.
There can be concatenations after the main operations of each layer.
There can be auxiliar operations after the main operation.
Args:
inputs (dict): Current state and action. """
x = inputs["state"]
for i, layer_config in enumerate(self.config["layers"]):
# Prior concatenations
if "concat" in layer_config.keys():
for input in layer_config["concat"]:
x = torch.cat((x, inputs[input]), dim=1)
# Main layers
if layer_config["type"] == "rnn":
x, _ = self.layers[i](x)
else:
x = self.layers[i](x)
# Post processing
if "features" in layer_config.keys():
for feature in layer_config["features"]:
if feature == "unsqueeze":
x = x.unsqueeze(1) # TODO: enter as feature parameter
elif feature == "squeeze":
x = x.squeeze(1) # TODO: enter as feature parameter
elif feature == "leaky_relu":
x = F.leaky_relu(x)
elif feature == "relu":
x = F.relu(x)
elif feature == "tanh":
x = torch.tanh(x)
elif feature == "sigmoid":
x = torch.sigmoid(x)
elif feature == "flatten":
x = torch.flatten(x, 1)
if "clip" in layer_config.keys():
x = torch.clip(
x, layer_config["clip"][0], layer_config["clip"][1])
return x
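# --- Hypothetical config sketch (shape inferred from the parser above, not a
# config shipped with the repo) ---
# config = {"layers": [
#     {"type": "BatchNorm1d", "size": "state"},
#     {"type": "linear", "size": 64, "features": ["relu"]},
#     {"type": "linear", "size": "action", "features": ["tanh"]}]}
# model = NnModel(config, inputs_size={"state": 33, "action": 4}, seed=0)
# out = model({"state": torch.rand(8, 33)})  # -> tensor of shape (8, 4)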
```
#### File: brain/util/prioritized_replay_buffer.py
```python
from collections import namedtuple, deque
import numpy as np
import random
import torch
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class PrioritizedReplayBuffer:
"""Fixed-size buffer to store experience tuples."""
def __init__(self, action_size, buffer_size, batch_size, seed, PRIORITIZED_ER_a):
"""Initialize the parameters.
Args:
action_size (int): Dimension of the actions.
buffer_size (int): Maximum size of buffer.
batch_size (int): Size of each training batch.
"""
self.action_size = action_size
self.memory = deque(maxlen=buffer_size)
self.batch_size = batch_size
self.experience = namedtuple("Experience", field_names=[
"state", "action", "reward", "next_state", "done", "td"])
self.seed = random.seed(seed)
self.PRIORITIZED_ER_a = PRIORITIZED_ER_a
def add(self, state, action, reward, next_state, done, td):
"""Add a new experience to memory.
Args:
states (np.array): Current observations on the environment.
actions (np.array): Action already selected by the agent given State.
rewards (np.array): Reward received from the Environment when taken the action.
next_states (np.array): Next observations on the environment.
dones (list of bools): Whether the episode has finished in this step or not.
td (float): TD error of the transition, used as its sampling priority. """
e = self.experience(state, action, reward, next_state, done, td)
self.memory.append(e)
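# Sampling below follows proportional prioritization: P(i) = |td_i|^a / sum_k |td_k|^a
# with a = PRIORITIZED_ER_a, so transitions with larger TD error are replayed more often.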
def sample(self):
"""Randomly sample a batch of experiences from memory.
Returns:
states (np.array): Current observations on the environment.
actions (np.array): Action already selected by the agent given State.
rewards (np.array): Reward received from the Environment when taken the action.
next_states (np.array): Next observations on the environment.
dones (list of bools): Whether the episode has finished in this step or not. """
probs = np.array([abs(e.td)
for e in self.memory if e is not None], dtype=float)
probs = probs**self.PRIORITIZED_ER_a / \
sum(probs**self.PRIORITIZED_ER_a)
#probs = np.ones(len(self.memory))/len(self.memory)
chosen_indexes = random.choices(
range(len(self.memory)), k=self.batch_size, weights=probs)
experiences = [self.memory[i] for i in chosen_indexes]
probs = probs[chosen_indexes]
probs = torch.from_numpy(probs).float().to(device)
states = torch.from_numpy(
np.vstack([e.state for e in experiences if e is not None])).float().to(device)
actions = torch.from_numpy(
np.vstack([e.action for e in experiences if e is not None])).long().to(device)
rewards = torch.from_numpy(
np.vstack([e.reward for e in experiences if e is not None])).float().to(device)
next_states = torch.from_numpy(np.vstack(
[e.next_state for e in experiences if e is not None])).float().to(device)
dones = torch.from_numpy(np.vstack(
[e.done for e in experiences if e is not None]).astype(np.uint8)).float().to(device)
return (states, actions, rewards, next_states, dones, probs)
def __len__(self):
"""Return the current size of internal memory."""
return len(self.memory)
```
#### File: brainrl/management/test_bench.py
```python
import json
import os
import copy
import shutil
import optuna
from brainrl.management.experience import Experience
def set_value_in_dict_with_path(dic, keys, value):
for key in keys[:-1]:
if type(dic) == dict:
dic = dic.setdefault(key, {})
elif type(dic) == list:
dic = dic[key]
dic[keys[-1]] = value
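# Example: set_value_in_dict_with_path({"a": [{"b": 1}]}, ["a", 0, "b"], 2)
# mutates the structure in place to {"a": [{"b": 2}]}.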
class TestBench(object):
"""
Class to run batches of experiments following different settings.
"""
def __init__(self, config_folder_path, data_folder_path):
"""
Define initial configurations.
"""
self.config_path = config_folder_path
with open(os.path.join(self.config_path,"management",'config.json'), 'r') as j:
self.base_config = json.load(j)
# self.base_config = self.config
self.log_path = os.path.join(data_folder_path,"runs", self.base_config["id"])
# self.configs_paths = {"envs" : os.path.join(os.path.dirname(__file__),"..","environments","configs")}
# self.configs_paths["brain"] = os.path.join(os.path.dirname(__file__),"..","brain","configs")
# self.configs_paths["schedule"] = os.path.join(os.path.dirname(__file__),"schedule_configs")
# self.configs_paths["predefined_agents"] = os.path.join(os.path.dirname(__file__),"..","brain","rl_agents","predefined_agents")
# self.configs_paths["predefined_models"] = os.path.join(os.path.dirname(__file__),"..","brain","rl_agents","predefined_models")
# self.base_config = {}
# with open(os.path.join(self.configs_paths["envs"],'{}.json'.format(self.config["base_configs"]["envs"])), 'r') as j:
# self.base_config["envs"] = json.load(j)
# with open(os.path.join(self.configs_paths["brain"],'{}.json'.format(self.config["base_configs"]["brain"])), 'r') as j:
# self.base_config["brain"] = json.load(j)
# with open(os.path.join(self.configs_paths["schedule"],'{}.json'.format(self.config["base_configs"]["schedule"])), 'r') as j:
# self.base_config["schedule"] = json.load(j)
self.expandJsons(self.base_config)
def expandJsons(self, nested_dict, prepath=()):
if type(nested_dict) == list:
for i,v in enumerate(nested_dict):
path = prepath + (i,)
self.expandJsons(v, path) # recursive call
elif type(nested_dict) == dict:
for k, v in nested_dict.items():
path = prepath + (k,)
if type(v) == str and len(v) >= 5 and v[-5:] == ".json": # found json
with open(os.path.join(self.config_path,v), 'r') as j:
set_value_in_dict_with_path(self.base_config, path, json.load(j))
v = nested_dict[k]
if hasattr(v, 'items') or type(v) == list: # v is a dict or list
self.expandJsons(v, path) # recursive call
def experiences(self, max_iterations=999999999999, n_trials=15):
""" Run all experiments in sequence, changing the alterations from the prior base configuration. """
i = 0
for experiment_config in self.base_config["experiments"]:
def objective(trial):
for variable_name, variable_params in self.test_variables.items():
suggested_values = variable_params["suggested_values"]
x = trial.suggest_float(variable_name, suggested_values[0], suggested_values[1])
set_value_in_dict_with_path(current_config, variable_params["path"], x)
exp = Experience(current_config, current_log_path)
performance = exp.loop(max_iterations=max_iterations)
exp.finish()
return performance
current_log_path = os.path.join(self.log_path, str(i))
if os.path.isdir(current_log_path):
shutil.rmtree(current_log_path)
os.makedirs(current_log_path)
current_config = copy.deepcopy(self.base_config["base_configs"])
for field, value in experiment_config["config_alterations"].items():
current_config[field] = value
with open(os.path.join(current_log_path,"config.json"), 'w') as j:
json.dump(current_config, j)
self.test_variables = {}
for stack in experiment_config["values"]:
for variable, suggested_values in stack["values"].items():
self.test_variables[stack["prefix"] + variable] = {
"current_value" : None,
"path" : stack["path"] + [variable],
"suggested_values" : suggested_values
}
study = optuna.create_study(direction='minimize')
study.optimize(objective, n_trials=n_trials)
print(study.best_params) # e.g. {'x': 2.002108042}
i += 1
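# Hedged usage sketch (the folder paths are assumptions, not from the repo):
#     bench = TestBench(config_folder_path="configs", data_folder_path="data")
#     bench.experiences(max_iterations=10000, n_trials=15)
# Each experiment gets its own log directory and an Optuna study that
# minimizes the performance value returned by Experience.loop().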
``` |
{
"source": "JoseAndresMR/jamrepo",
"score": 2
} |
#### File: scripts/Agent/Agent_NAI.py
```python
import sys
import rospy
import std_msgs.msg
import time
import math
import numpy as np
import tf
# import rvo2
import rvo23d
import time
# from cv_bridge import CvBridge, CvBridgeError
from uav_abstraction_layer.srv import *
from geometry_msgs.msg import *
from sensor_msgs.msg import *
# import tensorflow as tflow
# from tensorflow.python.tools import inspesct_checkpoint as chkp
class Agent_NAI(object):
def __init__(self,ID):
# Local parameter initialization from arguments
self.ID = ID
self.smooth_path_mode = 0
self.algorithms_dicc = {}
self.GettingWorldDefinition() # Global ROS parameter initialization
# self.timer_start = time.time()
# Function to decide which algorithm is used for new velocity depending on parameters
def Guidance(self,desired_speed):
self.NeighborSelector(int(self.algorithms_dicc["orca3"]["N_neighbors_aware"])+1) # assumes an "orca3" entry is always present in algorithms_dicc
self.desired_speed = desired_speed
# print "loop time", time.time() - self.timer_start
# self.timer_start = time.time()
if "simple" in self.algorithms_dicc.keys():
return self.SimpleGuidance()
if "neural_network" in self.algorithms_dicc.keys():
return self.NeuralNetwork()
# elif self.algorithms_dicc[0] == "orca":
# return self.ORCA()
if "orca3" in self.algorithms_dicc.keys():
return self.ORCA3()
# return self.ORCA3_from_node()
# Function to set new velocity using a Neural Network
def NeuralNetwork(self):
# If the solver is a neural network, make some additional initializations
if not hasattr(self,'nn_loaded'):
import tensorflow as tflow # imported lazily; the module-level import is commented out above
self.nn_loaded = True
self.session = tflow.Session() # Start a TensorFlow session
self.learning_dataset_def = {"teacher_role" : self.role,
"teacher_algorithm" : "orca3",
"N_neighbors_aware" : self.algorithms_dicc["neural_network"]["N_neighbors_aware"]}
gml_folder_path = "/home/{0}/Libraries/gml".format("joseandresmr")
self.session_path = gml_folder_path + "/Sessions/{0}/{1}/{2}".format(self.learning_dataset_def["teacher_role"],self.learning_dataset_def["teacher_algorithm"],self.learning_dataset_def["N_neighbors_aware"])
# Import the metagraph from specific path. In the future will be better path management
new_saver = tflow.train.import_meta_graph(self.session_path + "/model.meta")
# Restore to the last checkpoint
new_saver.restore(self.session,tflow.train.latest_checkpoint(self.session_path))
# Initialize inputs and outputs from graph
self.graph_inputs = tflow.get_default_graph().get_tensor_by_name("single_input:0")
self.graph_outputs = tflow.get_default_graph().get_tensor_by_name("vel_posttreated:0")
# self.single_vel_logits_tensor = tflow.get_default_graph().get_tensor_by_name("single_vel_logits:0")
# Definition of neural network's inputs and outputs for every role.
# In the future this will be imported from a common place
if self.role == "path":
input_dicc = ['own_vel','goal_pose_rel','others_pos_rel','others_vel']
output_dicc = ["sel_vel"]
elif self.role == "agent_ad":
input_dicc = ['own_vel','goal_pose_rel','goal_vel','distance','others_pos_rel','others_vel']
output_dicc = ["sel_vel"]
elif self.role == "agent_ap":
input_dicc = ['own_vel','goal_pose_rel','goal_vel','others_pos_rel','others_vel']
output_dicc = ["sel_vel"]
# Initialization of pos and vel that will be taken as inputs
inputs = []
main_agent_pos = self.agents_data_list[self.ID-1].position.pose
main_agent_vel = self.agents_data_list[self.ID-1].velocity.twist.linear
# For every input in the dictionary, create it if needed and add it to inputs
for n_input in input_dicc:
# own vel
if n_input == "own_vel":
inputs += [main_agent_vel.x,main_agent_vel.y,main_agent_vel.z]
# own goal
elif n_input == "goal_pose_rel":
goal_lin_rel = self.OperatePoses(self.goal["pose"],main_agent_pos,'-').position
inputs += [goal_lin_rel.x,goal_lin_rel.y,goal_lin_rel.z]
elif n_input == "goal_vel":
inputs += [self.goal["vel"].linear.x,self.goal["vel"].linear.y,self.goal["vel"].linear.z]
elif n_input == "distance":
inputs.append(self.goal["dist"])
elif n_input == "others_pos_rel":
for n_neighbor in range(self.algorithms_dicc["neural_network"]["N_neighbors_aware"]):
if self.near_neighbors_sorted["types"][n_neighbor] == "agent":
n_agent = self.near_neighbors_sorted["ids"][n_neighbor]
other_pos_rel = self.OperatePoses(self.agents_data_list[n_agent].position.pose,main_agent_pos,'-').position
inputs += [other_pos_rel.x,other_pos_rel.y,other_pos_rel.z]
elif self.near_neighbors_sorted["types"][n_neighbor] == "obs":
n_obs = self.near_neighbors_sorted["ids"][n_neighbor]
obs_pose = self.obs_pose_list[n_obs]
other_pos_rel = self.OperatePoses(self.PoseFromArray(obs_pose),main_agent_pos,'-').position
inputs += [other_pos_rel.x,other_pos_rel.y,other_pos_rel.z]
elif n_input == "others_vel":
for n_neighbor in range(self.algorithms_dicc["neural_network"]["N_neighbors_aware"]):
if self.near_neighbors_sorted["types"][n_neighbor] == "agent":
n_agent = self.near_neighbors_sorted["ids"][n_neighbor]
other_vel_lin = self.agents_data_list[n_agent].velocity.twist.linear
inputs += [other_vel_lin.x,other_vel_lin.y,other_vel_lin.z]
elif self.near_neighbors_sorted["types"][n_neighbor] == "obs":
inputs += [0,0,0]
# Reshape the inputs to a single row
inputs_trans = np.asarray(inputs)
inputs_trans = inputs_trans.reshape((1, inputs_trans.shape[0]))
# Run session once to predict new selected velocity
selected_velocity = self.session.run(self.graph_outputs, feed_dict={self.graph_inputs:inputs_trans})
# Unpack the output into the defined variables
output_index = 0
for n_output in output_dicc:
if n_output == "sel_vel":
selected_velocity = selected_velocity[0][output_index:output_index+3]
# Construct the twist
new_velocity_twist = Twist(Vector3(selected_velocity[0],selected_velocity[1],selected_velocity[2]),Vector3(0,0,0))
output_index += 3
# print("nn",new_velocity_twist)
# self.ORCA3()
print(new_velocity_twist)
return new_velocity_twist
# Function to set velocity using ORCA on 3D
def ORCA3(self):
self.algorithms_dicc["orca3"]["N_neighbors_aware"] = int(self.algorithms_dicc["orca3"]["N_neighbors_aware"])
params_dicc = self.algorithms_dicc["orca3"]
# Give value to orca algorithm parameters
timeStep = params_dicc["timeStep"] # 1/60. float The time step of the simulation. Must be positive.
neighborDist = params_dicc["neighborDist"] # 1.5 float The maximal distance (center point to center point) to other agents the agent takes into account in the navigation
maxNeighbors = params_dicc["N_neighbors_aware"] # 5 size_t The maximal number of other agents the agent takes into account in the navigation
timeHorizon = params_dicc["timeHorizon"] # 2.5 float The minimal amount of time for which the agent's velocities that are computed by the simulation are safe with respect to other agents.
agent_radius = params_dicc["agent_radius"] # 2 float The radius of the agent. Must be non-negative
maxSpeed = params_dicc["maxSpeed"] # 0.4 float The maximum speed of the agent. Must be non-negative.
velocity = (1, 1, 1)
obs_radius = 0.5
# Create an object of orca3 solver class and give the above defined parameters
sim = rvo23d.PyRVOSimulator(params_dicc["timeStep"], params_dicc["neighborDist"], params_dicc["N_neighbors_aware"], params_dicc["timeHorizon"], params_dicc["agent_radius"], params_dicc["maxSpeed"], velocity)
# Select nearest Agents and Neighbors
orca_agent_list = []
prefered_velocity = self.SimpleGuidance() # Select a velocity straight to the goal, as if no neighbors existed
# Add to orca3 and to own list every agent created by own params
position_array = self.ArrayFromPose(self.agents_data_list[self.ID-1].position.pose)[0]
velocity_array = self.ArrayFromTwist(self.agents_data_list[self.ID-1].velocity.twist)[0]
prefered_velocity_array = self.ArrayFromTwist(prefered_velocity)[0]
orca_agent_list = [sim.addAgent((position_array[0],position_array[1],position_array[2]),
params_dicc["neighborDist"], params_dicc["N_neighbors_aware"], params_dicc["timeHorizon"], params_dicc["agent_radius"], params_dicc["maxSpeed"], (velocity_array[0],velocity_array[1],velocity_array[2]))]
# Set the preferred velocity of own agent as decided above
sim.setAgentPrefVelocity(orca_agent_list[0],(prefered_velocity_array[0],prefered_velocity_array[1],prefered_velocity_array[2]))
for n_neighbor in range(len(self.near_neighbors_sorted["ids"])):
if self.near_neighbors_sorted["types"][n_neighbor] == "agent":
n_agent = self.near_neighbors_sorted["ids"][n_neighbor]
position_array = self.ArrayFromPose(self.agents_data_list[n_agent].position.pose)[0]
velocity_array = self.ArrayFromTwist(self.agents_data_list[n_agent].velocity.twist)[0]
orca_agent_list.append(sim.addAgent((position_array[0],position_array[1],position_array[2]),
params_dicc["neighborDist"], params_dicc["N_neighbors_aware"], params_dicc["timeHorizon"], params_dicc["agent_radius"], params_dicc["maxSpeed"],
(velocity_array[0],velocity_array[1],velocity_array[2])))
sim.setAgentPrefVelocity(orca_agent_list[-1],(velocity_array[0],velocity_array[1],velocity_array[2]))
# Add to orca3 and to own list every obstacle created by own params
elif self.near_neighbors_sorted["types"][n_neighbor] == "obs":
n_obs = self.near_neighbors_sorted["ids"][n_neighbor]
obs_pose = self.obs_pose_list[n_obs]
orca_agent_list.append(sim.addAgent((obs_pose[0][0],obs_pose[0][1],obs_pose[0][2]),
params_dicc["neighborDist"], params_dicc["N_neighbors_aware"], params_dicc["timeHorizon"], obs_radius, 0.0, (0, 0, 0)))
sim.setAgentPrefVelocity(orca_agent_list[-1],(0,0,0))
sim.doStep() # Perform a step of orca3
selected_velocity = sim.getAgentVelocity(orca_agent_list[0]) # Extract own velocity decided by orca3
# Pack that velocity into a twist
new_velocity_twist = Twist(Vector3(0,0,0),Vector3(0,0,0))
new_velocity_twist.linear.x = selected_velocity[0]
new_velocity_twist.linear.y = selected_velocity[1]
new_velocity_twist.linear.z = selected_velocity[2]
# If heading use is selected, take the heading directly from the simple algorithm. In the future, apply a lower threshold.
if self.heading_use:
new_velocity_twist.angular.z = prefered_velocity.angular.z
return new_velocity_twist
def ORCA3_from_node(self):
if "agent_created" not in self.algorithms_dicc["orca3"].keys():
# rospy.wait_for_service('orca/add_agents')
# try:
# add_agent_prox = rospy.ServiceProxy('orca/add_agents', MESSAGEEEEEEEEEEEEEE )
# # model_name = "{0}_{1}".format(self.agent_models[n_agent],n_agent+1)
# add_agent_prox(model_name)
# time.sleep(0.1)
# except rospy.ServiceException, e:
# print "Service call failed: %s"%e
# print "error in add orca agent"
self.orca_optimal_velocity = Twist()
self.algorithms_dicc["orca3"]["agent_created"] = True
self.algorithms_dicc["orca3"]["prefered_velocity_pub"] = rospy.Publisher('/orca/agent_{}/prefered_velocity'.format(self.ID), TwistStamped, queue_size = 1)
def handle_orca_optimal_velocity(data):
self.orca_optimal_velocity = data.twist
rospy.Subscriber('/orca/agent_{}/optimal_velocity'.format(self.ID), TwistStamped, handle_orca_optimal_velocity)
prefered_velocity = self.SimpleGuidance()
prefered_velocity_stamped = TwistStamped()
prefered_velocity_stamped.twist = prefered_velocity
self.algorithms_dicc["orca3"]["prefered_velocity_pub"].publish(prefered_velocity_stamped)
time.sleep(0.1)
return self.orca_optimal_velocity
# Function to set velocity directly to goal
def SimpleGuidance(self):
if self.smooth_path_mode != 0:
return self.agents_data_list[self.ID-1].smooth_velocity
# Set algorithm params
desired_speed_at_goal = 0
aprox_distance = 3
# Create a vector from actual position to goal position
relative_distance = np.asarray([self.goal["pose"].position.x-self.agents_data_list[self.ID-1].position.pose.position.x,\
self.goal["pose"].position.y-self.agents_data_list[self.ID-1].position.pose.position.y,\
self.goal["pose"].position.z-self.agents_data_list[self.ID-1].position.pose.position.z])
distance_norm = np.linalg.norm(relative_distance) # Calculate its norm
# If the distance is shorter than the approach distance, reduce the velocity magnitude
if distance_norm < aprox_distance:
self.desired_speed = desired_speed_at_goal - (self.desired_speed - desired_speed_at_goal)\
+ ((self.desired_speed - desired_speed_at_goal) *2) / (1 + math.exp(-5*distance_norm/aprox_distance))
# Multiply each axis by the velocity module
relative_WP_linear=Vector3(relative_distance[0]/distance_norm*self.desired_speed,\
relative_distance[1]/distance_norm*self.desired_speed,\
relative_distance[2]/distance_norm*self.desired_speed)
# Transform it into a pose position and calculate its orientation in Euler angles
relative_WP_pose_degrees=Pose(relative_WP_linear,\
Vector3(np.arctan2(relative_WP_linear.z,relative_WP_linear.y),\
np.arctan2(relative_WP_linear.x,relative_WP_linear.z),\
np.arctan2(relative_WP_linear.y,relative_WP_linear.x))) #### TODO: verify angles
# Transform the orientation from Euler angles to quaternions
orientation_list = [self.agents_data_list[self.ID-1].position.pose.orientation.x, self.agents_data_list[self.ID-1].position.pose.orientation.y, self.agents_data_list[self.ID-1].position.pose.orientation.z, self.agents_data_list[self.ID-1].position.pose.orientation.w]
euler = tf.transformations.euler_from_quaternion(orientation_list)
# Create the velocity twist with calculated data
new_velocity_twist = Twist(relative_WP_pose_degrees.position,\
Vector3(0,\
0,\
relative_WP_pose_degrees.orientation.z-euler[2]))
# Thresholds imposition
# new_velocity_twist.linear.x = self.UpperLowerSaturation(new_velocity_twist.linear.x,1.5)
# new_velocity_twist.linear.y = self.UpperLowerSaturation(new_velocity_twist.linear.y,1.5)
# new_velocity_twist.angular.z = self.UpperLowerSaturation(new_velocity_twist.angular.z,0.5)
return new_velocity_twist
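# Illustrative check of the approach-speed scaling above (added, not in the
# original source): with desired_speed = 2, desired_speed_at_goal = 0 and
# aprox_distance = 3, the expression
#     0 - (2 - 0) + (2 - 0) * 2 / (1 + exp(-5 * d / 3))
# gives ~1.97 at d = 3 (nearly full speed) and exactly 0 at d = 0, so the
# agent decelerates smoothly to rest at the goal.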
# Function to set hovering velocity equal to zeros
def Hover(self):
new_velocity_twist = Twist(Vector3(0,0,0),Vector3(0,0,0))
return new_velocity_twist
# Function to saturate a value
def UpperLowerSaturation(self,value,threshold):
if value > threshold:
value = threshold
elif value < -threshold:
value = -threshold
return value
def NeighborSelector(self,N_neighbors_aware):
agent_distances = []
for n_agent in range(self.N_agents):
if n_agent != self.ID-1:
agent_distances.append(self.agents_data_list[n_agent].distance_rel2main)
else:
agent_distances.append(0)
obs_distances = self.agents_data_list[self.ID-1].obs_distances_rel2main
all_distances = agent_distances + obs_distances
self.near_neighbors_sorted = {"distances" : sorted(all_distances)[1:N_neighbors_aware]}
ids_list = []
types_list = []
for neigh in list(np.argsort(all_distances))[1:N_neighbors_aware]:
if neigh < self.N_agents:
neigh_type = "agent"
neigh_id = neigh
else:
neigh_type = "obs"
neigh_id = neigh - self.N_agents
types_list.append(neigh_type)
ids_list.append(neigh_id)
self.near_neighbors_sorted["ids"] = ids_list
self.near_neighbors_sorted["types"] = types_list
return self.near_neighbors_sorted
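# Shape of the result (illustrative): near_neighbors_sorted is a dict like
#     {"distances": [1.2, 3.4], "ids": [0, 2], "types": ["agent", "obs"]}
# where ids index into agents_data_list for "agent" entries and into
# obs_pose_list for "obs" entries.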
def PoseFromArray(self,Array):
quat = tf.transformations.quaternion_from_euler(Array[1][0],Array[1][1],Array[1][2])
return Pose(Point(Array[0][0],Array[0][1],Array[0][2]),Quaternion(quat[0],quat[1],quat[2],quat[3]))
def ArrayFromPose(self,pose):
euler = [0,0,0]
# euler = tf.transformations.euler_from_quaternion(pose.orientation.x,pose.orientation.y,pose.orientation.z,pose.orienation.w)
return [[pose.position.x,pose.position.y,pose.position.z],[euler[0],euler[1],euler[2]]]
def TwistFromArray(self,Array):
return Twist(Vector3(Array[0][0],Array[0][1],Array[0][2]),Vector3(Array[1][0],Array[1][1],Array[1][2]))
def ArrayFromTwist(self,twist):
return [[twist.linear.x,twist.linear.y,twist.linear.z],[twist.angular.x,twist.angular.y,twist.angular.z]]
def OperatePoses(self,pose1,pose2,op = '+'):
if op == '+':
aux = 1
elif op == '-':
aux = -1
result_pose = Pose()
result_pose.position.x = pose1.position.x+aux*pose2.position.x
result_pose.position.y = pose1.position.y+aux*pose2.position.y
result_pose.position.z = pose1.position.z+aux*pose2.position.z
result_pose.orientation.x = pose1.orientation.x+aux*pose2.orientation.x
result_pose.orientation.y = pose1.orientation.y+aux*pose2.orientation.y
result_pose.orientation.z = pose1.orientation.z+aux*pose2.orientation.z
result_pose.orientation.w = pose1.orientation.w+aux*pose2.orientation.w
return result_pose
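# Note (added): component-wise addition/subtraction of quaternions, as done
# above, does not compose rotations; it is only rough bookkeeping. A proper
# relative orientation would use quaternion multiplication, e.g.:
#     q_rel = tf.transformations.quaternion_multiply(
#         [p1.orientation.x, p1.orientation.y, p1.orientation.z, p1.orientation.w],
#         tf.transformations.quaternion_inverse(
#             [p2.orientation.x, p2.orientation.y, p2.orientation.z, p2.orientation.w]))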
def algorithm_control(self, name, action, params_dicc):
if action == "delete":
self.algorithms_dicc.pop(name)
elif action == "set":
if name in self.algorithms_dicc.keys():
self.algorithms_dicc[name].update(params_dicc)
else:
self.algorithms_dicc[name] = params_dicc
# Function to get Global ROS parameters
def GettingWorldDefinition(self):
self.hyperparameters = rospy.get_param('magna_hyperparameters')
self.mission_name = self.hyperparameters['mission']
self.submission_name = self.hyperparameters['submission']
self.world_name = self.hyperparameters['world']
self.subworld_name = self.hyperparameters['subworld']
self.n_simulation = self.hyperparameters['n_simulation']
self.N_agents = self.hyperparameters['N_agents']
self.N_obs = self.hyperparameters['N_obs']
self.n_dataset = self.hyperparameters['n_dataset']
self.obs_pose_list = self.hyperparameters['obs_pose_list']
self.heading_use = self.hyperparameters['heading_use']
```
#### File: scripts/Various/ADSB.py
```python
import numpy as np
import pyModeS as pms # missing in the original; `pms` is used throughout this file
from std_msgs.msg import String,Header
from geometry_msgs.msg import TwistStamped,Twist,Vector3,PoseStamped,Pose,Point,Quaternion
class ADSB(object):
def __init__(self,ICAO = "40621D",with_ref = True,pos_ref = [0,0]):
self.ICAO = ICAO
self.with_ref = with_ref
self.pos_ref = pos_ref
# Default to ADS-B version 2 with zeroed NIC supplements; these are
# refreshed when an operation-status message (TC = 31) is decoded.
self.version = 2
self.nic_s = 0
self.nic_a = 0
self.nic_b = 0
self.nic_c = 0
def incoming_msg(self,msg,msg_aux = None,even_pos = 0):
if pms.adsb.icao(msg) == self.ICAO and pms.crc(msg, encode=False) == "000000000000000000000000":
h = Header()
typecode = pms.adsb.typecode(msg)
print(typecode)
### Aircraft Identification, TC = 1 - 4
if typecode in range(1,5): # range end is exclusive, so this covers TC 1-4
self.callsign = pms.adsb.callsign(msg)
TC = "callsign"
extracted_info = [self.callsign]
### Surface position, TC = 5 - 8
if typecode in range(5,9): # covers TC 5-8
if self.version == 1:
if self.nic_s != None:
self.nic = pms.adsb.nic_v1(msg,self.nic_s)
elif self.version == 2:
if self.nic_a != None and self.nic_b != None:
self.nic = pms.adsb.nic_v2(msg,self.nic_a,self.nic_b,self.nic_c)
TC = "surface_position"
extracted_info = [self.nic]
### Airborne position (w/ Baro Altitude), TC = 9 - 18
if typecode in range(9,19): # covers TC 9-18
if self.version == 1:
if self.nic_s != None:
self.nic = pms.adsb.nic_v1(msg,self.nic_s)
elif self.version == 2:
self.nic_b = pms.adsb.nic_b(msg)
if self.nic_a != None and self.nic_b != None:
self.nic = pms.adsb.nic_v2(msg,self.nic_a,self.nic_b,self.nic_c)
if self.with_ref == True:
position = pms.adsb.position_with_ref(msg, self.pos_ref[0],self.pos_ref[1])
airborne_position = pms.adsb.airborne_position_with_ref(msg, self.pos_ref[0],self.pos_ref[1])
surface_position = pms.adsb.surface_position_with_ref(msg, self.pos_ref[0],self.pos_ref[1])
altitude = pms.adsb.altitude(msg)
if self.with_ref == False:
# NOTE (added): t_even and t_odd below are assumed to be the reception
# timestamps of the even/odd frames; they are undefined in this snippet
# and must be supplied by the caller.
if even_pos == 0:
msg_even = msg
msg_odd = msg_aux
elif even_pos == 1:
msg_even = msg_aux
msg_odd = msg
position = pms.adsb.position(msg_even, msg_odd, t_even, t_odd, lat_ref=None, lon_ref=None)
airborne_position = pms.adsb.airborne_position(msg_even, msg_odd, t_even, t_odd)
surface_position = pms.adsb.surface_position(msg_even, msg_odd, t_even, t_odd, self.pos_ref[0],self.pos_ref[1])
altitude = pms.adsb.altitude(msg_even)
self.position = PoseStamped(h,Pose(Point(position[0],position[1],altitude),Quaternion(0,0,0,0)))
self.airborne_position = PoseStamped(h,Pose(Point(airborne_position[0],airborne_position[1],altitude),Quaternion(0,0,0,0)))
self.surface_position = PoseStamped(h,Pose(Point(surface_position[0],surface_position[1],altitude),Quaternion(0,0,0,0)))
TC = "airborne_position_baro_altitude"
extracted_info = [self.position,self.airborne_position,self.surface_position,self.nic]
### Airborne velocities, TC = 19
if typecode == 19:
velocity = pms.adsb.velocity(msg) # handles both surface & airborne messages. Returns: speed (kt), heading (deg), vertical rate (ft/min)
heading_rad = np.radians(velocity[1]) # np.cos/np.sin expect radians, not degrees
self.velocity = TwistStamped(h,Twist(Vector3(velocity[0]*np.cos(heading_rad),velocity[0]*np.sin(heading_rad),velocity[2]),\
Vector3(0,0,0)))
self.nac_v = pms.adsb.nac_v(msg)
TC = "airborne_velocity"
extracted_info = [self.velocity,self.nac_v]
### Airborne position (w/ GNSS Height), TC = 20 - 22
if typecode in range(20,23): # covers TC 20-22
if self.version == 1:
if self.nic_s != None:
self.nic = pms.adsb.nic_v1(msg,self.nic_s)
elif self.version == 2:
if self.nic_a != None and self.nic_b != None:
self.nic = pms.adsb.nic_v2(msg,self.nic_a,self.nic_b,self.nic_c)
TC = "airborne_position_GNSS_Height"
extracted_info = [self.nic]
### Aircraft status, TC = 28
if typecode == 28:
pass
### Target state and status information, TC = 29
if typecode == 29:
self.sil = pms.adsb.sil(msg,self.version)
self.nac_p = pms.adsb.nac_p(msg)
TC = "status"
extracted_info = [self.sil, self.nac_p]
### Aircraft operation status, TC = 31
if typecode == 31:
self.version = pms.adsb.version(msg)
self.sil = pms.adsb.sil(msg, self.version) # was a copy-paste of version() in the original
self.nac_p = pms.adsb.nac_p(msg)
if self.version == 1:
self.nic_s = pms.adsb.nic_s(msg)
elif self.version == 2:
self.nic_a, self.nic_c = pms.adsb.nic_a_and_c(msg)
TC = "status"
extracted_info = [self.sil, self.nac_p]
return TC, extracted_info
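# Hedged usage sketch (the hex frame is a sample identification message from
# the pyModeS documentation; ICAO and reference position are illustrative,
# and the crc comparison above assumes a pyModeS version that returns the
# remainder as a bit string):
#     decoder = ADSB(ICAO="4840D6", with_ref=True, pos_ref=[51.0, 3.0])
#     tc, info = decoder.incoming_msg("8D4840D6202CC371C32CE0576098")
#     # tc == "callsign", info == [decoded callsign]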
``` |
{
"source": "Joseane-Guedes/Sudoku",
"score": 4
} |
#### File: Joseane-Guedes/Sudoku/Sudoku.py
```python
import random
def sudoku():
"""A função sudoku tem como objetivo gerar um sudoku aleatório e completo e armazenar o valor na variável em que foi chamado a função, irá armazenar em apenas uma variável, uma lista com 9 listas contendo os números de 1 a 9 em casa uma, de forma que complete um sudoku, sendo o primeiro índice a primeira linha, o segundo índice a segunda linha e assim por diante"""
n = (1, 2, 3, 4, 5, 6, 7, 8, 9)
c = 0
loop = 0
# each row of the sudoku
l1 = [0,0,0,0,0,0,0,0,0]
l2 = [0,0,0,0,0,0,0,0,0]
l3 = [0,0,0,0,0,0,0,0,0]
l4 = [0,0,0,0,0,0,0,0,0]
l5 = [0,0,0,0,0,0,0,0,0]
l6 = [0,0,0,0,0,0,0,0,0]
l7 = [0,0,0,0,0,0,0,0,0]
l8 = [0,0,0,0,0,0,0,0,0]
l9 = [0,0,0,0,0,0,0,0,0]
nl = 0
while nl < 10:
# drawing the numbers of each row
for i in range(0, 9): # row 1
l1[i] = random.choice(n)
veri = i
while l1[veri] in l1[:veri]:
l1[i] = random.choice(n)
# storing the data to compare against in the next draws
bloco1 = l1[0:3]
bloco2 = l1[3:6]
bloco3 = l1[6:9]
i = 0
loop = 0
while i < 9: # row 2
l2[i] = random.choice(n)
veri = i
c = 0
if i < 3:
while l2[veri] in l2[:veri] or l2[veri] in bloco1:
l2[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 3 and i <6:
while l2[veri] in l2[:veri] or l2[veri] in bloco2:
l2[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 6 and i < 9:
while l2[veri] in l2[:veri] or l2[veri] in bloco3:
l2[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
i = i + 1
if c > 100: # if it gets stuck in a loop, the row is restarted
c = c - c
i = 0
if loop > 1000:
break
bloco1 = bloco1 + l2[0:3]
bloco2 = bloco2 + l2[3:6]
bloco3 = bloco3 + l2[6:9]
i = 0
loop = 0
while i < 9: # row 3
l3[i] = random.choice(n)
veri = i
c = 0
if i < 3:
while l3[veri] in l3[:veri] or l3[veri] in bloco1:
l3[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 3 and i <6:
while l3[veri] in l3[:veri] or l3[veri] in bloco2:
l3[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 6 and i < 9:
while l3[veri] in l3[:veri] or l3[veri] in bloco3:
l3[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
i = i + 1
if c > 100:
c = c - c
i = 0
if loop > 1000:
break
bloco1 = bloco1 + l3[0:3]
bloco2 = bloco2 + l3[3:6]
bloco3 = bloco3 + l3[6:9]
c1 = [l1[0], l2[0], l3[0]]
c2 = [l1[1], l2[1], l3[1]]
c3 = [l1[2], l2[2], l3[2]]
c4 = [l1[3], l2[3], l3[3]]
c5 = [l1[4], l2[4], l3[4]]
c6 = [l1[5], l2[5], l3[5]]
c7 = [l1[6], l2[6], l3[6]]
c8 = [l1[7], l2[7], l3[7]]
c9 = [l1[8], l2[8], l3[8]]
llista = [c1, c2, c3, c4, c5, c6, c7, c8, c9]
i = 0
loop = 0
while i < 9: # row 4
l4[i] = random.choice(n)
veri = i
c = 0
if i < 3:
while l4[veri] in l4[:veri] or l4[veri] in llista[veri]:
l4[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 3 and i <6:
while l4[veri] in l4[:veri] or l4[veri] in llista[veri]:
l4[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 6 and i < 9:
while l4[veri] in l4[:veri] or l4[veri] in llista[veri]:
l4[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
i = i + 1
if c > 100:
c = c - c
i = 0
if loop > 1000:
break
bloco4 = l4[0:3]
bloco5 = l4[3:6]
bloco6 = l4[6:9]
c1 = c1 + [l4[0]]
c2 = c2 + [l4[1]]
c3 = c3 + [l4[2]]
c4 = c4 + [l4[3]]
c5 = c5 + [l4[4]]
c6 = c6 + [l4[5]]
c7 = c7 + [l4[6]]
c8 = c8 + [l4[7]]
c9 = c9 + [l4[8]]
llista = [c1, c2, c3, c4, c5, c6, c7, c8, c9]
i = 0
loop = 0
while i < 9: # row 5
l5[i] = random.choice(n)
veri = i
c = 0
if i < 3:
while l5[veri] in l5[:veri] or l5[veri] in bloco4 or l5[veri] in llista[veri]:
l5[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 3 and i <6:
while l5[veri] in l5[:veri] or l5[veri] in bloco5 or l5[veri] in llista[veri]:
l5[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 6 and i < 9:
while l5[veri] in l5[:veri] or l5[veri] in bloco6 or l5[veri] in llista[veri]:
l5[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
i = i + 1
if c > 100:
c = c - c
i = 0
if loop > 1000:
break
bloco4 = bloco4 + l5[0:3]
bloco5 = bloco5 + l5[3:6]
bloco6 = bloco6 + l5[6:9]
c1 = c1 + [l5[0]]
c2 = c2 + [l5[1]]
c3 = c3 + [l5[2]]
c4 = c4 + [l5[3]]
c5 = c5 + [l5[4]]
c6 = c6 + [l5[5]]
c7 = c7 + [l5[6]]
c8 = c8 + [l5[7]]
c9 = c9 + [l5[8]]
llista = [c1, c2, c3, c4, c5, c6, c7, c8, c9]
i = 0
loop = 0
while i < 9: # row 6
l6[i] = random.choice(n)
veri = i
c = 0
if i < 3:
while l6[veri] in l6[:veri] or l6[veri] in bloco4 or l6[veri] in llista[veri]:
l6[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 3 and i <6:
while l6[veri] in l6[:veri] or l6[veri] in bloco5 or l6[veri] in llista[veri]:
l6[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 6 and i < 9:
while l6[veri] in l6[:veri] or l6[veri] in bloco6 or l6[veri] in llista[veri]:
l6[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
i = i + 1
if c > 100:
c = c - c
i = 0
if loop > 1000:
break
bloco4 = bloco4 + l6[0:3]
bloco5 = bloco5 + l6[3:6]
bloco6 = bloco6 + l6[6:9]
c1 = c1 + [l6[0]]
c2 = c2 + [l6[1]]
c3 = c3 + [l6[2]]
c4 = c4 + [l6[3]]
c5 = c5 + [l6[4]]
c6 = c6 + [l6[5]]
c7 = c7 + [l6[6]]
c8 = c8 + [l6[7]]
c9 = c9 + [l6[8]]
llista = [c1, c2, c3, c4, c5, c6, c7, c8, c9]
i = 0
loop = 0
while i < 9: # row 7
l7[i] = random.choice(n)
veri = i
c = 0
if i < 3:
while l7[veri] in l7[:veri] or l7[veri] in llista[veri]:
l7[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 3 and i <6:
while l7[veri] in l7[:veri] or l7[veri] in llista[veri]:
l7[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 6 and i < 9:
while l7[veri] in l7[:veri] or l7[veri] in llista[veri]:
l7[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
i = i + 1
if c > 100:
c = c - c
i = 0
if loop > 1000:
break
bloco7 = l7[0:3]
bloco8 = l7[3:6]
bloco9 = l7[6:9]
c1 = c1 + [l7[0]]
c2 = c2 + [l7[1]]
c3 = c3 + [l7[2]]
c4 = c4 + [l7[3]]
c5 = c5 + [l7[4]]
c6 = c6 + [l7[5]]
c7 = c7 + [l7[6]]
c8 = c8 + [l7[7]]
c9 = c9 + [l7[8]]
llista = [c1, c2, c3, c4, c5, c6, c7, c8, c9]
i = 0
loop = 0
while i < 9: # row 8
l8[i] = random.choice(n)
veri = i
c = 0
if i < 3:
while l8[veri] in l8[:veri] or l8[veri] in bloco7 or l8[veri] in llista[veri]:
l8[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 3 and i <6:
while l8[veri] in l8[:veri] or l8[veri] in bloco8 or l8[veri] in llista[veri]:
l8[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 6 and i < 9:
while l8[veri] in l8[:veri] or l8[veri] in bloco9 or l8[veri] in llista[veri]:
l8[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
i = i + 1
if c > 100:
c = c - c
i = 0
if loop > 1000:
break
bloco7 = bloco7 + l8[0:3]
bloco8 = bloco8 + l8[3:6]
bloco9 = bloco9 + l8[6:9]
c1 = c1 + [l8[0]]
c2 = c2 + [l8[1]]
c3 = c3 + [l8[2]]
c4 = c4 + [l8[3]]
c5 = c5 + [l8[4]]
c6 = c6 + [l8[5]]
c7 = c7 + [l8[6]]
c8 = c8 + [l8[7]]
c9 = c9 + [l8[8]]
llista = [c1, c2, c3, c4, c5, c6, c7, c8, c9]
i = 0
loop = 0
while i < 9: # row 9
l9[i] = random.choice(n)
veri = i
c = 0
if i < 3:
while l9[veri] in l9[:veri] or l9[veri] in bloco7 or l9[veri] in llista[veri]:
l9[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 3 and i <6:
while l9[veri] in l9[:veri] or l9[veri] in bloco8 or l9[veri] in llista[veri]:
l9[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
c = 0
if i >= 6 and i < 9:
while l9[veri] in l9[:veri] or l9[veri] in bloco9 or l9[veri] in llista[veri]:
l9[i] = random.choice(n)
c = c + 1
loop = loop + 1
if c > 100:
i = 0
break
i = i + 1
if c > 100:
c = c - c
i = 0
if loop > 1000:
break
bloco7 = bloco7 + l9[0:3]
bloco8 = bloco8 + l9[3:6]
bloco9 = bloco9 + l9[6:9]
c1 = c1 + [l9[0]]
c2 = c2 + [l9[1]]
c3 = c3 + [l9[2]]
c4 = c4 + [l9[3]]
c5 = c5 + [l9[4]]
c6 = c6 + [l9[5]]
c7 = c7 + [l9[6]]
c8 = c8 + [l9[7]]
c9 = c9 + [l9[8]]
nl = nl + 1
if loop <= 1000:
break
loop = 0
# complete sudoku
sc = [l1, l2, l3, l4, l5, l6, l7, l8, l9]
return sc
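# Shape of the result (illustrative): sc[r][c] is the digit at row r, column c,
# e.g. sc[0] might be [5, 3, 4, 6, 7, 8, 9, 1, 2].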
def sudoku_i(sc, dificuldade):
"""The sudoku_i function ("incomplete sudoku") receives the sudoku generated by the first function plus a difficulty level, and hides some numbers so that the puzzle can be played.
sc: the complete sudoku
dificuldade: 'facil', 'medio', 'dificil'
"""
# data that will be displayed
sl1 = sc[0][:]
sl2 = sc[1][:]
sl3 = sc[2][:]
sl4 = sc[3][:]
sl5 = sc[4][:]
sl6 = sc[5][:]
sl7 = sc[6][:]
sl8 = sc[7][:]
sl9 = sc[8][:]
nlista = [sl1, sl2, sl3, sl4, sl5, sl6, sl7, sl8, sl9]
i = 0
n = (0, 1, 2, 3, 4, 5, 6, 7, 8)
# note: random.choice may draw the same index twice, so each row hides
# *up to* 5/6/7 cells for facil/medio/dificil
if dificuldade == 'facil':
for conteL in nlista:
for c in range(0, 5):
facil = random.choice(n)
conteL[facil] = ''
nlista[i] = conteL
i = i + 1
if dificuldade == 'medio':
for conteL in nlista:
for c in range(0, 6):
medio = random.choice(n)
conteL[medio] = ''
nlista[i] = conteL
i = i + 1
if dificuldade == 'dificil':
for conteL in nlista:
for c in range(0, 7):
dificil = random.choice(n)
conteL[dificil] = ''
nlista[i] = conteL
i = i + 1
return nlista
def verificar_sudoku(sv):
"""The verificar_sudoku function receives a sudoku filled in by the user and checks whether it is correct, returning True if correct and False otherwise.
sv: the sudoku to be checked
"""
# all rows
l1 = sv[0]
l2 = sv[1]
l3 = sv[2]
l4 = sv[3]
l5 = sv[4]
l6 = sv[5]
l7 = sv[6]
l8 = sv[7]
l9 = sv[8]
# all blocks
b1 = [l1[0], l1[1], l1[2], l2[0], l2[1], l2[2], l3[0], l3[1], l3[2]]
b2 = [l1[3], l1[4], l1[5], l2[3], l2[4], l2[5], l3[3], l3[4], l3[5]]
b3 = [l1[6], l1[7], l1[8], l2[6], l2[7], l2[8], l3[6], l3[7], l3[8]]
b4 = [l4[0], l4[1], l4[2], l5[0], l5[1], l5[2], l6[0], l6[1], l6[2]]
b5 = [l4[3], l4[4], l4[5], l5[3], l5[4], l5[5], l6[3], l6[4], l6[5]]
b6 = [l4[6], l4[7], l4[8], l5[6], l5[7], l5[8], l6[6], l6[7], l6[8]]
b7 = [l7[0], l7[1], l7[2], l8[0], l8[1], l8[2], l9[0], l9[1], l9[2]]
b8 = [l7[3], l7[4], l7[5], l8[3], l8[4], l8[5], l9[3], l9[4], l9[5]]
b9 = [l7[6], l7[7], l7[8], l8[6], l8[7], l8[8], l9[6], l9[7], l9[8]]
# all columns
c1 = [l1[0], l2[0], l3[0], l4[0], l5[0], l6[0], l7[0], l8[0], l9[0]]
c2 = [l1[1], l2[1], l3[1], l4[1], l5[1], l6[1], l7[1], l8[1], l9[1]]
c3 = [l1[2], l2[2], l3[2], l4[2], l5[2], l6[2], l7[2], l8[2], l9[2]]
c4 = [l1[3], l2[3], l3[3], l4[3], l5[3], l6[3], l7[3], l8[3], l9[3]]
c5 = [l1[4], l2[4], l3[4], l4[4], l5[4], l6[4], l7[4], l8[4], l9[4]]
c6 = [l1[5], l2[5], l3[5], l4[5], l5[5], l6[5], l7[5], l8[5], l9[5]]
c7 = [l1[6], l2[6], l3[6], l4[6], l5[6], l6[6], l7[6], l8[6], l9[6]]
c8 = [l1[7], l2[7], l3[7], l4[7], l5[7], l6[7], l7[7], l8[7], l9[7]]
c9 = [l1[8], l2[8], l3[8], l4[8], l5[8], l6[8], l7[8], l8[8], l9[8]]
# lista_linhas = list with all rows
lista_linhas = l1, l2, l3, l4, l5, l6, l7, l8, l9
# lista_colunas = list with all columns
lista_colunas = c1, c2, c3, c4, c5, c6, c7, c8, c9
# lista_blocos = list with all blocks
lista_blocos = b1, b2, b3, b4, b5, b6, b7, b8, b9
ctd = 0
ctdb = 0
cb = 0
correto = True
for linha in lista_linhas:
for num in linha:
if lista_colunas[ctd].count(num) > 1 or lista_blocos[ctdb].count(num) > 1 or linha.count(num) > 1:
correto = False
ctd += 1
else:
ctd += 1
if ctd == 3:
ctdb += 1
if ctd == 6:
ctdb += 1
if ctd == 8:
cb += 1
ctd -= ctd
ctdb = 0
if cb > 3:
ctdb = 3
if cb > 5:
ctdb = 6
return correto
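# Hedged usage sketch (variable names are illustrative):
#     completo = sudoku()                      # full 9x9 solution
#     tabuleiro = sudoku_i(completo, 'facil')  # playable board with blanks ('')
#     verificar_sudoku(completo)               # -> True for a valid solution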
``` |
{
"source": "joseangel-sc/conducto",
"score": 2
} |
#### File: conducto/api/pipeline.py
```python
import boto3
from .. import api
from ..shared import constants, types as t, request_utils
from . import api_utils
class Pipeline:
def __init__(self):
self.config = api.Config()
self.url = self.config.get_url()
############################################################
# public methods
############################################################
def create(
self, token: t.Token, command: str, cloud: bool, **kwargs
) -> t.PipelineId:
from ..pipeline import Node
headers = api_utils.get_auth_headers(token)
in_data = {"command": command, "cloud": cloud, **kwargs}
# set the executable
if "executable" not in kwargs:
# conducto.internal has limited availability
import conducto.internal.host_detection as hostdet
in_data["executable"] = hostdet.host_exec()
if "tags" in kwargs:
in_data["tags"] = Node.sanitize_tags(in_data["tags"])
response = request_utils.post(
self.url + "/program/program", headers=headers, data=in_data
)
out_data = api_utils.get_data(response)
return t.PipelineId(out_data["pipeline_id"])
def archive(self, token: t.Token, pipeline_id: t.PipelineId):
headers = api_utils.get_auth_headers(token)
url = f"{self.url}/program/program/{pipeline_id}"
response = request_utils.delete(url, headers=headers)
api_utils.get_data(response)
def get(self, token: t.Token, pipeline_id: t.PipelineId) -> dict:
headers = api_utils.get_auth_headers(token)
response = request_utils.get(
self.url + f"/program/program/{pipeline_id}", headers=headers
)
return api_utils.get_data(response)
def list(self, token: t.Token) -> list:
headers = api_utils.get_auth_headers(token)
response = request_utils.get(
self.url + "/program/program/list", headers=headers
)
return api_utils.get_data(response)
def perms(self, token: t.Token, pipeline_id: t.PipelineId) -> set:
headers = api_utils.get_auth_headers(token)
response = request_utils.get(
self.url + f"/program/program/{pipeline_id}/perms", headers=headers
)
data = api_utils.get_data(response)
return data["perms"] if "perms" in data else []
def update(
self, token: t.Token, pipeline_id: t.PipelineId, params: dict, *args, **kwargs
):
headers = api_utils.get_auth_headers(token)
keys = args if args else params.keys()
if len(keys) == 0:
raise Exception("No params to update on pipeline!")
data = {k: params[k] for k in keys}
if "tags" in data:
from ..pipeline import Node
data["tags"] = Node.sanitize_tags(data["tags"])
if "extra_secret" in kwargs:
headers["Service-Secret"] = kwargs["extra_secret"]
response = request_utils.put(
self.url + f"/program/program/{pipeline_id}", headers=headers, data=data
)
api_utils.get_data(response)
def save_serialization(
self, token: t.Token, pipeline_id: t.PipelineId, serialization: str
):
pipeline = self.get(token, pipeline_id)
put_serialization_s3(token, pipeline["program_path"], serialization)
def touch(self, token: t.Token, pipeline_id: t.PipelineId):
headers = api_utils.get_auth_headers(token)
response = request_utils.put(
self.url + f"/program/program/{pipeline_id}/touch", headers=headers
)
api_utils.get_data(response)
def sleep_standby(self, token: t.Token, pipeline_id: t.PipelineId):
pipeline = self.get(token, pipeline_id)
pl = constants.PipelineLifecycle
if pipeline["status"] == pl.STANDBY_CLOUD:
self.update(token, pipeline_id, {"status": pl.SLEEPING_CLOUD}, "status")
else:
# TODO: think about error
pass
def get_history(self, token: t.Token, params: dict):
headers = api_utils.get_auth_headers(token)
response = request_utils.get(
self.url + "/program/program/history", headers=headers, params=params
)
return api_utils.get_data(response)
def _get_s3_split(path):
s3Prefix = "s3://"
bucketKey = path[len(s3Prefix) :]
bucket, key = bucketKey.split("/", 1)
return bucket, key
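# Example (illustrative):
#     _get_s3_split("s3://my-bucket/path/to/key") -> ("my-bucket", "path/to/key")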
def put_serialization_s3(token, s3path, serialization):
bucket, key = _get_s3_split(s3path)
# log.log("S3 bucket={}, key={}".format(bucket, key))
auth = api.Auth()
token = auth.get_refreshed_token(token)
creds = auth.get_credentials(token)
session = boto3.Session(
aws_access_key_id=creds["AccessKeyId"],
aws_secret_access_key=creds["SecretKey"],
aws_session_token=creds["SessionToken"],
)
s3 = session.client("s3")
s3.put_object(Body=serialization.encode("utf-8"), Bucket=bucket, Key=key)
AsyncPipeline = api_utils.async_helper(Pipeline)
```
#### File: conducto/internal/build.py
```python
import os
import time
import subprocess
import functools
import pipes
import shutil
import socket
import sys
from http import HTTPStatus as hs
from conducto import api
from conducto.shared import client_utils, constants, log, types as t
import conducto.internal.host_detection as hostdet
@functools.lru_cache(None)
def docker_desktop_23():
# Distinguish Docker Desktop 2.3+ from 2.2.x: on 2.2.x the host drives are
# visible under /host_mnt from inside a container, so the ls succeeds.
try:
kwargs = dict(check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Docker Desktop 2.2.x
lsdrives = "docker run --rm -v /:/mnt/external alpine ls /mnt/external/host_mnt"
subprocess.run(lsdrives, shell=True, **kwargs)
return False
except subprocess.CalledProcessError:
return True
@functools.lru_cache(None)
def docker_available_drives():
import string
if hostdet.is_wsl():
kwargs = dict(check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
drives = []
for drive in string.ascii_lowercase:
drivedir = f"{drive}:\\"
try:
subprocess.run(f"wslpath -u {drivedir}", shell=True, **kwargs)
drives.append(drive)
except subprocess.CalledProcessError:
pass
else:
from ctypes import windll # Windows only
# get all drives; bit i of the mask corresponds to drive letters[i]
drive_bitmask = windll.kernel32.GetLogicalDrives()
letters = string.ascii_lowercase
drives = [letters[i] for i in range(26) if drive_bitmask & (1 << i)]
# filter to fixed drives (DRIVE_FIXED == 3)
is_fixed = lambda x: windll.kernel32.GetDriveTypeW(f"{x}:\\") == 3
drives = [d for d in drives if is_fixed(d.upper())]
return drives
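# Illustrative: GetLogicalDrives() sets bit i for drive letters[i], so a mask
# of 0b1100 means drives 'c' and 'd' exist (bits 2 and 3 are set).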
@functools.lru_cache(None)
def _split_windocker(path):
chunks = path.split("//")
mangled = hostdet.wsl_host_docker_path(chunks[0])
if len(chunks) > 1:
newctx = f"{mangled}//{chunks[1]}"
else:
newctx = mangled
return newctx
def _wsl_translate_locations(node):
# Convert image contexts to Windows host paths in the format that docker
# understands.
drives = set()
image_ids = []
imagelist = []
for child in node.stream():
if id(child.image) not in image_ids:
image_ids.append(id(child.image))
imagelist.append(child.image)
for img in imagelist:
path = img.copy_dir
if path:
newpath = _split_windocker(path)
img.copy_dir = newpath
drives.add(newpath[1])
path = img.context
if path:
newpath = _split_windocker(path)
img.context = newpath
drives.add(newpath[1])
path = img.dockerfile
if path:
newpath = _split_windocker(path)
img.dockerfile = newpath
drives.add(newpath[1])
return drives
def _windows_translate_locations(node):
# Convert image contexts to format that docker understands.
drives = set()
image_ids = []
imagelist = []
for child in node.stream():
if id(child.image) not in image_ids:
image_ids.append(id(child.image))
imagelist.append(child.image)
for img in imagelist:
path = img.copy_dir
if path:
newpath = hostdet.windows_docker_path(path)
img.copy_dir = newpath
drives.add(newpath[1])
path = img.context
if path:
newpath = hostdet.windows_docker_path(path)
img.context = newpath
drives.add(newpath[1])
path = img.dockerfile
if path:
newpath = hostdet.windows_docker_path(path)
img.dockerfile = newpath
drives.add(newpath[1])
return drives
def build(
node,
build_mode=constants.BuildMode.DEPLOY_TO_CLOUD,
use_shell=False,
use_app=True,
retention=7,
is_public=False,
):
assert node.parent is None
assert node.name == "/"
if hostdet.is_wsl():
required_drives = _wsl_translate_locations(node)
elif hostdet.is_windows():
required_drives = _windows_translate_locations(node)
if hostdet.is_wsl() or hostdet.is_windows():
available = docker_available_drives()
unavailable = set(required_drives).difference(available)
if len(unavailable) > 0:
msg = f"The drive {unavailable.pop()} is used in an image context, but is not available in Docker. Review your Docker Desktop file sharing settings."
raise hostdet.WindowsMapError(msg)
from .. import api
# refresh the token for every pipeline launch
# Force in case of cognito change
node.token = token = api.Auth().get_token_from_shell(force=True)
serialization = node.serialize()
command = " ".join(pipes.quote(a) for a in sys.argv)
# Register pipeline, get <pipeline_id>
cloud = build_mode == constants.BuildMode.DEPLOY_TO_CLOUD
pipeline_id = api.Pipeline().create(
token,
command,
cloud=cloud,
retention=retention,
tags=node.tags or [],
title=node.title,
is_public=is_public,
)
launch_from_serialization(
serialization, pipeline_id, build_mode, use_shell, use_app, token
)
def launch_from_serialization(
serialization,
pipeline_id,
build_mode=constants.BuildMode.DEPLOY_TO_CLOUD,
use_shell=False,
use_app=True,
token=None,
inject_env=None,
is_migration=False,
):
if not token:
token = api.Auth().get_token_from_shell(force=True)
def cloud_deploy():
# Get a token, serialize, and then deploy to AWS. Once that
# returns, connect to it using the shell_ui.
api.Pipeline().save_serialization(token, pipeline_id, serialization)
api.Manager().launch(
token, pipeline_id, env=inject_env, is_migration=is_migration
)
log.debug(f"Connecting to pipeline_id={pipeline_id}")
def local_deploy():
clean_log_dirs(token)
# Write serialization to ~/.conducto/
local_progdir = constants.ConductoPaths.get_local_path(pipeline_id)
os.makedirs(local_progdir, exist_ok=True)
serialization_path = os.path.join(
local_progdir, constants.ConductoPaths.SERIALIZATION
)
with open(serialization_path, "w") as f:
f.write(serialization)
api.Pipeline().update(token, pipeline_id, {"program_path": serialization_path})
run_in_local_container(
token, pipeline_id, inject_env=inject_env, is_migration=is_migration
)
if build_mode == constants.BuildMode.DEPLOY_TO_CLOUD:
func = cloud_deploy
starting = False
else:
func = local_deploy
starting = True
run(token, pipeline_id, func, use_app, use_shell, "Starting", starting)
return pipeline_id
def run(token, pipeline_id, func, use_app, use_shell, msg, starting):
from .. import api, shell_ui
url = api.Config().get_connect_url(pipeline_id)
u_url = log.format(url, underline=True)
if starting:
tag = api.Config().get_image_tag()
manager_image = constants.ImageUtil.get_manager_image(tag)
try:
client_utils.subprocess_run(["docker", "image", "inspect", manager_image])
except client_utils.CalledProcessError:
docker_parts = ["docker", "pull", manager_image]
print("Downloading the Conducto docker image that runs your pipeline.")
log.debug(" ".join(pipes.quote(s) for s in docker_parts))
client_utils.subprocess_run(
docker_parts, msg="Error pulling manager container",
)
print(f"{msg} pipeline {pipeline_id}.")
func()
if _manager_debug():
return
if use_app:
print(
f"Viewing at {u_url}. To disable, specify '--no-app' on the command line."
)
hostdet.system_open(url)
else:
print(f"View at {u_url}")
data = api.Pipeline().get(token, pipeline_id)
if data.get("is_public"):
unauth_password = data["<PASSWORD>"]
url = api.Config().get_url()
public_url = f"{url}/app/s/{pipeline_id}/{unauth_password}"
u_public_url = log.format(public_url, underline=True)
print(f"\nPublic view at:\n{u_public_url}")
if use_shell:
shell_ui.connect(token, pipeline_id, "Deploying")
def run_in_local_container(
token, pipeline_id, update_token=False, inject_env=None, is_migration=False
):
# Remote base dir will be verified by container.
local_basedir = constants.ConductoPaths.get_local_base_dir()
if inject_env is None:
inject_env = {}
if hostdet.is_wsl():
local_basedir = os.path.realpath(local_basedir)
local_basedir = hostdet.wsl_host_docker_path(local_basedir)
elif hostdet.is_windows():
local_basedir = hostdet.windows_docker_path(local_basedir)
else:
subp = subprocess.Popen(
"head -1 /proc/self/cgroup|cut -d/ -f3",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
)
container_id, err = subp.communicate()
container_id = container_id.decode("utf-8").strip()
if container_id:
# Mount to the ~/.conducto of the host machine and not of the container
import json
subp = subprocess.Popen(
f"docker inspect -f '{{{{ json .Mounts }}}}' {container_id}",
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.DEVNULL,
)
mount_data, err = subp.communicate()
if subp.returncode == 0:
mounts = json.loads(mount_data)
for mount in mounts:
if mount["Destination"] == local_basedir:
local_basedir = mount["Source"]
break
# The homedir inside the manager is /root
remote_basedir = "/root/.conducto"
tag = api.Config().get_image_tag()
manager_image = constants.ImageUtil.get_manager_image(tag)
ccp = constants.ConductoPaths
pipelinebase = ccp.get_local_path(pipeline_id, expand=False, base=remote_basedir)
# Note: This path is in the docker which is always unix
pipelinebase = pipelinebase.replace(os.path.sep, "/")
serialization = f"{pipelinebase}/{ccp.SERIALIZATION}"
container_name = f"conducto_manager_{pipeline_id}"
network_name = os.getenv("CONDUCTO_NETWORK", f"conducto_network_{pipeline_id}")
if not is_migration:
try:
client_utils.subprocess_run(
["docker", "network", "create", network_name, "--label=conducto"]
)
except client_utils.CalledProcessError as e:
if f"network with name {network_name} already exists" in e.stderr.decode():
pass
else:
raise
flags = [
# Detached mode.
"-d",
# Remove container when done.
"--rm",
# --name is the name of the container, as in when you do `docker ps`
# --hostname is the name of the host inside the container.
# Set them equal so that the manager can use socket.gethostname() to
# spin up workers that connect to its network.
"--name",
container_name,
"--network",
network_name,
"--hostname",
container_name,
"--label",
"conducto",
# Mount local conducto basedir on container. Allow TaskServer
# to access config and serialization and write logs.
"-v",
f"{local_basedir}:{remote_basedir}",
# Mount docker sock so we can spin out task workers.
"-v",
"/var/run/docker.sock:/var/run/docker.sock",
# Specify expected base dir for container to verify.
"-e",
f"CONDUCTO_BASE_DIR_VERIFY={remote_basedir}",
"-e",
f"CONDUCTO_LOCAL_BASE_DIR={local_basedir}",
"-e",
f"CONDUCTO_LOCAL_HOSTNAME={socket.gethostname()}",
"-e",
f"CONDUCTO_NETWORK={network_name}",
]
for env_var in (
"CONDUCTO_URL",
"CONDUCTO_CONFIG",
"IMAGE_TAG",
"CONDUCTO_DEV_REGISTRY",
):
if os.environ.get(env_var):
flags.extend(["-e", f"{env_var}={os.environ[env_var]}"])
for k, v in inject_env.items():
flags.extend(["-e", f"{k}={v}"])
if hostdet.is_wsl() or hostdet.is_windows():
drives = docker_available_drives()
if docker_desktop_23():
flags.extend(["-e", "WINDOWS_HOST=host_mnt"])
else:
flags.extend(["-e", "WINDOWS_HOST=plain"])
for d in drives:
# Mount whole system read-only to enable rebuilding images as needed
mount = f"type=bind,source={d}:/,target={constants.ConductoPaths.MOUNT_LOCATION}/{d.lower()},readonly"
flags += ["--mount", mount]
else:
# Mount whole system read-only to enable rebuilding images as needed
mount = f"type=bind,source=/,target={constants.ConductoPaths.MOUNT_LOCATION},readonly"
flags += ["--mount", mount]
if _manager_debug():
flags[0] = "-it"
flags += ["-e", "CONDUCTO_LOG_LEVEL=0"]
capture_output = False
else:
capture_output = True
mcpu = _manager_cpu()
if mcpu > 0:
flags += ["--cpus", str(mcpu)]
# WSL doesn't natively pass Docker credentials into containers; mount the
# host Docker config dir so the manager can use host creds to pull images.
docker_basedir = constants.ConductoPaths.get_local_docker_config_dir()
if docker_basedir:
flags += ["-v", f"{docker_basedir}:/root/.docker"]
cmd_parts = [
"python",
"-m",
"manager.src",
"-p",
pipeline_id,
"-i",
serialization,
"--profile",
api.Config().default_profile,
"--local",
]
if update_token:
cmd_parts += ["--update_token", "--token", token]
if manager_image.startswith("conducto/"):
docker_parts = ["docker", "pull", manager_image]
log.debug(" ".join(pipes.quote(s) for s in docker_parts))
client_utils.subprocess_run(
docker_parts,
capture_output=capture_output,
msg="Error pulling manager container",
)
# Run manager container.
docker_parts = ["docker", "run"] + flags + [manager_image] + cmd_parts
log.debug(" ".join(pipes.quote(s) for s in docker_parts))
client_utils.subprocess_run(
docker_parts,
msg="Error starting manager container",
capture_output=capture_output,
)
# When in debug mode the manager is run attached and it makes no sense to
# follow that up with waiting for the manager to start.
if not _manager_debug():
log.debug(f"Verifying manager docker startup pipeline_id={pipeline_id}")
def _get_docker_output():
p = subprocess.run(["docker", "ps"], stdout=subprocess.PIPE)
return p.stdout.decode("utf-8")
pl = constants.PipelineLifecycle
target = pl.active - pl.standby
# wait 45 seconds, but this should be quick
for _ in range(
int(
constants.ManagerAppParams.WAIT_TIME_SECS
/ constants.ManagerAppParams.POLL_INTERVAL_SECS
)
):
time.sleep(constants.ManagerAppParams.POLL_INTERVAL_SECS)
log.debug(f"awaiting program {pipeline_id} active")
data = api.Pipeline().get(token, pipeline_id)
if data["status"] in target and data["pgw"] not in ["", None]:
break
dps = _get_docker_output()
if container_name not in dps:
attached = [param for param in docker_parts if param != "-d"]
dockerrun = " ".join(pipes.quote(s) for s in attached)
msg = f"There was an error starting the docker container. Try running the command below for more diagnostics or contact us on Slack at ConductoHQ.\n{dockerrun}"
raise RuntimeError(msg)
else:
# timeout, return error
raise RuntimeError(
f"no manager connection to pgw for {pipeline_id} after {constants.ManagerAppParams.WAIT_TIME_SECS} seconds"
)
log.debug(f"Manager docker connected to pgw pipeline_id={pipeline_id}")
def clean_log_dirs(token):
from .. import api
pipelines = api.Pipeline().list(token)
pipeline_ids = set(p["pipeline_id"] for p in pipelines)
# Remove all outdated logs directories.
profile = api.Config().default_profile
local_basedir = os.path.join(constants.ConductoPaths.get_local_base_dir(), profile)
if os.path.isdir(local_basedir):
for subdir in os.listdir(local_basedir):
if subdir not in pipeline_ids:
shutil.rmtree(os.path.join(local_basedir, subdir), ignore_errors=True)
def _manager_debug():
return t.Bool(os.getenv("CONDUCTO_MANAGER_DEBUG"))
def _manager_cpu():
return float(os.getenv("CONDUCTO_MANAGER_CPU", "1"))
```
#### File: joseangel-sc/conducto/pipeline.py
```python
import base64
import collections
import functools
import gzip
import inspect
import itertools
import json
import os
import re
import traceback
import typing
import conducto.internal.host_detection as hostdet
from .shared import constants, log, types as t
from . import api, callback, image as image_mod
State = constants.State
class TreeError(Exception):
pass
def jsonable(obj):
try:
json.dumps(obj)
return True
except TypeError:
return False
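# Example (illustrative): jsonable({"a": 1}) -> True, jsonable(object()) -> False.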
def load_node(**kwargs):
if kwargs["type"] == "Exec":
return Exec(**kwargs)
elif kwargs["type"] == "Serial":
return Serial(**kwargs)
elif kwargs["type"] == "Parallel":
return Parallel(**kwargs)
else:
raise TypeError("Type {} not a valid node type".format(kwargs["type"]))
class Node:
"""
The node classes :py:class:`Exec`, :py:class:`Serial` and
:py:class:`Parallel` all derive from this class. The parameters here apply
directly to `Exec` nodes and as defaults on `Serial` and `Parallel` for the
sub-nodes.
:param cpu: `float`, default 1, Number of CPUs to allocate to the Node.
Must be >0 if assigned.
:param mem: `float`, default 2, GB of memory to allocate to the Node. Must
be >0 if assigned.
:param requires_docker: `bool`, default `False`, If True, enable the Node
to use Docker.
:param env: `dict` mapping environment variable names to their values.
:param image: :py:class:`conducto.Image` or `str`, Run Node in container
using the given :py:class:`conducto.Image` or image identified
by name in Docker.
:param image_name: `str`, Reference an :py:class:`conducto.Image` by
name instead of passing it explicitly. The Image must have been
registered with :py:func:`conducto.Node.register_image`.
:param same_container: See :ref:`Running Exec nodes` for details. Note this
has special inheritance rules when propagating to child nodes.
:param skip: bool, default `False`, If False the Node will be run normally.
If True execution will pass over it and it will not be run.
:param suppress_errors: bool, default `False`, If True the Node will go to
the Done state when finished, even if some children have failed. If False,
any failed children will cause it to go to the Error state.
:param name: If creating Node inside a context manager, you may pass
`name=...` instead of using normal dict assignment.
All of these arguments, except for `name`, may be set in the Node
constructor or later. For example, `n = co.Parallel(cpu=2)` and
.. code-block::
n = co.Parallel()
n.cpu = 2
are equivalent.
:ivar name: Immutable. The name of this Node must be unique among sibling
Nodes. It is most commonly set through dict assignment with
`parent['nodename'] = co.Parallel()`. It may also be set in the
constructor with `co.Parallel(name='nodename')` if you're using another
Node as a context manager. It may not contain a `/`, as `/` is reserved
as the path separator.
"""
# Enum regarding skip statuses. The naming is awkward but intentional:
# 'skip' is the namespace, but we should phrase the terms in the positive,
# i.e., how many are running.
SKIP_RUN_NONE = 0
SKIP_RUN_SOME = 1
SKIP_RUN_ALL = 2
# In AWS cloud mode, mem and cpu must fit on an EC2 instance (in EC2
# mode), and must be one of allowed pairings (in FARGATE mode).
DEFAULT_MEM = 2
DEFAULT_CPU = 1
DEFAULT_GPU = 0
_CONTEXT_STACK = []
_NUM_FILE_AND_LINE_CALLS = 0
_MAX_FILE_AND_LINE_CALLS = 50000
if api.Config().get("config", "force_debug_info") or t.Bool(
os.getenv("CONDUCTO_FORCE_DEBUG_INFO")
):
_MAX_FILE_AND_LINE_CALLS = 10 ** 20
__slots__ = (
"_name",
"id",
"id_root",
"user_set",
"_root",
"pipeline_id",
"id_generator",
"token",
"parent",
"children",
"_callbacks",
"suppress_errors",
"same_container",
"env",
"doc",
"title",
"tags",
"file",
"line",
"_repo",
"_autorun",
"_sleep_when_done",
)
def __init__(
self,
*,
env=None,
skip=False,
name=None,
cpu=None,
gpu=None,
mem=None,
requires_docker=None,
suppress_errors=False,
same_container=constants.SameContainer.INHERIT,
image: typing.Union[str, image_mod.Image] = None,
image_name=None,
doc=None,
title=None,
tags: typing.Iterable = None,
file=None,
line=None,
):
self.id_generator, self.id_root = itertools.count(), self
self.id = None
self.parent = None
self._root = self
self.children = {}
self._callbacks = []
self.token = None
assert image_name is None or image is None, "can only specify one image"
self._repo = image_mod.Repository()
# store actual values of each attribute
self.user_set = {
"skip": skip,
"cpu": cpu,
"gpu": gpu,
"mem": mem,
"requires_docker": requires_docker,
}
if image:
self.image = image
else:
self.user_set["image_name"] = image_name
self.env = env or {}
self.doc = doc
self.title = title
self.tags = self.sanitize_tags(tags)
if name is not None:
if not Node._CONTEXT_STACK:
raise ValueError(
f"Cannot assign name={name} outside of a context manager."
)
if "/" in name:
raise ValueError(
f"Disallowed character in name, may not use '/': {name}"
)
parent = Node._CONTEXT_STACK[-1]
parent[name] = self
else:
self._name = "/"
self.suppress_errors = suppress_errors
self.same_container = same_container
# These are only to be set on the root node, and only by co.main().
self._autorun = None
self._sleep_when_done = None
if file is not None:
self.file = file
self.line = line
else:
self.file, self.line = self._get_file_and_line()
def __enter__(self):
Node._CONTEXT_STACK.append(self)
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if Node._CONTEXT_STACK[-1] is not self:
raise Exception(
f"Node context error: {repr(Node._CONTEXT_STACK[-1])} is not {repr(self)}"
)
Node._CONTEXT_STACK.pop()
def __str__(self):
"""
The full path of Node, computed by joining the names of this Node's ancestry with `/`.
.. code-block:: python
import conducto as co
x = co.Parallel()
x["foo"] = y = co.Parallel()
x["foo/bar"] = z = co.Exec("echo foobar")
print(f"x.name={x.name} str(x)={x}")
# x.name=/ str(x) = /
print(f"y.name={y.name} str(y)={y}")
# y.name=foo str(y) = /foo
print(f"z.name={z.name} str(z)={z}")
# z.name=bar str(z) = /foo/bar
for node in x.stream():
print(str(node))
# /
# /foo
# /foo/bar
"""
name = []
cur = self
while cur:
name.append(cur.name)
cur = cur.parent
return "/".join(name[::-1]).replace("//", "/")
@property
def name(self):
return self._name
@property
def repo(self):
return self.root._repo
@property
def _id(self):
return self.id
@property
def mem(self):
return self.user_set["mem"]
@property
def gpu(self):
return self.user_set["gpu"]
@property
def cpu(self):
return self.user_set["cpu"]
@property
def requires_docker(self):
return self.user_set.get("requires_docker")
@property
def skip(self):
return self.user_set.get("skip", False)
@mem.setter
def mem(self, val):
self.user_set["mem"] = val
@gpu.setter
def gpu(self, val):
self.user_set["gpu"] = val
@cpu.setter
def cpu(self, val):
self.user_set["cpu"] = val
@property
def image(self) -> typing.Optional[image_mod.Image]:
if self.image_name is None:
return None
return self.repo[self.image_name]
@property
def image_name(self):
return self.get_inherited_attribute("image_name")
@image.setter
def image(self, val):
if val is None:
self.user_set["image_name"] = None
return
if isinstance(val, str):
val = image_mod.Image(val)
if isinstance(val, image_mod.Image):
self.repo.add(val)
self.user_set["image_name"] = val.name
else:
raise ValueError(f"Unknown type for Node.image: {repr(val)}")
@requires_docker.setter
def requires_docker(self, val: bool):
self.user_set["requires_docker"] = val
@skip.setter
def skip(self, val: bool):
self.user_set["skip"] = val
def register_image(self, image: image_mod.Image):
"""
Register a named Image for use by descendant Nodes that specify
image_name. This is especially useful with lazy pipeline creation to
ensure that the correct base image is used.
:param image: :py:class:`conducto.Image`
"""
self.repo.add(image)
def on_done(self, cback):
assert isinstance(cback, callback.base)
self._callbacks.append((State.DONE, cback))
def on_error(self, cback):
assert isinstance(cback, callback.base)
self._callbacks.append((State.ERROR, cback))
def on_queued(self, cback):
assert isinstance(cback, callback.base)
self._callbacks.append((State.QUEUED, cback))
def on_running(self, cback):
assert isinstance(cback, callback.base)
self._callbacks.append((State.RUNNING, cback))
def _pull(self):
if self.id is None or self.root != self.id_root:
self.id_root = self.root
self.id = next(self.root.id_generator)
    # Get the root with path compression (union-find style): flatten the chain
    # of parents so that repeated lookups stay cheap.
@property
def root(self):
if self._root != self:
self._root = self._root.root
return self._root
def __setitem__(self, name, node):
if "/" in name:
path, new = name.rsplit("/", 1)
self[path][new] = node
return
if name in self.children or node.root == self.root or node.root != node:
raise TreeError(
f"Adding node {name} violates the integrity of the pipeline"
)
self.children[name] = node
self.repo.merge(node.repo)
node.parent = self
node._root = self.root
node._name = name
def __getitem__(self, item):
# Absolute paths start with a '/' and begin at the root
if item.startswith("/"):
current = self.root
else:
current = self
for i in item.split("/"):
# Ignore consecutive delimiters: 'a/b//c' == 'a/b/c'
if not i:
continue
# Find the referenced child and iterate
current = current.children[i]
return current
def __contains__(self, item):
try:
self[item]
except KeyError:
return False
else:
return True
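    # Illustrative path lookups (not part of the original file), using the tree
    # from the __str__ docstring above:
    #     x["foo/bar"]     # relative lookup from x
    #     x["/foo/bar"]    # absolute lookup, resolved from the root
    #     "foo/bar" in x   # True, via __contains__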
def describe(self):
output = {
**self.user_set,
**{"__env__" + key: value for key, value in self.env.items()},
"id": self,
"callbacks": [(event, cb.to_literal()) for event, cb in self._callbacks],
"type": self.__class__.__name__,
"file": self.file,
"line": self.line,
}
if self.doc:
output["doc"] = self.doc
if self.title:
output["title"] = self.title
if self.tags:
output["tags"] = self.tags
if self.same_container != constants.SameContainer.INHERIT:
output["same_container"] = self.same_container
if self.suppress_errors:
output["suppress_errors"] = self.suppress_errors
if isinstance(self, Serial):
output["stop_on_error"] = self.stop_on_error
if isinstance(self, Exec):
output["command"] = self.command
return output
def serialize(self, pretty=False):
def validate_env(node):
for key, value in node.env.items():
if not isinstance(key, str):
raise TypeError(
f"{node} has {type(key).__name__} in env key when str is required"
)
if not isinstance(value, str):
raise TypeError(
f"{node} has {type(value).__name__} in env value for {key} when str is required"
)
res = {
"edges": [],
"nodes": [],
"images": self.repo.images,
"token": self.token,
"autorun": self._autorun,
"sleep_when_done": self._sleep_when_done,
}
queue = collections.deque([self])
while queue:
node = queue.popleft()
validate_env(node)
node._pull()
res["nodes"].append(
{k: v for k, v in node.describe().items() if v is not None}
)
for name, child in node.children.items():
queue.append(child)
res["edges"].append([node, child, name])
class NodeEncoder(json.JSONEncoder):
def default(self, o):
try:
return o._id
except AttributeError:
return o
if pretty:
import pprint
return pprint.pformat(res)
output = json.dumps(res, cls=NodeEncoder)
return base64.b64encode(
gzip.compress(output.encode(), compresslevel=3)
).decode()
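    # Illustrative round trip (not part of the original file): serialize()
    # gzips and base64-encodes the JSON graph, and deserialize() below
    # reverses it:
    #     s = root.serialize()
    #     root2 = Node.deserialize(s)  # a tree with the same structure as root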
@staticmethod
def deserialize(string):
string = gzip.decompress(base64.b64decode(string))
data = json.loads(string)
nodes = {i["id"]: load_node(**i) for i in data["nodes"]}
for i in data["nodes"]:
for event, cb_literal in i.get("callbacks", []):
cb, cb_args = cb_literal
kwargs = {
k: nodes[cb_args[k]] for k in cb_args.get("__node_args__", [])
}
cb = callback.base(cb, **kwargs)
nodes[i["id"]]._callbacks.append((event, cb))
for parent, child, name in data["edges"]:
nodes[parent][name] = nodes[child]
root = nodes[data["nodes"][0]["id"]]
root.token = data.get("token")
root._autorun = data.get("autorun", False)
root._sleep_when_done = data.get("sleep_when_done", False)
return root
# returns a stream in topological order
def stream(self, reverse=False):
"""
Iterate through the nodes
"""
def _fwd():
stack = [self]
while stack:
yld = stack.pop()
yield yld
stack.extend(list(yld.children.values())[::-1])
def _bwd():
stack = [[self, True]]
while stack:
while stack[-1][1]:
stack[-1][1] = False
for i in stack[-1][0].children.values():
stack.append([i, True])
yield stack.pop()[0]
if reverse:
return _bwd()
else:
return _fwd()
def get_inherited_attribute(self, attr):
node = self
while node is not None:
v = node.user_set[attr]
if v is not None:
return v
else:
node = node.parent
return None
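    # Illustrative example (not part of the original file): the `image_name`
    # property above uses this walk, so a node with no image of its own
    # inherits the nearest ancestor's:
    #     root.image = image_mod.Image("python:3.8")
    #     child.image_name  # -> the name of root's Image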
def launch_local(
self,
use_shell=True,
retention=7,
run=False,
sleep_when_done=False,
prebuild_images=False,
):
"""
Launch directly from python.
:param use_shell: If True (default) it will connect to the running
pipeline using the shell UI. Otherwise just launch the pipeline and
then exit.
:param retention: Once the pipeline is put to sleep, its logs and
:ref:`data` will be deleted after `retention` days of inactivity.
Until then it can be woken up and interacted with.
:param run: If True the pipeline will run immediately upon launching.
Otherwise (default) it will stay Pending until the user starts it.
:param sleep_when_done: If True the pipeline will sleep -- manager
exits with recoverable state -- when the root node successfully
gets to the Done state.
:param prebuild_images: If True build the images before launching the pipeline.
"""
# TODO: Do we want these params? They seem sensible and they were documented at one point.
# :param tags: If specified, should be a list of strings. The app lets you filter programs based on these tags.
# :param title: Title to show in the program list in the app. If unspecified, the title will be based on the command line.
self._build(
build_mode=constants.BuildMode.LOCAL,
use_shell=use_shell,
retention=retention,
run=run,
sleep_when_done=sleep_when_done,
prebuild_images=prebuild_images,
)
def _build(
self,
build_mode=constants.BuildMode.LOCAL,
use_shell=False,
use_app=False,
prebuild_images=False,
retention=7,
run=False,
sleep_when_done=False,
is_public=False,
):
if self.image is None:
self.image = image_mod.Image(name="conducto-default")
self.check_images()
if build_mode != constants.BuildMode.LOCAL or prebuild_images:
image_mod.make_all(
self, push_to_cloud=build_mode != constants.BuildMode.LOCAL
)
self._autorun = run
self._sleep_when_done = sleep_when_done
from conducto.internal import build
return build.build(
self,
build_mode,
use_shell=use_shell,
use_app=use_app,
retention=retention,
is_public=is_public,
)
def check_images(self):
for node in self.stream():
if isinstance(node, Exec):
node.expanded_command()
def pretty(self, strict=True):
buf = []
self._pretty("", "", "", buf, strict)
return "\n".join(buf)
def _pretty(self, node_prefix, child_prefix, index_str, buf, strict):
"""
Draw pretty representation of the node pipeline, using ASCII box-drawing
characters.
For example:
/
├─1 First
        │ ├─ Parallel1 "echo 'I run first'"
        │ └─ Parallel2 "echo 'I also run first'"
└─2 Second "echo 'I run last.'"
"""
if isinstance(self, Exec):
node_str = f"{log.format(self.name, color='cyan')} {self.expanded_command(strict)}"
node_str = node_str.strip().replace("\n", "\\n")
else:
node_str = log.format(self.name, color="blue")
buf.append(f"{node_prefix}{index_str}{node_str}")
length_of_length = len(str(len(self.children) - 1))
for i, node in enumerate(self.children.values()):
if isinstance(self, Parallel):
new_index_str = " "
else:
new_index_str = f"{str(i).zfill(length_of_length)} "
if i == len(self.children) - 1:
this_node_prefix = f"{child_prefix}└─"
this_child_prefix = f"{child_prefix} "
else:
this_node_prefix = f"{child_prefix}├─"
this_child_prefix = f"{child_prefix}│ "
node._pretty(
this_node_prefix, this_child_prefix, new_index_str, buf, strict
)
@staticmethod
def sanitize_tags(val):
if val is None:
return val
elif isinstance(val, (bytes, str)):
return [val]
elif isinstance(val, (list, tuple, set)):
for v in val:
if not isinstance(v, (bytes, str)):
raise TypeError(f"Expected list of strings, got: {repr(v)}")
return val
else:
raise TypeError(f"Cannot convert {repr(val)} to list of strings.")
@staticmethod
def _get_file_and_line():
if Node._NUM_FILE_AND_LINE_CALLS > Node._MAX_FILE_AND_LINE_CALLS:
return None, None
Node._NUM_FILE_AND_LINE_CALLS += 1
for frame, lineno in traceback.walk_stack(None):
filename = frame.f_code.co_filename
if not filename.startswith(_conducto_dir):
if not _isabs(filename):
filename = _abspath(filename)
return filename, lineno
return None, None
@staticmethod
def force_debug_info(val):
if val:
Node._MAX_FILE_AND_LINE_CALLS = 10 ** 30
else:
Node._MAX_FILE_AND_LINE_CALLS = 10 ** 4
class Exec(Node):
"""
A node that contains an executable command
    :param command: A shell command to execute or a Python callable.
    If a Python callable is specified for the command, the `args` and `kwargs`
are serialized and a `conducto` command line is constructed to launch the
function for that node in the pipeline.
"""
__slots__ = ("command",)
def __init__(self, command, *args, **kwargs):
if callable(command):
self._validate_args(command, *args, **kwargs)
from .glue import method
wrapper = method.Wrapper(command)
command = wrapper.to_command(*args, **kwargs)
kwargs = wrapper.get_exec_params(*args, **kwargs)
args = []
if args:
raise ValueError(
f"Only allowed arg is command. Got:\n command={repr(command)}\n args={args}\n kwargs={kwargs}"
)
super().__init__(**kwargs)
# Instance variables
self.command = command
    # Validate arguments for the given function without calling it. This is useful for
    # raising early errors on `co.Lazy()` or `co.Exec(func, *args, **kwargs)`.
@staticmethod
def _validate_args(func, *args, **kwargs):
params = inspect.signature(func).parameters
hints = typing.get_type_hints(func)
if isinstance(func, staticmethod):
function = func.__func__
else:
function = func
# TODO: can target function have a `*args` or `**kwargs` in the signature? If
# so, handle it.
invalid_params = [
(name, str(param.kind))
for name, param in params.items()
if param.kind != inspect.Parameter.POSITIONAL_OR_KEYWORD
]
if invalid_params:
raise TypeError(
f"Unsupported parameter types of "
f"{function.__name__}: {invalid_params} - "
f"Only {str(inspect.Parameter.POSITIONAL_OR_KEYWORD)} is allowed."
)
# this will also validate against too-many or too-few arguments
call_args = inspect.getcallargs(function, *args, **kwargs)
for name, arg_value in call_args.items():
if name in hints:
# If there is a type hint, use the output of `typing.get_type_hints`. It
# infers typing.Optional when default is None, and it handles forward
# references.
param_type = hints[name]
            else:
                # If there is no type hint, fall back to the parameter's raw
                # annotation from the function signature.
                param_type = params[name].annotation
if not t.is_instance(arg_value, param_type):
raise TypeError(
f"Argument {name}={arg_value} {type(arg_value)} for "
f"function {function.__name__} is not compatible "
f"with expected type: {param_type}"
)
def delete_child(self, node):
raise NotImplementedError("Exec nodes have no children")
def append_child(self, node):
raise NotImplementedError("Exec nodes have no children")
def expanded_command(self, strict=True):
if "__conducto_path:" in self.command:
img = self.image
if img is None:
if strict:
raise ValueError(
"Node references code inside a container but no image is specified\n"
f" Node: {self}"
)
else:
return self.command
COPY_DIR = image_mod.dockerfile_mod.COPY_DIR
def repl(match):
path = match.group(1)
path_map = dict(img.path_map)
# If a gitroot was detected, it was marked in the command with a "//".
# If copy_url was set then we can determine what the external portion
# of the path was. Together with COPY_DIR we can update path_map
if "//" in path and img.copy_url:
external = path.split("//", 1)[0]
path_map[external] = COPY_DIR
# Normalize path to get rid of the //.
path = os.path.normpath(path)
# temporary windows translations -- these are translated for
# real just-in-time during the final serialization, but we
            # convert them here to facilitate this validation.
if hostdet.is_windows():
wdp = hostdet.windows_docker_path
path_map = {wdp(k): v for k, v in path_map.items()}
for external, internal in path_map.items():
# For each element of path_map, see if the external path matches
external = os.path.normpath(external.rstrip("/"))
if not path.startswith(external):
continue
# If so, calculate the corresponding internal path
internal = os.path.normpath(internal.rstrip("/"))
relative = os.path.relpath(path, external)
new_path = os.path.join(internal, relative)
# As a convenience, if we `docker_auto_workdir` then we know the workdir and
# we can shorten the path
if img.docker_auto_workdir and new_path.startswith(COPY_DIR):
return os.path.relpath(new_path, COPY_DIR)
else:
# Otherwise just return an absolute path.
return new_path
raise ValueError(
f"Node references local code but the Image doesn't have enough information to infer the corresponding path inside the container.\n"
f"Expected '.copy_dir', '.copy_url' inside a Git directory, or 'path_map'."
f" Node: {self}\n"
f" Image: {img.to_dict()}"
)
return re.sub("__conducto_path:(.*?):endpath__", repl, self.command)
else:
return self.command
class Parallel(Node):
"""
Node that has child Nodes and runs them at the same time.
Same interface as :py:func:`conducto.Node`.
"""
pass
class Serial(Node):
"""
Node that has child Nodes and runs them one after
another. Same interface as :py:func:`conducto.Node`, plus
the following:
    :param stop_on_error: bool, default `True`, If True the Serial will Error
        when one of its children Errors, leaving subsequent children Pending.
        If False and a child Errors, the Serial will still run the rest of its
        children and then Error.
"""
__slots__ = ["stop_on_error"]
def __init__(
self,
*,
env=None,
skip=False,
name=None,
cpu=None,
gpu=None,
mem=None,
requires_docker=None,
stop_on_error=True,
suppress_errors=False,
same_container=constants.SameContainer.INHERIT,
image: typing.Union[str, image_mod.Image] = None,
image_name=None,
doc=None,
tags: typing.Iterable = None,
):
super().__init__(
env=env,
skip=skip,
name=name,
cpu=cpu,
gpu=gpu,
mem=mem,
requires_docker=requires_docker,
suppress_errors=suppress_errors,
same_container=same_container,
image=image,
image_name=image_name,
doc=doc,
tags=tags,
)
self.stop_on_error = stop_on_error
_abspath = functools.lru_cache(1000)(os.path.abspath)
_isabs = functools.lru_cache(1000)(os.path.isabs)
_conducto_dir = os.path.dirname(__file__) + os.path.sep
``` |
{
"source": "joseangl/transience",
"score": 3
} |
#### File: transience/transience/utils.py
```python
import numpy as np
import os
from glob import iglob
def load_numpy_parallel_data(dir1, dir2):
view1_data = []
view2_data = []
for f in iglob(os.path.join(dir1, '*.npy')):
filename = os.path.basename(f)
f2 = os.path.join(dir2, filename)
# Ensure that the file also exists in dir2
if not os.path.isfile(f2):
continue
view1_data.append(np.load(f))
view2_data.append(np.load(f2))
return view1_data, view2_data
def load_numpy_dataset(directory, files):
return [np.load(os.path.join(directory, f)) for f in files]
``` |
{
"source": "JoseAntFer/pyny3d",
"score": 3
} |
#### File: pyny3d/pyny3d/shadows.py
```python
import numpy as np
import pyny3d.geoms as pyny
class ShadowsManager(object):
"""
    Class in charge of the management of shadow simulations.
    It can be initialized as a standalone object or associated to a
    ``pyny.Space`` through the ``.shadows`` method.
    The only arguments needed for the simulator to run are ``t`` or ``dt``
    and the ``latitude``. If the ShadowsManager is initialized from
    ``pyny.Space.shadows`` it is possible to run the execution in *auto*
    mode without inputting anything.
    Some explanations about how it works:
The shadows are computed discretely using a set of distributed
**sensible points** through the model. These points can be set with
the ``.get_height(attach=True)`` or the ``.mesh()`` methods.
At the same time, the sun positions are also discretized. The
simulator needs a finite number of positions, given by their azimuth
and zenit. Anyway, it is more convenient to give it a time vector
and the latitude and let the program calculate the sun positions for
you.
For convenience, the time is managed in "absolute minutes" within
the range of a year in the computations, that is, the first possible
interval [0] is the Jan 1 00:00 and the last [525599] is Dec 31
23:59. February 29 is not taken into account. It is possible to
automatically create an equally spaced t vector by giving a fixed
    interval, although the inputted vectors can be irregular.
    In view of the fact that there are, potentially, more than 8000
    sunny half-hour intervals in a year, the program precomputes a
    discretization of the Solar Horizont (azimuth, zenit pairs) and
    classifies the *t* and *data* vectors. The goal is to approximate
    these 8000 interval simulations with fewer than 340, with a maximum
    error of 3 deg (0.05 rad).
    This discretization is manually\* adjustable, making it possible to
    quickly compute large datasets at low resolution before the serious
    computations start.
For now, the Solar Horizont discretization can only be automatically
computed by a mesh. In the future more complex and convenient
discretizations will be available. Anyway, it is possible to input
    a custom discretization by manually introducing the attributes
    described in :func:`Vonoroi_SH`.
    Finally,
    the attributes which can be safely manipulated to tune up the
    simulator before the computations are all those which start with
    *arg_* (= default values):
* .arg_data
* .arg_t
* .arg_dt
* .arg_latitude = None
* .arg_run_true_time = False
* .arg_longitude = None (only for ``true_time``)
* .arg_UTC = None (only for ``true_time``)
        * .arg_zenitmin = 0.05 (minimum zenit, avoids irrelevant errors
          from trigonometric approximations)
* .arg_vor_size = 0.15 (mesh_size of the Voronoi diagram)
:param space: 3D model to run the simulation.
:type space: ``pyny.Space``
:param data: Data timeseries to project on the 3D model (radiation,
for example).
:type data: ndarray (shape=N), None
:param t: Time vector in absolute minutes or datetime objects
:type t: ndarray or list, None
:param dt: Interval time to generate t vector.
:type dt: int, None
:param latitude: Local latitude.
:type latitude: float (radians)
:returns: None
.. note:: \* In the future, the discretizations will be
automated based on error adjustment.
    .. warning:: The shadows computation does not take care
        of holes\*; instead, they can be emulated by a collection
        of polygons.
"""
def __init__(self, space, data=None, t=None, dt=None, latitude=None):
from pyny3d.shadows import Viz
self.viz = Viz(self)
self.space = space
# Arguments
self.arg_data = data
self.arg_t = t
self.arg_dt = dt
self.arg_latitude = latitude
self.arg_run_true_time = False
self.arg_longitude = None
self.arg_UTC = None
self.arg_zenitmin = 0.05
self.arg_vor_size = 0.15
# Processed information
## Precalculations
self.diff_t = None
self.integral = None
## Voronoi
self.t2vor_map = None
self.vor_freq = None
self.vor_surf = None
self.vor_centers = None
## get_sunpos
self.azimuth_zenit = None
self.true_time = None
## compute_shadows
self.light_vor = None
## project_data
self.proj_vor = None
self.proj_points = None
def run(self):
"""
Run the shadowing computation with the values stored in
``self.arg_``. Precomputed information is stored in:
* **.diff_t** (*ndarray*): ``np.diff(t)``
* **.integral** (*ndarray*): Trapezoidal data integration
over time.
The steps are:
* :func:`get_sunpos`
* :func:`Vonoroi_SH`
* :func:`compute_shadows`
* :func:`project_data`
        :returns: None
"""
# Adapt series
## time
if self.integral is None:
if self.arg_t is not None:
import datetime
if type(self.arg_t[0]) == datetime.datetime:
self.arg_t = self.to_minutes(time_obj=self.arg_t)
else:
self.arg_t = np.round(self.arg_t)
elif self.arg_dt is not None:
self.arg_dt = np.round(self.arg_dt)
self.arg_t = self.to_minutes(dt=self.arg_dt)
else:
raise ValueError('At least one time parameter is needed.')
self.diff_t = np.diff(self.arg_t)
## data
if self.arg_data is None:
self.arg_data = np.ones(self.arg_t.shape[0])
dt = self.diff_t/60 # hs
rect = self.arg_data[:-1]/1000*dt # kilounits
triang_side = np.diff(self.arg_data)
triang = 0.5*triang_side*dt
self.integral = rect + triang
self.integral = np.hstack((0, self.integral))
# Computation
if self.azimuth_zenit is None:
self.get_sunpos(self.arg_t, self.arg_run_true_time)
if self.vor_centers is None:
self.Vonoroi_SH(self.arg_vor_size)
self.compute_shadows()
self.project_data()
def Vonoroi_SH(self, mesh_size=0.1):
"""
Generates a equally spaced mesh on the Solar Horizont (SH).
Computes the Voronoi diagram from a set of points given by pairs
of (azimuth, zenit) values. This discretization completely
covers all the Sun positions.
        The smaller the mesh size, the better the resolution obtained. It is
        important to note that this heavily affects performance.
The generated information is stored in:
* **.t2vor_map** (*ndarray*): Mapping between time vector and
the Voronoi diagram.
* **.vor_freq** (*ndarray*): Number of times a Sun position
is inside each polygon in the Voronoi diagram.
* **.vor_surf** (*``pyny.Surface``*): Voronoi diagram.
            * **.vor_centers** (*ndarray*): Mass center of the
``pyny.Polygons`` that form the Voronoi diagram.
:param mesh_size: Mesh size for the square discretization of the
Solar Horizont.
:type mesh_size: float (in radians)
:returns: None
.. note:: In future versions this discretization will be
improved substantially. For now, it is quite rigid and only
admits square discretization.
"""
from scipy.spatial import Voronoi
from pyny3d.utils import sort_numpy
state = pyny.Polygon.verify
pyny.Polygon.verify = False
# Sort and remove NaNs
xy_sorted, order_back = sort_numpy(self.azimuth_zenit, col=1,
order_back=True)
# New grid
x1 = np.arange(-np.pi, np.pi, mesh_size)
y1 = np.arange(-mesh_size*2, np.pi/2+mesh_size*2, mesh_size)
x1, y1 = np.meshgrid(x1, y1)
centers = np.array([x1.ravel(), y1.ravel()]).T
# Voronoi
vor = Voronoi(centers)
# Setting the SH polygons
pyny_polygons = [pyny.Polygon(vor.vertices[v], False)
for v in vor.regions[1:] if len(v) > 3]
raw_surf = pyny.Surface(pyny_polygons)
# Classify data into the polygons discretization
map_ = raw_surf.classify(xy_sorted, edge=True, col=1,
already_sorted=True)
map_ = map_[order_back]
# Selecting polygons with points inside
vor = []
count = []
for i, poly_i in enumerate(np.unique(map_)[1:]):
vor.append(raw_surf[poly_i])
bool_0 = map_==poly_i
count.append(bool_0.sum())
map_[bool_0] = i
# Storing the information
self.t2vor_map = map_
self.vor_freq = np.array(count)
self.vor_surf = pyny.Surface(vor)
self.vor_centers = np.array([poly.get_centroid()[:2]
for poly in self.vor_surf])
pyny.Polygon.verify = state
def get_sunpos(self, t, true_time=False):
"""
Computes the Sun positions for the *t* time vector.
        *t* has to be in absolute minutes (0 at 00:00 01 Jan). The Sun
        positions calculated are in solar time, that is, maximum
        solar zenit exactly at midday.
The generated information is stored in:
* **.azimuth_zenit** (*ndarray*)
* **.true_time** (*datetime*): local time
:param t: Absolute minutes vector.
:type t: ndarray (dtype=int)
:param true_time: If True, a datetime vector with the true local
time will be stored at ``.true_time``
:type true_time: bool
        :returns: None
        .. seealso:: :func:`to_minutes` to easily generate a valid input t.
"""
import numpy as np
lat = self.arg_latitude
long = self.arg_longitude
alphamin = self.arg_zenitmin
# Solar calculations
day = np.modf(t/1440)[0]
fractional_year = 2*np.pi/(365*24*60)*(-24*60+t)
declination = 0.006918 - \
0.399912*np.cos(fractional_year) + \
0.070257*np.sin(fractional_year) - \
0.006758*np.cos(2*fractional_year) + \
0.000907*np.sin(2*fractional_year) - \
0.002697*np.cos(3*fractional_year) + \
0.00148*np.sin(3*fractional_year)
hour_angle = np.tile(np.arange(-np.pi, np.pi, 2*np.pi/(24*60),
dtype='float'), 365)[t]
solar_zenit = np.arcsin(np.sin(lat)*np.sin(declination) + \
np.cos(lat)*np.cos(declination)*np.cos(hour_angle))
solar_zenit[solar_zenit<=0+alphamin] = np.nan
#### Avoiding numpy warning
aux = (np.sin(solar_zenit)*np.sin(lat) - np.sin(declination))/ \
(np.cos(solar_zenit)*np.cos(lat))
not_nan = np.logical_not(np.isnan(aux))
aux_1 = aux[not_nan]
aux_1[aux_1>=1] = np.nan
aux[not_nan] = aux_1
####
solar_azimuth = np.arccos(aux)
solar_azimuth[day==0.5] = 0
solar_azimuth[day<0.5] *= -1
self.azimuth_zenit = np.vstack((solar_azimuth, solar_zenit)).T
# True time
if true_time:
import datetime as dt
long = np.rad2deg(long)
instant_0 = dt.datetime(1,1,1,0,0,0) # Simulator time
# Real time
equation_time = 229.18*(0.000075+0.001868*np.cos(fractional_year) - \
0.032077*np.sin(fractional_year) - \
0.014615*np.cos(2*fractional_year) - \
0.040849*np.sin(2*fractional_year))
time_offset = equation_time + 4*long + 60*self.arg_UTC
true_solar_time = t + time_offset
delta_true_date_objs = np.array([dt.timedelta(minutes=i)
for i in true_solar_time])
self.true_time = instant_0 + delta_true_date_objs
def compute_shadows(self):
"""
        Computes the shadowing for the ``pyny.Space`` stored in
        ``.space`` for the time intervals and Sun positions stored in
        ``.arg_t`` and ``.azimuth_zenit``, respectively.
The generated information is stored in:
* **.light_vor** (*ndarray (dtype=bool)*): Array with the
points in ``pyny.Space`` as columns and the discretized
Sun positions as rows. Indicates whether the points are
illuminated in each Sun position.
* **.light** (*ndarray (dtype=bool)*): The same as
``.light_vor`` but with the time intervals in ``.arg_t``
as rows instead of the Sun positions.
:returns: None
"""
from pyny3d.utils import sort_numpy, bool2index, index2bool
state = pyny.Polygon.verify
pyny.Polygon.verify = False
model = self.space
light = []
for sun in self.vor_centers:
# Rotation of the whole ``pyny.Space``
polygons_photo, _, points_to_eval = model.photo(sun, False)
            # Auxiliary pyny.Surface for fast point-in-polygon (pip) management
Photo_surface = pyny.Surface(polygons_photo)
Photo_surface.lock()
# Sort/unsort points
n_points = points_to_eval.shape[0]
points_index_0 = np.arange(n_points) # _N indicates the depth level
points_to_eval, order_back = sort_numpy(points_to_eval, col=0,
order_back=True)
# Loop over the sorted (areas) Polygons
for i in model.sorted_areas:
p = points_to_eval[points_index_0][:, :2]
polygon_photo = Photo_surface[i]
index_1 = bool2index(polygon_photo.pip(p, sorted_col=0))
points_1 = points_to_eval[points_index_0[index_1]]
if points_1.shape[0] != 0:
# Rotation algebra
a, b, c = polygon_photo[:3, :]
R = np.array([b-a, c-a, np.cross(b-a, c-a)]).T
R_inv = np.linalg.inv(R)
Tr = a # Translation
# Reference point (between the Sun and the polygon)
reference_point = np.mean((a, b, c), axis=0)
reference_point[2] = reference_point[2] - 1
points_1 = np.vstack((points_1, reference_point))
points_over_polygon = np.dot(R_inv, (points_1-Tr).T).T
# Logical stuff
shadow_bool_2 = np.sign(points_over_polygon[:-1, 2]) != \
np.sign(points_over_polygon[-1, 2])
shadow_index_2 = bool2index(shadow_bool_2)
if shadow_index_2.shape[0] != 0:
points_to_remove = index_1[shadow_index_2]
points_index_0 = np.delete(points_index_0,
points_to_remove)
lighted_bool_0 = index2bool(points_index_0,
length=points_to_eval.shape[0])
# Updating the solution
light.append(lighted_bool_0[order_back])
# Storing the solution
self.light_vor = np.vstack(light)
self.light = self.light_vor[self.t2vor_map]
pyny.Polygon.verify = state
def project_data(self):
'''
        Assigns the sum of ``.integral``\* to each sensible point in the
        ``pyny.Space`` for the intervals in which the points are visible to
        the Sun.
The generated information is stored in:
* **.proj_vor** (*ndarray*): ``.integral`` projected to the
Voronoi diagram.
* **.proj_points** (*ndarray*): ``.integral`` projected to
the sensible points in the ``pyny.Space``.
:returns: None
.. note:: \* Trapezoidal data (``.arg_data``) integration over
time (``.arg_t``).
'''
from pyny3d.utils import sort_numpy
proj = self.light_vor.astype(float)
map_ = np.vstack((self.t2vor_map, self.integral)).T
map_sorted = sort_numpy(map_)
n_points = map_sorted.shape[0]
for i in range(proj.shape[0]):
a, b = np.searchsorted(map_sorted[:, 0], (i, i+1))
if b == n_points:
b = -1
proj[i, :] *= np.sum(map_sorted[a:b, 1])
self.proj_vor = np.sum(proj, axis=1)
self.proj_points = np.sum(proj, axis=0)
@staticmethod
def to_minutes(time_obj = None, dt = None):
'''
        Converts lists of ``datetime`` objects into absolute-minutes
        vectors. It can also be used to generate an absolute-minutes vector
        from a time interval (in minutes).
:param time_obj: ``datetime`` objects to convert into absolute
minutes.
:type time_obj: list of ``datetime`` objects
:param dt: Constant interval time to generate a time vector for
a whole year.
:type dt: int
:returns: Equivalent times in absolute minutes in year.
:rtype: ndarray (dtype=int)
.. note:: If the time_obj has times higher than 23:59 31 Dec,
they will be removed.
        .. note:: If a leap year is introduced, the method will remove
            the last day (31 Dec) in order to keep the series
            continuous.
'''
import datetime
if dt is not None and time_obj is None:
return np.arange(0, 365*24*60, dt, dtype = int)
elif dt is None and time_obj is not None:
if type(time_obj) == datetime.datetime:
time_obj = [time_obj]
year = time_obj[0].year
time = []
for obj in time_obj:
tt = obj.timetuple()
if year == tt.tm_year:
time.append((tt.tm_yday-1)*24*60 +
tt.tm_hour*60 +
tt.tm_min)
return np.array(time, dtype=int)
else:
raise ValueError('Input error')
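    # Illustrative call (not part of the original file):
    #     ShadowsManager.to_minutes(dt=30)
    #     # -> array([0, 30, 60, ..., 525570]); 17520 half-hour slots per year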
class Viz(object):
'''
This class stores the visualization methods. It is linked with
the ShadowsManager class by its attribute ``.viz``.
    :param ShadowsManager: ShadowsManager instance to compute the
        visualizations.
    :returns: None
    '''
    def __init__(self, ShadowsManager):
        self.SM = ShadowsManager
def vor_plot(self, which='vor'):
"""
Voronoi diagram visualizations. There are three types:
1. **vor**: Voronoi diagram of the Solar Horizont.
2. **freq**: Frequency of Sun positions in t in the Voronoi
diagram of the Solar Horizont.
3. **data**: Accumulated time integral of the data projected
in the Voronoi diagram of the Solar Horizont.
:param which: Type of visualization.
:type which: str
:returns: None
"""
import matplotlib.cm as cm
import matplotlib.pyplot as plt
sm = self.SM
if sm.light_vor is None:
raise ValueError('The computation has not been made yet')
        if which == 'vor':
title = 'Voronoi diagram of the Solar Horizont'
ax = sm.vor_surf.plot2d('b', alpha=0.15, ret=True, title=title)
ax.scatter(sm.azimuth_zenit[:, 0],sm.azimuth_zenit[:, 1], c='k')
ax.scatter(sm.vor_centers[:, 0], sm.vor_centers[:,1],
s = 30, c = 'red')
ax.set_xlabel('Solar Azimuth')
ax.set_ylabel('Solar Zenit')
plt.show()
        elif which == 'freq':
cmap = cm.Blues
title = 'Frequency of Sun positions in the Voronoi diagram '+\
'of the Solar Horizont'
ax = sm.vor_surf.plot2d(sm.vor_freq, cmap=cmap, alpha=0.85,
colorbar=True, title=title, ret=True,
cbar_label=' Freq')
ax.set_xlabel('Solar Azimuth')
ax.set_ylabel('Solar Zenit')
plt.show()
        elif which == 'data':
cmap = cm.YlOrRd
title = 'Data projected in the Voronoi diagram of the'+\
' Solar Horizont'
data = sm.proj_vor/sm.vor_freq
proj_data = data*100/data.max()
ax = sm.vor_surf.plot2d(proj_data, alpha=0.85, cmap=cmap,
colorbar=True, title=title, ret=True,
cbar_label='%')
ax.set_xlabel('Solar Azimuth')
ax.set_ylabel('Solar Zenit')
plt.title('max = '+str(data.max())+' kilounits*hour')
plt.show()
else:
raise ValueError('Invalid plot '+which)
def exposure_plot(self, places=-1, c_poly='default', c_holes='default',
s_sop=25, extra_height=0.1):
"""
Plots the exposure of the sensible points in a space to the data
and the Sun positions. It is required to previously compute the
shadowing.
If the computation has been made with a data timeseries, the plot
will have a colorbar. Units are accumulated kilounits*hour (for
the series), that is, if the input data is in Watts
(irradiation) for a whole year, the output will be
kWh received in an entire year.
        If there is no data inputted, the plot will show only the number
        of times each point "has been seen by the Sun" along the series.
:param places: Indexes of the places to plot. If -1, plots all.
:type places: int or list
:param c_poly: Polygons color.
:type c_poly: matplotlib color, 'default' or 't' (transparent)
:param c_holes: Holes color.
:type c_holes: matplotlib color, 'default' or 't' (transparent)
:param s_sop: Set of points size.
:type s_sop: float or ndarray
:param extra_height: Extra elevation for the points in the plot.
:type extra_height: float
:returns: None
"""
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib.colors as mcolors
sm = self.SM
if sm.light_vor is None:
raise ValueError('The shadowing has not been computed yet')
proj_data = sm.proj_points*100/sm.proj_points.max()
if places == -1:
places = range(len(sm.space.places))
elif type(places) == int:
places = [places]
places = np.array(places)
places[places<0] = len(sm.space.places) + places[places<0]
places = np.unique(places)
points = sm.space.get_sets_of_points()
index = sm.space.get_sets_index()
# Model plot
sop = []
data = []
aux_space = pyny.Space() # Later centering of the plot
ax=None
for i in places:
aux_space.add_places(sm.space[i])
ax = sm.space[i].iplot(c_poly=c_poly, c_holes=c_holes,
c_sop=False, ret=True, ax=ax)
sop.append(points[index==i])
data.append(proj_data[index==i])
sop = np.vstack(sop)
sop = np.vstack((sop, np.array([-1e+12, -1e+12, -1e+12])))
data = np.hstack(data)
proj_data = np.hstack((data, 0))
# Sensible points plot
## Color
cmap = cm.jet
normalize = mcolors.Normalize(vmin=proj_data.min(),
vmax=proj_data.max())
color_vector = cmap(normalize(proj_data))
## Plot
ax.scatter(sop[:, 0], sop[:, 1], sop[:, 2]+extra_height,
c=color_vector, s=s_sop)
## Axis
aux_space.center_plot(ax)
## Colorbar
scalarmappaple = cm.ScalarMappable(norm=normalize, cmap=cmap)
scalarmappaple.set_array(proj_data)
cbar = plt.colorbar(scalarmappaple, shrink=0.8, aspect=10)
cbar.ax.set_ylabel('%', rotation=0)
if not (sm.arg_data.max() == 1 and sm.arg_data.min() == 1):
plt.title('Accumulated data Projection\nmax = ' + \
str(sm.proj_points.max()) + \
' kilounits*hour')
else:
plt.title('Sun exposure')
```
#### File: pyny3d/pyny3d/utils.py
```python
import numpy as np
'''
General purpose functions to clean up the code in critical areas.
'''
def sort_numpy(array, col=0, order_back=False):
"""
    Sorts the rows of an entire ``ndarray`` according to the values in
    one of its columns.
:param array: Array to sort.
:type array: ndarray
:param col: Master column to sort.
:type col: int
:param order_back: If True, also returns the index to undo the
new order.
:type order_back: bool
:returns: sorted_array or [sorted_array, order_back]
:rtype: ndarray, list
"""
x = array[:,col]
sorted_index = np.argsort(x, kind = 'quicksort')
sorted_array = array[sorted_index]
if not order_back:
return sorted_array
else:
n_points = sorted_index.shape[0]
order_back = np.empty(n_points, dtype=int)
order_back[sorted_index] = np.arange(n_points)
return [sorted_array, order_back]
def arange_col(n, dtype=int):
"""
Returns ``np.arange`` in a column form.
:param n: Length of the array.
:type n: int
:param dtype: Type of the array.
:type dtype: type
:returns: ``np.arange`` in a column form.
:rtype: ndarray
"""
return np.reshape(np.arange(n, dtype = dtype), (n, 1))
def bool2index(bool_):
"""
Returns a numpy array with the indices where bool\_ is True.
:param bool_: bool array to extract Trues positions.
:type bool_: ndarray (type=bool)
:returns: Array with the indices where bool\_ is True.
:rtype: ndarray
.. seealso:: :func:`index2bool`
"""
return np.arange(bool_.shape[0])[bool_]
def index2bool(index, length=None):
"""
Returns a numpy boolean array with Trues in the input index
positions.
:param index: index array with the Trues positions.
:type index: ndarray (type=int)
:param length: Length of the returned array.
:type length: int or None
:returns: array with Trues in the input index positions.
:rtype: ndarray
.. seealso:: :func:`bool2index`
"""
if index.shape[0] == 0 and length is None:
return np.arange(0, dtype = bool)
if length is None: length = index.max()+1
sol = np.zeros(length, dtype=bool)
sol[index] = True
return sol
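# Illustrative round trip between the two helpers above (not part of the
# original file):
#     b = np.array([True, False, True])
#     idx = bool2index(b)        # -> array([0, 2])
#     index2bool(idx, length=3)  # -> array([ True, False,  True])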
``` |
{
"source": "Joseantonio-96/BigData_Uni",
"score": 3
} |
#### File: BigData_Uni/K-Means_Python_R/kmeans_serial.py
```python
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
import pandas as pd
import timeit
import seaborn as sns
def elbow(vals, timer_start):
plt.figure('Elbow Graph showing the optimal amount of clusters k')
plt.plot(range(1, 10), vals, 'bx-')
plt.xlabel('k')
plt.ylabel('Distortion')
print(f'Time up to here is: {timeit.default_timer() - timer_start}')
plt.show()
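# Note (illustrative, not part of the original file): the "elbow" is the k at
# which the distortion curve stops dropping sharply; beyond it, extra clusters
# add little, so that k is picked visually from this plot.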
def centroids(kmeanModel, labels):
cluster = plt.figure(1)
plt.scatter(list(kmeanModel.cluster_centers_[:, 0]), list(kmeanModel.cluster_centers_[:, 1]), cmap='rainbow')
plt.xlabel(labels[0])
plt.ylabel(labels[1])
plt.title('Price and Speed of the cluster centroids')
def heatmap(df, timer_start):
avg = df.groupby(['Cluster']).mean()
Price_max = round(avg['price'].max(), 2)
print(f'Cluster with highest Price avg has price = {Price_max} ')
# Plotting the heatmap
heat = plt.figure('Heatmap for the cluster centroids')
avg = avg.drop(['id', 'cd', 'multi', 'premium'], axis=1)
sns.heatmap(avg, annot=True, cmap="YlGnBu")
print(f'Time of second part: {timeit.default_timer() - timer_start}')
plt.show()
if __name__ == "__main__":
# load csv
start = timeit.default_timer()
df = pd.read_csv('computers.csv', encoding="ISO-8859-1", sep=";")
df = df.rename(columns={df.columns[0]: "id"})
# make values numeric
df[['cd', 'multi', 'premium']] = df[['cd', 'multi', 'premium']].replace({'yes': 1, 'no': 0})
# to numpy array
data = df.values[:, 1:]
labels = df.columns[1:]
# plot the elbow graph
elbow([KMeans(n_clusters=k).fit(data).inertia_ for k in range(1, 10)], start)
# fit the data
n_clusters = int(input('How many clusters?: '))
restart_timer = timeit.default_timer()
kmeanModel = KMeans(n_clusters)
kmeanModel.fit(data)
identified_clusters = kmeanModel.fit_predict(data)
df['Cluster'] = identified_clusters
# Plotting the centroids of first two dimensions
centroids(kmeanModel, labels)
# Getting the average of each cluster
heatmap(df, restart_timer)
```
#### File: BigData_Uni/Parallelization_Python/lab01_parallel_multiproc.py
```python
from pandas import read_csv, concat
from multiprocessing import Pool, cpu_count
from timeit import default_timer
import lab01_serial
def parallelize_dataframe(func, n_cores=cpu_count()):
pool = Pool(n_cores)
df = concat(pool.map(func, read_csv('proteins.csv', sep=';', chunksize=21000, index_col='structureId')))
pool.close()
return df
def search_pattern(df):
return df['sequence'].str.count(pat)
pat = str.upper(input('Pattern: '))
start = default_timer()
occ = parallelize_dataframe(search_pattern)
print(f'Time is {default_timer()-start}')
print('No matches for the given pattern' if occ.max() < 1 else f'Protein that has the most matches has ID {occ.idxmax()} with {occ.max()} occurrences')
lab01_serial.pat_plot(occ, pat)
``` |
{
"source": "Joseantoniomr99/Estandares-de-datos",
"score": 3
} |
#### File: Joseantoniomr99/Estandares-de-datos/Polen.py
```python
from flask import Flask, render_template, redirect, request
from flask_pymongo import pymongo
client = pymongo.MongoClient("mongodb+srv://Emanuela:<EMAIL>/myFirstDatabase?retryWrites=true&w=majority")
mongo_db = client.get_database('AIRE')
mongo_col = pymongo.collection.Collection(mongo_db, 'coleccionPolen')
app = Flask(__name__)
@app.route('/')
def pagina_principal():
return render_template("home.html")
@app.route('/samples')
def samples():
samples = mongo_col.find()
return render_template("samples.html.j2", users = samples, mostrar_enlace = True)
@app.route('/form', methods = ['GET', 'POST'])
def form():
if request.method == 'GET':
return render_template("form.html.j2")
elif request.method == 'POST':
date = request.form['date']
Alnus = request.form['Alnus']
Betulus = request.form['Betulus']
Taxus = request.form['Taxus']
Fraxinus = request.form['Fraxinus']
Poaceae = request.form['Poaceae']
Quercus = request.form['Quercus']
Ulmus = request.form['Ulmus']
Urtica = request.form['Urtica']
usuario = {"date" : date, "Alnus": Alnus, "Betulus" : Betulus, "Taxus": Taxus, "Fraxinus": Fraxinus, "Poaceae": Poaceae, "Quercus": Quercus, "Ulmus": Ulmus, "Urtica": Urtica}
mongo_col.insert_one(usuario)
return redirect("/samples")
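# Note (illustrative, not part of the original file): request.form values
# arrive as strings, so the inserted document looks like, e.g.:
#     {"date": "2021-05-01", "Alnus": "12", "Betulus": "0", ...}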
if __name__ == '__main__':
app.run(debug=True)
``` |
{
"source": "JoseAntonioSiguenza/deepchem",
"score": 2
} |
#### File: feat/molecule_featurizers/dmpnn_featurizer.py
```python
from rdkit import Chem
from typing import List, Tuple, Union, Dict, Set, Sequence
import deepchem as dc
from deepchem.utils.typing import RDKitAtom
from deepchem.utils.molecule_feature_utils import one_hot_encode
from deepchem.utils.molecule_feature_utils import get_atom_total_degree_one_hot
from deepchem.utils.molecule_feature_utils import get_atom_formal_charge_one_hot
from deepchem.utils.molecule_feature_utils import get_atom_total_num_Hs_one_hot
from deepchem.utils.molecule_feature_utils import get_atom_hybridization_one_hot
from deepchem.utils.molecule_feature_utils import get_atom_is_in_aromatic_one_hot
from deepchem.feat.graph_features import bond_features as b_Feats
class GraphConvConstants(object):
"""
A class for holding featurization parameters.
"""
MAX_ATOMIC_NUM = 100
ATOM_FEATURES: Dict[str, List[int]] = {
'atomic_num': list(range(MAX_ATOMIC_NUM)),
'degree': [0, 1, 2, 3, 4, 5],
'formal_charge': [-1, -2, 1, 2, 0],
'chiral_tag': [0, 1, 2, 3],
'num_Hs': [0, 1, 2, 3, 4]
}
ATOM_FEATURES_HYBRIDIZATION: List[str] = ["SP", "SP2", "SP3", "SP3D", "SP3D2"]
"""Dimension of atom feature vector"""
ATOM_FDIM = sum(len(choices) + 1 for choices in ATOM_FEATURES.values()) + len(
ATOM_FEATURES_HYBRIDIZATION) + 1 + 2
# len(choices) +1 and len(ATOM_FEATURES_HYBRIDIZATION) +1 to include room for unknown set
# + 2 at end for is_in_aromatic and mass
BOND_FDIM = 14
def get_atomic_num_one_hot(atom: RDKitAtom,
allowable_set: List[int],
include_unknown_set: bool = True) -> List[float]:
"""Get a one-hot feature about atomic number of the given atom.
Parameters
  ----------
atom: rdkit.Chem.rdchem.Atom
RDKit atom object
allowable_set: List[int]
The range of atomic numbers to consider.
  include_unknown_set: bool, default True
If true, the index of all types not in `allowable_set` is `len(allowable_set)`.
Returns
-------
List[float]
A one-hot vector of atomic number of the given atom.
If `include_unknown_set` is False, the length is `len(allowable_set)`.
If `include_unknown_set` is True, the length is `len(allowable_set) + 1`.
"""
return one_hot_encode(atom.GetAtomicNum() - 1, allowable_set,
include_unknown_set)
def get_atom_chiral_tag_one_hot(
atom: RDKitAtom,
allowable_set: List[int],
include_unknown_set: bool = True) -> List[float]:
"""Get a one-hot feature about chirality of the given atom.
Parameters
  ----------
atom: rdkit.Chem.rdchem.Atom
RDKit atom object
allowable_set: List[int]
The list of chirality tags to consider.
  include_unknown_set: bool, default True
If true, the index of all types not in `allowable_set` is `len(allowable_set)`.
Returns
-------
List[float]
A one-hot vector of chirality of the given atom.
If `include_unknown_set` is False, the length is `len(allowable_set)`.
If `include_unknown_set` is True, the length is `len(allowable_set) + 1`.
"""
return one_hot_encode(atom.GetChiralTag(), allowable_set, include_unknown_set)
def get_atom_mass(atom: RDKitAtom) -> List[float]:
"""Get vector feature containing downscaled mass of the given atom.
Parameters
  ----------
atom: rdkit.Chem.rdchem.Atom
RDKit atom object
Returns
-------
List[float]
A vector of downscaled mass of the given atom.
"""
return [atom.GetMass() * 0.01]
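# Illustrative value (not part of the original file): for a carbon atom
# (mass ~12.011), get_atom_mass returns ~[0.12011] after the 0.01 downscaling.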
def atom_features(
atom: Chem.rdchem.Atom,
functional_groups: List[int] = None,
only_atom_num: bool = False) -> Sequence[Union[bool, int, float]]:
"""Helper method used to compute atom feature vector.
  DeepChem already contains an atom_features function; however, we define a new one here to handle features specific to DMPNN.
Parameters
----------
atom: RDKit.Chem.rdchem.Atom
Atom to compute features on.
functional_groups: List[int]
A k-hot vector indicating the functional groups the atom belongs to.
Default value is None
only_atom_num: bool
Toggle to build a feature vector for an atom containing only the atom number information.
Returns
-------
features: Sequence[Union[bool, int, float]]
A list of atom features.
Examples
--------
>>> from rdkit import Chem
>>> mol = Chem.MolFromSmiles('C')
>>> atom = mol.GetAtoms()[0]
>>> features = dc.feat.molecule_featurizers.dmpnn_featurizer.atom_features(atom)
>>> type(features)
<class 'list'>
>>> len(features)
133
"""
if atom is None:
features: Sequence[Union[bool, int,
float]] = [0] * GraphConvConstants.ATOM_FDIM
elif only_atom_num:
features = []
features += get_atomic_num_one_hot(
atom, GraphConvConstants.ATOM_FEATURES['atomic_num'])
features += [0] * (
GraphConvConstants.ATOM_FDIM - GraphConvConstants.MAX_ATOMIC_NUM - 1
) # set other features to zero
else:
features = []
features += get_atomic_num_one_hot(
atom, GraphConvConstants.ATOM_FEATURES['atomic_num'])
features += get_atom_total_degree_one_hot(
atom, GraphConvConstants.ATOM_FEATURES['degree'])
features += get_atom_formal_charge_one_hot(
atom, GraphConvConstants.ATOM_FEATURES['formal_charge'])
features += get_atom_chiral_tag_one_hot(
atom, GraphConvConstants.ATOM_FEATURES['chiral_tag'])
features += get_atom_total_num_Hs_one_hot(
atom, GraphConvConstants.ATOM_FEATURES['num_Hs'])
features += get_atom_hybridization_one_hot(
atom, GraphConvConstants.ATOM_FEATURES_HYBRIDIZATION, True)
features += get_atom_is_in_aromatic_one_hot(atom)
features = [int(feature) for feature in features]
features += get_atom_mass(atom)
if functional_groups is not None:
features += functional_groups
return features
def bond_features(bond: Chem.rdchem.Bond) -> Sequence[Union[bool, int, float]]:
  Wrapper function for the bond_features() already available in DeepChem, used to compute the bond feature vector.
Parameters
----------
bond: rdkit.Chem.rdchem.Bond
Bond to compute features on.
Returns
-------
features: Sequence[Union[bool, int, float]]
A list of bond features.
Examples
--------
>>> from rdkit import Chem
>>> mol = Chem.MolFromSmiles('CC')
>>> bond = mol.GetBondWithIdx(0)
>>> b_features = dc.feat.molecule_featurizers.dmpnn_featurizer.bond_features(bond)
>>> type(b_features)
<class 'list'>
>>> len(b_features)
14
"""
if bond is None:
b_features: Sequence[Union[
bool, int, float]] = [1] + [0] * (GraphConvConstants.BOND_FDIM - 1)
else:
b_features = [0] + b_Feats(bond, use_extended_chirality=True)
return b_features
def map_reac_to_prod(
mol_reac: Chem.Mol,
mol_prod: Chem.Mol) -> Tuple[Dict[int, int], List[int], List[int]]:
"""
Function to build a dictionary of mapping atom indices in the reactants to the products.
Parameters
----------
mol_reac: Chem.Mol
An RDKit molecule of the reactants.
mol_prod: Chem.Mol
An RDKit molecule of the products.
Returns
-------
mappings: Tuple[Dict[int,int],List[int],List[int]]
A tuple containing a dictionary of corresponding reactant and product atom indices,
list of atom ids of product not part of the mapping and
list of atom ids of reactant not part of the mapping
"""
only_prod_ids: List[int] = []
prod_map_to_id: Dict[int, int] = {}
mapnos_reac: Set[int] = set(
[atom.GetAtomMapNum() for atom in mol_reac.GetAtoms()])
for atom in mol_prod.GetAtoms():
mapno = atom.GetAtomMapNum()
if (mapno > 0):
prod_map_to_id[mapno] = atom.GetIdx()
if (mapno not in mapnos_reac):
only_prod_ids.append(atom.GetIdx())
else:
only_prod_ids.append(atom.GetIdx())
only_reac_ids: List[int] = []
reac_id_to_prod_id: Dict[int, int] = {}
for atom in mol_reac.GetAtoms():
mapno = atom.GetAtomMapNum()
if (mapno > 0):
try:
reac_id_to_prod_id[atom.GetIdx()] = prod_map_to_id[mapno]
except KeyError:
only_reac_ids.append(atom.GetIdx())
else:
only_reac_ids.append(atom.GetIdx())
mappings: Tuple[Dict[int, int], List[int],
List[int]] = (reac_id_to_prod_id, only_prod_ids,
only_reac_ids)
return mappings
``` |
{
"source": "JoseAntonioVelasco/Python_Conn4Web",
"score": 4
} |
#### File: website/con4/utilities.py
```python
def insideBoard(board, x, y):
"""detecta si la ficha que quieres poner esta dentro de los limites del tablero
Args:
board (list): lista que contiene el tablero
x (int): posicion x en la que quieres poner en el tablero
y (int): posicion y en la que quieres poner en el tablero
Returns:
boolean: si esta dentro true si no false
"""
columns = len(board[0])
rows = len(board)
    # A position is inside the board iff both coordinates are within range.
    return x in range(columns) and y in range(rows)
def legalMove(board, x, y):
"""Comprueba si el movimiento es valido, que no se superponga sobre otras fichas,
este dentro del tablero y que no este flotando la ficha
Args:
board (list): lista que contiene el tablero
x (int): posicion x en la que quieres poner en el tablero
y (int): posicion y en la que quieres poner en el tablero
Returns:
boolean: si el movimiento es valido
"""
    if not insideBoard(board, x, y):
        return False
    cond1 = board[y][x] == 'free'
    cond2 = (board[y-1][x] == 'red') or (board[y-1][x] == 'yellow')
    cond3 = (y == 0)
    # The target cell must be free, and must either rest on another piece
    # (cond2) or sit on the bottom row (cond3).
    return cond1 and (cond2 or cond3)
def win(board, x, y, turn):
"""detecta si en ese movimiento hay victoria
Args:
board (list): lista que contiene el tablero
x (int): posicion x en la que quieres poner en el tablero
y (int): posicion y en la que quieres poner en el tablero
turn (string): que jugador ha puesto, ej: 'X'/'O' o 'red'/'yellow'
Returns:
boolean: true si hay victoria, false si no hay victoria
"""
directions = [[[0, 1], [0, -1]], [[1, 1], [-1, -1]],
[[1, 0], [-1, 0]], [[1, -1], [-1, 1]]]
for direction in directions:
counter = 0
for vector in direction:
            x_ = x
            y_ = y
            for _ in range(4):
x_ = x_ + vector[0]
y_ = y_ + vector[1]
cond = insideBoard(board, x_, y_) and board[y_][x_] == turn
if cond:
counter = counter + 1
else:
break
        # >= 3 so that connecting more than four in a row also counts as a win.
        if counter >= 3:
return True
return False
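# Illustrative usage (not part of the original file), assuming a 6x7 board
# encoded as board[y][x] with y == 0 at the bottom row:
#     board = [['free'] * 7 for _ in range(6)]
#     legalMove(board, 3, 0)  # True: the bottom of column 3 is free
#     legalMove(board, 3, 1)  # False: the piece would be left floating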
``` |
{
"source": "JoseAntpr/mooviest-back",
"score": 3
} |
#### File: mooviest-back/person/models.py
```python
from django.db import models
from common.models import Lang
FEMALE = 1
MALE = 2
GENDER_CHOICES = (
(FEMALE, "Female"),
(MALE, "Male")
)
class Person(models.Model):
"""
    `Person` represents a person who works in the cinema world; they could be
    an actor, producer, director ...
Attributes:
"""
name = models.CharField(max_length=100)
birthday = models.DateField(null=True, blank=True)
death = models.DateField(null=True, blank=True)
gender = models.IntegerField(choices=GENDER_CHOICES, null=True, blank=True)
place_of_birthday = models.CharField(max_length=100, null=True, blank=True)
image = models.URLField(null=True, blank=True)
homepage = models.URLField(null=True, blank=True)
langs = models.ManyToManyField(Lang, through='Person_lang', blank=True)
def __str__(self):
return self.name
class Person_lang(models.Model):
"""
    `Person_lang` is an extension to Person for language translation.
Attributes:
"""
person = models.ForeignKey(Person, on_delete=models.CASCADE)
lang = models.ForeignKey(Lang, on_delete=models.CASCADE)
biography = models.TextField()
class Job (models.Model):
"""
    `Job` represents different movie jobs.
Attributes:
"""
langs = models.ManyToManyField(Lang, through='Job_lang')
class Job_lang(models.Model):
"""
    `Job_lang` is an extension to Job for language translation.
Attributes:
"""
    job = models.ForeignKey(Job, on_delete=models.CASCADE)
lang = models.ForeignKey(Lang, on_delete=models.CASCADE)
name = models.CharField(max_length=100)
``` |
{
"source": "joseapicon/indabom",
"score": 2
} |
#### File: indabom/indabom/views.py
```python
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseServerError, HttpResponseNotFound
from django.contrib.auth import authenticate, login, get_user_model
from django.template.response import TemplateResponse
from django.urls import reverse
from django.views.generic.base import TemplateView
from indabom.settings import DEBUG
from indabom.forms import UserForm
from urllib.error import URLError
def index(request):
if request.user.is_authenticated:
return HttpResponseRedirect(reverse('bom:home'))
else:
return TemplateResponse(request, 'indabom/index.html', locals())
def handler404(request, exception=None, *args, **kwargs):
return HttpResponseNotFound(render(request, 'indabom/404.html', status=404, context=locals()))
def handler500(request):
return HttpResponseServerError(render(request, 'indabom/500.html', status=500))
def signup(request):
name = 'signup'
if request.method == 'POST':
form = UserForm(request.POST)
try:
if form.is_valid():
new_user = form.save()
login(request, new_user, backend='django.contrib.auth.backends.ModelBackend')
return HttpResponseRedirect(reverse('bom:home'))
except URLError:
if DEBUG and len(form.errors.keys()) == 1 and 'captcha' in form.errors.keys():
new_user = form.save()
login(request, new_user, backend='django.contrib.auth.backends.ModelBackend')
return HttpResponseRedirect(reverse('bom:home'))
else:
form = UserForm()
return TemplateResponse(request, 'indabom/signup.html', locals())
class IndabomTemplateView(TemplateView):
name = None
def __init__(self, *args, **kwargs):
super().__init__(**kwargs)
self.template_name = f'indabom/{self.name}.html'
def get_context_data(self, *args, **kwargs):
context = super(IndabomTemplateView, self).get_context_data(**kwargs)
context['name'] = self.name
return context
class About(IndabomTemplateView):
name = 'about'
class LearnMore(IndabomTemplateView):
name = 'learn-more'
class PrivacyPolicy(IndabomTemplateView):
name = 'privacy-policy'
class TermsAndConditions(IndabomTemplateView):
name = 'terms-and-conditions'
class Install(IndabomTemplateView):
name = 'install'
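# Hypothetical urls.py wiring for the views above (paths and names are
# illustrative, not taken from the repo):
# urlpatterns = [
#     path('', index, name='index'),
#     path('signup/', signup, name='signup'),
#     path('about/', About.as_view(), name='about'),
# ]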
``` |
{
"source": "JoseAred/time_series_augmentation",
"score": 2
} |
#### File: time_series_augmentation/utils/augmentation.py
```python
import numpy as np
from tqdm import tqdm
def jitter(x, sigma=0.03):
# https://arxiv.org/pdf/1706.00527.pdf
return x + np.random.normal(loc=0., scale=sigma, size=x.shape)
def scaling(x, sigma=0.1):
# https://arxiv.org/pdf/1706.00527.pdf
factor = np.random.normal(loc=1., scale=sigma, size=(x.shape[0],x.shape[2]))
return np.multiply(x, factor[:,np.newaxis,:])
def rotation(x):
flip = np.random.choice([-1, 1], size=(x.shape[0],x.shape[2]))
rotate_axis = np.arange(x.shape[2])
np.random.shuffle(rotate_axis)
return flip[:,np.newaxis,:] * x[:,:,rotate_axis]
def permutation(x, max_segments=5, seg_mode="equal"):
orig_steps = np.arange(x.shape[1])
num_segs = np.random.randint(1, max_segments, size=(x.shape[0]))
ret = np.zeros_like(x)
for i, pat in enumerate(x):
if num_segs[i] > 1:
if seg_mode == "random":
split_points = np.random.choice(x.shape[1]-2, num_segs[i]-1, replace=False)
split_points.sort()
splits = np.split(orig_steps, split_points)
else:
splits = np.array_split(orig_steps, num_segs[i])
warp = np.concatenate(np.random.permutation(splits)).ravel()
ret[i] = pat[warp]
else:
ret[i] = pat
return ret
def magnitude_warp(x, sigma=0.2, knot=4):
from scipy.interpolate import CubicSpline
orig_steps = np.arange(x.shape[1])
random_warps = np.random.normal(loc=1.0, scale=sigma, size=(x.shape[0], knot+2, x.shape[2]))
warp_steps = (np.ones((x.shape[2],1))*(np.linspace(0, x.shape[1]-1., num=knot+2))).T
ret = np.zeros_like(x)
for i, pat in enumerate(x):
warper = np.array([CubicSpline(warp_steps[:,dim], random_warps[i,:,dim])(orig_steps) for dim in range(x.shape[2])]).T
ret[i] = pat * warper
return ret
def time_warp(x, sigma=0.2, knot=4):
from scipy.interpolate import CubicSpline
orig_steps = np.arange(x.shape[1])
random_warps = np.random.normal(loc=1.0, scale=sigma, size=(x.shape[0], knot+2, x.shape[2]))
warp_steps = (np.ones((x.shape[2],1))*(np.linspace(0, x.shape[1]-1., num=knot+2))).T
ret = np.zeros_like(x)
for i, pat in enumerate(x):
for dim in range(x.shape[2]):
            warped_steps = CubicSpline(warp_steps[:,dim], warp_steps[:,dim] * random_warps[i,:,dim])(orig_steps)
            scale = (x.shape[1]-1)/warped_steps[-1]
            ret[i,:,dim] = np.interp(orig_steps, np.clip(scale*warped_steps, 0, x.shape[1]-1), pat[:,dim]).T
return ret
def window_slice(x, reduce_ratio=0.9):
# https://halshs.archives-ouvertes.fr/halshs-01357973/document
target_len = np.ceil(reduce_ratio*x.shape[1]).astype(int)
if target_len >= x.shape[1]:
return x
starts = np.random.randint(low=0, high=x.shape[1]-target_len, size=(x.shape[0])).astype(int)
ends = (target_len + starts).astype(int)
ret = np.zeros_like(x)
for i, pat in enumerate(x):
for dim in range(x.shape[2]):
ret[i,:,dim] = np.interp(np.linspace(0, target_len, num=x.shape[1]), np.arange(target_len), pat[starts[i]:ends[i],dim]).T
return ret
def window_warp(x, window_ratio=0.1, scales=[0.5, 2.]):
# https://halshs.archives-ouvertes.fr/halshs-01357973/document
warp_scales = np.random.choice(scales, x.shape[0])
warp_size = np.ceil(window_ratio*x.shape[1]).astype(int)
window_steps = np.arange(warp_size)
window_starts = np.random.randint(low=1, high=x.shape[1]-warp_size-1, size=(x.shape[0])).astype(int)
window_ends = (window_starts + warp_size).astype(int)
ret = np.zeros_like(x)
for i, pat in enumerate(x):
for dim in range(x.shape[2]):
start_seg = pat[:window_starts[i],dim]
window_seg = np.interp(np.linspace(0, warp_size-1, num=int(warp_size*warp_scales[i])), window_steps, pat[window_starts[i]:window_ends[i],dim])
end_seg = pat[window_ends[i]:,dim]
warped = np.concatenate((start_seg, window_seg, end_seg))
ret[i,:,dim] = np.interp(np.arange(x.shape[1]), np.linspace(0, x.shape[1]-1., num=warped.size), warped).T
return ret
def spawner(x, labels, sigma=0.05, verbose=0):
# https://www.ncbi.nlm.nih.gov/pmc/articles/PMC6983028/
import utils.dtw as dtw
random_points = np.random.randint(low=1, high=x.shape[1]-1, size=x.shape[0])
window = np.ceil(x.shape[1] / 10.).astype(int)
orig_steps = np.arange(x.shape[1])
l = np.argmax(labels, axis=1) if labels.ndim > 1 else labels
ret = np.zeros_like(x)
for i, pat in enumerate(tqdm(x)):
        # guarantees that the same pattern isn't selected
        choices = np.delete(np.arange(x.shape[0]), i)
        # keep only candidates of the same class (index back into the original array)
        choices = choices[l[choices] == l[i]]
if choices.size > 0:
random_sample = x[np.random.choice(choices)]
# SPAWNER splits the path into two randomly
path1 = dtw.dtw(pat[:random_points[i]], random_sample[:random_points[i]], dtw.RETURN_PATH, slope_constraint="symmetric", window=window)
path2 = dtw.dtw(pat[random_points[i]:], random_sample[random_points[i]:], dtw.RETURN_PATH, slope_constraint="symmetric", window=window)
combined = np.concatenate((np.vstack(path1), np.vstack(path2+random_points[i])), axis=1)
if verbose:
print(random_points[i])
                dtw_value, cost, DTW_map, path = dtw.dtw(pat, random_sample, return_flag=dtw.RETURN_ALL, slope_constraint="symmetric", window=window)
dtw.draw_graph1d(cost, DTW_map, path, pat, random_sample)
dtw.draw_graph1d(cost, DTW_map, combined, pat, random_sample)
mean = np.mean([pat[combined[0]], random_sample[combined[1]]], axis=0)
for dim in range(x.shape[2]):
ret[i,:,dim] = np.interp(orig_steps, np.linspace(0, x.shape[1]-1., num=mean.shape[0]), mean[:,dim]).T
else:
print("There is only one pattern of class %d, skipping pattern average"%l[i])
ret[i,:] = pat
return jitter(ret, sigma=sigma)
def wdba(x, labels, batch_size=6, slope_constraint="symmetric", use_window=True):
# https://ieeexplore.ieee.org/document/8215569
import utils.dtw as dtw
if use_window:
window = np.ceil(x.shape[1] / 10.).astype(int)
else:
window = None
orig_steps = np.arange(x.shape[1])
l = np.argmax(labels, axis=1) if labels.ndim > 1 else labels
ret = np.zeros_like(x)
for i in tqdm(range(ret.shape[0])):
# get the same class as i
choices = np.where(l == l[i])[0]
if choices.size > 0:
# pick random intra-class pattern
k = min(choices.size, batch_size)
random_prototypes = x[np.random.choice(choices, k, replace=False)]
# calculate dtw between all
dtw_matrix = np.zeros((k, k))
for p, prototype in enumerate(random_prototypes):
for s, sample in enumerate(random_prototypes):
if p == s:
dtw_matrix[p, s] = 0.
else:
dtw_matrix[p, s] = dtw.dtw(prototype, sample, dtw.RETURN_VALUE, slope_constraint=slope_constraint, window=window)
# get medoid
medoid_id = np.argsort(np.sum(dtw_matrix, axis=1))[0]
nearest_order = np.argsort(dtw_matrix[medoid_id])
medoid_pattern = random_prototypes[medoid_id]
# start weighted DBA
average_pattern = np.zeros_like(medoid_pattern)
weighted_sums = np.zeros((medoid_pattern.shape[0]))
for nid in nearest_order:
if nid == medoid_id or dtw_matrix[medoid_id, nearest_order[1]] == 0.:
average_pattern += medoid_pattern
weighted_sums += np.ones_like(weighted_sums)
else:
path = dtw.dtw(medoid_pattern, random_prototypes[nid], dtw.RETURN_PATH, slope_constraint=slope_constraint, window=window)
dtw_value = dtw_matrix[medoid_id, nid]
warped = random_prototypes[nid, path[1]]
weight = np.exp(np.log(0.5)*dtw_value/dtw_matrix[medoid_id, nearest_order[1]])
average_pattern[path[0]] += weight * warped
weighted_sums[path[0]] += weight
ret[i,:] = average_pattern / weighted_sums[:,np.newaxis]
else:
print("There is only one pattern of class %d, skipping pattern average"%l[i])
ret[i,:] = x[i]
return ret
# Proposed
def dtw_warp(x, labels, slope_constraint="symmetric", use_window=True, dtw_type="normal"):
import utils.dtw as dtw
if use_window:
window = np.ceil(x.shape[1] / 10.).astype(int)
else:
window = None
orig_steps = np.arange(x.shape[1])
l = np.argmax(labels, axis=1) if labels.ndim > 1 else labels
ret = np.zeros_like(x)
for i, pat in enumerate(tqdm(x)):
        # guarantees that the same pattern isn't selected
        choices = np.delete(np.arange(x.shape[0]), i)
        # keep only candidates of the same class (index back into the original array)
        choices = choices[l[choices] == l[i]]
if choices.size > 0:
# pick random intra-class pattern
random_prototype = x[np.random.choice(choices)]
if dtw_type == "shape":
path = dtw.shape_dtw(random_prototype, pat, dtw.RETURN_PATH, slope_constraint=slope_constraint, window=window)
else:
path = dtw.dtw(random_prototype, pat, dtw.RETURN_PATH, slope_constraint=slope_constraint, window=window)
# Time warp
warped = pat[path[1]]
for dim in range(x.shape[2]):
ret[i,:,dim] = np.interp(orig_steps, np.linspace(0, x.shape[1]-1., num=warped.shape[0]), warped[:,dim]).T
else:
print("There is only one pattern of class %d, skipping timewarping"%l[i])
ret[i,:] = pat
return ret
def shape_dtw_warp(x, labels, slope_constraint="symmetric", use_window=True):
return dtw_warp(x, labels, slope_constraint, use_window, dtw_type="shape")
def discriminative_dtw_warp(x, labels, batch_size=6, slope_constraint="symmetric", use_window=True, dtw_type="normal", use_variable_slice=True):
import utils.dtw as dtw
if use_window:
window = np.ceil(x.shape[1] / 10.).astype(int)
else:
window = None
orig_steps = np.arange(x.shape[1])
l = np.argmax(labels, axis=1) if labels.ndim > 1 else labels
positive_batch = np.ceil(batch_size / 2).astype(int)
negative_batch = np.floor(batch_size / 2).astype(int)
ret = np.zeros_like(x)
warp_amount = np.zeros(x.shape[0])
for i, pat in enumerate(tqdm(x)):
        # guarantees that the same pattern isn't selected
        choices = np.delete(np.arange(x.shape[0]), i)
        # split the remaining candidates by class (indices into the original array)
        positive = choices[l[choices] == l[i]]
        negative = choices[l[choices] != l[i]]
if positive.size > 0 and negative.size > 0:
pos_k = min(positive.size, positive_batch)
neg_k = min(negative.size, negative_batch)
positive_prototypes = x[np.random.choice(positive, pos_k, replace=False)]
negative_prototypes = x[np.random.choice(negative, neg_k, replace=False)]
# vector embedding and nearest prototype in one
pos_aves = np.zeros((pos_k))
neg_aves = np.zeros((pos_k))
if dtw_type == "shape":
for p, pos_prot in enumerate(positive_prototypes):
for ps, pos_samp in enumerate(positive_prototypes):
if p != ps:
pos_aves[p] += (1./(pos_k-1.))*dtw.shape_dtw(pos_prot, pos_samp, dtw.RETURN_VALUE, slope_constraint=slope_constraint, window=window)
for ns, neg_samp in enumerate(negative_prototypes):
neg_aves[p] += (1./neg_k)*dtw.shape_dtw(pos_prot, neg_samp, dtw.RETURN_VALUE, slope_constraint=slope_constraint, window=window)
selected_id = np.argmax(neg_aves - pos_aves)
path = dtw.shape_dtw(positive_prototypes[selected_id], pat, dtw.RETURN_PATH, slope_constraint=slope_constraint, window=window)
else:
for p, pos_prot in enumerate(positive_prototypes):
for ps, pos_samp in enumerate(positive_prototypes):
if p != ps:
pos_aves[p] += (1./(pos_k-1.))*dtw.dtw(pos_prot, pos_samp, dtw.RETURN_VALUE, slope_constraint=slope_constraint, window=window)
for ns, neg_samp in enumerate(negative_prototypes):
neg_aves[p] += (1./neg_k)*dtw.dtw(pos_prot, neg_samp, dtw.RETURN_VALUE, slope_constraint=slope_constraint, window=window)
selected_id = np.argmax(neg_aves - pos_aves)
path = dtw.dtw(positive_prototypes[selected_id], pat, dtw.RETURN_PATH, slope_constraint=slope_constraint, window=window)
# Time warp
warped = pat[path[1]]
warp_path_interp = np.interp(orig_steps, np.linspace(0, x.shape[1]-1., num=warped.shape[0]), path[1])
warp_amount[i] = np.sum(np.abs(orig_steps-warp_path_interp))
for dim in range(x.shape[2]):
ret[i,:,dim] = np.interp(orig_steps, np.linspace(0, x.shape[1]-1., num=warped.shape[0]), warped[:,dim]).T
else:
print("There is only one pattern of class %d"%l[i])
ret[i,:] = pat
warp_amount[i] = 0.
if use_variable_slice:
max_warp = np.max(warp_amount)
if max_warp == 0:
# unchanged
ret = window_slice(ret, reduce_ratio=0.9)
else:
for i, pat in enumerate(ret):
                # variable slicing: the more a pattern was warped, the less it is cropped
ret[i] = window_slice(pat[np.newaxis,:,:], reduce_ratio=0.9+0.1*warp_amount[i]/max_warp)[0]
return ret
def discriminative_shape_dtw_warp(x, labels, batch_size=6, slope_constraint="symmetric", use_window=True):
return discriminative_dtw_warp(x, labels, batch_size, slope_constraint, use_window, dtw_type="shape")
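if __name__ == "__main__":
    # Minimal smoke test (illustrative): these functions expect arrays
    # shaped (batch, time, channels).
    x = np.random.randn(4, 100, 3)
    print(jitter(x).shape, scaling(x).shape, permutation(x).shape)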
``` |
{
"source": "josearthur/bbb-v2",
"score": 3
} |
#### File: bbb-v2/src/account_manager.py
```python
import threading
import datetime
import time
from queue import Queue
from threading import Thread, BoundedSemaphore
from colorama import Fore, Back
from user_bot import UserBot
from utils import read_configuration_file
config = read_configuration_file()
MAX_CONNS = config['contasEmParalelo'] or 1
RETRY_TIME = 30 * 60 # 30 minutes * secs
semaphore = BoundedSemaphore(MAX_CONNS)
def producer(out_q, data, participant):
username = data['username']
password = data['password']
semaphore.acquire()
try:
bot = UserBot(username, password, participant)
bot.run()
except Exception as e:
print(f"{Fore.RED} Teve erro em {username}. {e} Testando com outro usuário...")
semaphore.release()
out_q.put(username)
# A thread that consumes data
def consumer(in_q, accounts, participant):
while True:
username = in_q.get()
accounts[username]['timestamp'] = datetime.datetime.now()
for username, creds in accounts.items():
diff = datetime.datetime.now() - creds['timestamp']
if diff.seconds > RETRY_TIME:
Thread(
target=producer,
args=(in_q, creds, participant)
).start()
time.sleep(60)
class AccountManager(object):
def __init__(self, credentials, participant):
accs = {}
for creds in credentials:
username = creds['username']
accs[username] = creds
accs[username]['timestamp'] = datetime.datetime.now()
self.accounts = accs
self.participant = participant
self.q = Queue(maxsize=MAX_CONNS)
def run(self):
t1 = Thread(
target=consumer,
args=(self.q, self.accounts, self.participant)
)
t1.start()
for _, creds in self.accounts.items():
t2 = Thread(
target=producer,
args=(self.q, creds, self.participant)
)
t2.start()
```
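A minimal usage sketch for `AccountManager` (the import path, credential values, and `participant` argument below are placeholders, not taken from the repo):
```python
# Hypothetical wiring; real credentials come from the configuration file.
from account_manager import AccountManager

credentials = [
    {'username': 'user1@example.com', 'password': 'secret1'},
    {'username': 'user2@example.com', 'password': 'secret2'},
]
AccountManager(credentials, participant='participant-name').run()
```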
#### File: src/classifier/classify.py
```python
from utils import list_individual_images, break_captcha, save_candidate_name, save_image
import sys
import random
import os
import readline
READ_PATH = './src/classifier/pieces/'
SAVE_PATH = './src/classifier/classified/'
CAPTCHAS_TXT = './list_captchas.txt'
with open(CAPTCHAS_TXT, 'r') as fp:
SOLVED = [i.strip() for i in fp.readlines()]
captchas = set([i.split('_')[0] for i in os.listdir(READ_PATH)])
captchas.update(SOLVED)
def completer(text, state):
options = [i for i in captchas if i.startswith(text)]
if state < len(options):
return options[state]
else:
return None
readline.parse_and_bind("tab: complete")
readline.set_completer(completer)
if __name__ == "__main__":
pieces = sorted(list_individual_images(READ_PATH))
total = len(pieces)
for i, (name, image) in enumerate(pieces):
print(name, image)
print('Count:', i, total)
os.system(f'imgcat "{READ_PATH}{name}"')
new_name = input('>')
if new_name:
new_filename = f"{SAVE_PATH}{new_name}_{random.randint(10000, 99999)}.png"
image.save(new_filename)
```
#### File: bbb-v2/src/login_bot.py
```python
import requests
import json
from colorama import Fore
def login_bot(email, password):
headers = {
"Host": "login.globo.com",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:72.0) Gecko/20100101 Firefox/72.0",
"Content-Type": "application/json",
"Origin": "http://login.globo.com",
"Connection": "close",
}
data = {
"payload":{
"email":email,
"password":password,
"serviceId":1
},
"captcha":"",
}
session = requests.Session()
print("[+] Fazendo login com "+ Fore.LIGHTBLUE_EX + email)
response = session.post('https://login.globo.com/api/authentication', headers=headers, json=data)
HIDDEN = 80
if response.status_code != 200:
        raise Exception(Fore.RED + "Login failed, please try again.")
else:
        print(Fore.GREEN + f"[+] Login successful... ID: {response.cookies['GLBID'][:-HIDDEN]}{'*'*HIDDEN}")
return session
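if __name__ == "__main__":
    # Illustrative call only; this performs a real HTTP request and the
    # credentials here are placeholders.
    session = login_bot("user@example.com", "secret")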
``` |
{
"source": "JoseArtur/phyton-exercices",
"score": 3
} |
#### File: PyUdemy/Day14/test.py
```python
b = 0
def add(a, b):
    # 'b' here is a local parameter; incrementing it never touches the global 'b'
    b += 1
    return  # returns None, so the print below outputs 'None'
print(add(1, b))
```
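For contrast, a version of `add` that actually returns a value (a sketch, not part of the exercise files):
```python
def add_fixed(a, b):
    # returning the sum makes the call print 2 instead of None
    return a + b

print(add_fixed(1, 1))
```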
#### File: Day20/SnakeGame/snake.py
```python
from turtle import Turtle, Screen
class Snake():
def __init__(self):
self.parts = []
self.create()
self.head = self.parts[0]
def create(self):
for i in range(3):
jonny = Turtle(shape="square")
jonny.penup()
jonny.color("white")
jonny.setx(-20 * i)
self.parts.append(jonny)
def move(self):
for seg_num in range(len(self.parts) - 1, 0, -1):
            # each segment moves to the position of the segment ahead of it
            # (e.g. segment 3 moves to where segment 2 was)
new_x = self.parts[seg_num - 1].xcor()
new_y = self.parts[seg_num - 1].ycor()
self.parts[seg_num].goto(new_x, new_y)
self.head.forward(20)
    def up(self):
        if self.head.heading() != 270:
            self.head.setheading(90)
    def down(self):
        if self.head.heading() != 90:
            self.head.setheading(270)
    def left(self):
        if self.head.heading() != 0:
            self.head.setheading(180)
    def right(self):
        if self.head.heading() != 180:
            self.head.setheading(0)
```
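A plausible game-loop wiring for the `Snake` class above (the repo's main file is not shown here, so the import path, window size, and timing are assumptions):
```python
import time
from turtle import Screen
from snake import Snake

screen = Screen()
screen.setup(width=600, height=600)
screen.bgcolor("black")
screen.tracer(0)  # manual redraws via screen.update()

snake = Snake()
screen.listen()
screen.onkey(snake.up, "Up")
screen.onkey(snake.down, "Down")
screen.onkey(snake.left, "Left")
screen.onkey(snake.right, "Right")

while True:
    screen.update()
    time.sleep(0.1)
    snake.move()
```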
#### File: PyUdemy/Day8/AreaCalc.py
```python
def paint_calc(height,width,cover):
import math
calc=math.ceil((height*width)/cover)
print(f"You'll need {calc} cans of paint")
#Write your code above this line 👆
# Define a function called paint_calc() so that the code below works.
# 🚨 Don't change the code below 👇
test_h = int(input("Height of wall: "))
test_w = int(input("Width of wall: "))
coverage = 5
paint_calc(height=test_h, width=test_w, cover=coverage)
```
#### File: PyUdemy/Day8/PrimeNumber.py
```python
def prime_checker(number):
x=1
count=0
while x<=number:
if number%x==0:
count+=1
x+=1
if count==2:
print("This is a prime number")
else:
print("This is not a prime number")
``` |
{
"source": "JoseAVallejo12/holbertonschool-web_back_end",
"score": 4
} |
#### File: holbertonschool-web_back_end/0x00-python_variable_annotations/0-add.py
```python
def add(numberOne: float, number_two: float) -> float:
"""Return a plus b
Args:
numberOne (float): number one
number_two (float): number two
Returns:
float: return
"""
return numberOne + number_two
```
#### File: holbertonschool-web_back_end/0x00-python_variable_annotations/2-floor.py
```python
import math
def floor(n: float) -> int:
    """return the floor of n
    Args:
        n (float): arg
    Returns:
        int: the largest integer <= n
    """
    # int() truncates toward zero, which is wrong for negative floats;
    # math.floor rounds toward negative infinity
    return math.floor(n)
```
#### File: holbertonschool-web_back_end/0x01-python_async_function/1-concurrent_coroutines.py
```python
import asyncio
from typing import List
wait_random = __import__('0-basic_async_syntax').wait_random
async def wait_n(n: int, max_delay: int = 10) -> List[float]:
""" Waits for ran delay until max_delay, returns list of actual delays """
spawn_list = []
delay_list = []
for i in range(n):
delayed_task = asyncio.create_task(wait_random(max_delay))
delayed_task.add_done_callback(lambda x: delay_list.append(x.result()))
spawn_list.append(delayed_task)
for spawn in spawn_list:
await spawn
return delay_list
```
#### File: holbertonschool-web_back_end/0x03-caching/base_caching.py
```python
class BaseCaching():
""" BaseCaching defines:
- constants of your caching system
- where your data are stored (in a dictionary)
"""
MAX_ITEMS = 4
def __init__(self):
"""Initiliaze."""
self.cache_data = {}
def print_cache(self):
""" Print the cache
"""
print("Current cache:")
for key in sorted(self.cache_data.keys()):
print("{}: {}".format(key, self.cache_data.get(key)))
def put(self, key, item):
""" Add an item in the cache
"""
raise NotImplementedError("put must be implemented in your cache class")
def get(self, key):
""" Get an item by key
"""
raise NotImplementedError("get must be implemented in your cache class")
```
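An illustrative subclass in the style of the project's later caching tasks (a sketch, not taken from this file): it fills in `put`/`get` with no eviction policy.
```python
class BasicCache(BaseCaching):
    """ A cache with no size limit and no eviction policy """

    def put(self, key, item):
        """ Store item under key, ignoring None keys/items """
        if key is not None and item is not None:
            self.cache_data[key] = item

    def get(self, key):
        """ Return the cached item, or None if absent """
        return self.cache_data.get(key)
```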
#### File: holbertonschool-web_back_end/0x04-pagination/0-simple_helper_function.py
```python
from typing import Tuple
def index_range(page: int, page_size: int) -> Tuple[int, int]:
    """Calculate the first and last index for a page of a list
    Args:
        page (int): page number
        page_size (int): items per page
    Returns:
        Tuple[int, int]: tuple of first and last index
    """
    first_index = 0
    last_index = page * page_size
    if page > 1:
        first_index = (page - 1) * page_size
    return (first_index, last_index)
```
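A quick illustration of the paging arithmetic (the `__import__` form mirrors how the project loads these numbered modules):
```python
index_range = __import__('0-simple_helper_function').index_range

assert index_range(1, 7) == (0, 7)     # page 1 starts at index 0
assert index_range(3, 15) == (30, 45)  # page 3 skips the first two pages
```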
#### File: v1/auth/auth.py
```python
from typing import List, TypeVar
from flask import request
class Auth:
"""Auth main class."""
def require_auth(self, path: str, excluded_paths: List[str]) -> bool:
"""Protect path."""
if excluded_paths is None:
return True
        for element in excluded_paths:
            # check every wildcard entry instead of returning on the first one
            if "*" in element and path.startswith(element.replace("*", "")):
                return False
        return not (path in excluded_paths or f'{path}/' in excluded_paths)
def authorization_header(self, request=None) -> str:
"""Authorization header."""
if request is None:
return None
return request.headers.get('Authorization', None)
def current_user(self, request=None) -> TypeVar('User'):
"""Current user function."""
return None
```
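A behavior sketch for `require_auth` (paths are illustrative):
```python
from api.v1.auth.auth import Auth

auth = Auth()
assert auth.require_auth('/api/v1/status', ['/api/v1/status/']) is False
assert auth.require_auth('/api/v1/users', ['/api/v1/status/']) is True
assert auth.require_auth('/api/v1/users', ['/api/v1/us*']) is False  # wildcard match
```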
#### File: api/v1/app.py
```python
from os import getenv
from flask.globals import g
from api.v1.auth.auth import Auth
from api.v1.views import app_views
from flask_cors import (CORS)
from api.v1.auth.basic_auth import BasicAuth
from flask import Flask, jsonify, abort, request
from api.v1.auth.session_auth import SessionAuth
from api.v1.auth.session_db_auth import SessionDBAuth
from api.v1.auth.session_exp_auth import SessionExpAuth
app = Flask(__name__)
app.register_blueprint(app_views)
CORS(app, resources={r"/api/v1/*": {"origins": "*"}})
auth = None
if getenv('AUTH_TYPE') == 'auth':
auth = Auth()
elif getenv('AUTH_TYPE') == 'basic_auth':
auth = BasicAuth()
elif getenv('AUTH_TYPE') == 'session_auth':
auth = SessionAuth()
elif getenv('AUTH_TYPE') == 'session_exp_auth':
auth = SessionExpAuth()
elif getenv('AUTH_TYPE') == 'session_db_auth':
auth = SessionDBAuth()
@app.errorhandler(404)
def not_found(error) -> str:
""" Not found handler
"""
return jsonify({"error": "Not found"}), 404
@app.errorhandler(401)
def unauthorized(error) -> str:
""" Not found authorizedX
"""
return jsonify({"error": error.description}), 401
@app.errorhandler(403)
def forbidden(error) -> str:
""" Forbidden resource
"""
return jsonify({"error": error.description}), 403
@app.before_request
def before_request() -> str:
"""Handle request."""
exclude_paths = ['/api/v1/status/', '/api/v1/auth_session/login/',
'/api/v1/unauthorized/', '/api/v1/forbidden/']
if not auth or not auth.require_auth(request.path, exclude_paths):
return None
if (auth.authorization_header(request) is None
and auth.session_cookie(request) is None):
abort(401, description="Unauthorized")
if (auth.current_user(request) is None):
abort(403, description="Forbidden")
request.current_user = auth.current_user(request)
if __name__ == "__main__":
host = getenv("API_HOST", "0.0.0.0")
port = getenv("API_PORT", "5000")
app.run(host=host, port=port, debug=True)
```
#### File: v1/auth/session_db_auth.py
```python
from api.v1.auth.session_exp_auth import SessionExpAuth
from models.user_session import UserSession
from os import getenv
from datetime import datetime, timedelta
class SessionDBAuth(SessionExpAuth):
"""SessionExpAuth class to manage API authentication
"""
def create_session(self, user_id=None):
"""Create session
"""
if user_id:
session_id = super().create_session(user_id)
us = UserSession(user_id=user_id, session_id=session_id)
us.save()
UserSession.save_to_file()
return session_id
def user_id_for_session_id(self, session_id=None):
"""Get user ID from session
"""
if not session_id:
return None
UserSession.load_from_file()
users = UserSession.search({'session_id': session_id})
for u in users:
delta = timedelta(seconds=self.session_duration)
if u.created_at + delta < datetime.now():
return None
return u.user_id
def destroy_session(self, request=None):
"""Delete the user session / log out
"""
if request:
session_id = self.session_cookie(request)
if not session_id:
return False
if not self.user_id_for_session_id(session_id):
return False
users = UserSession.search({'session_id': session_id})
for u in users:
u.remove()
UserSession.save_to_file()
return True
return False
```
#### File: v1/auth/session_exp_auth.py
```python
from api.v1.auth.session_auth import SessionAuth
from os import getenv
from datetime import datetime, timedelta
class SessionExpAuth(SessionAuth):
"""SessionExpAuth class to manage API authentication
"""
def __init__(self):
"""Initialize SessionExpAuth
"""
try:
self.session_duration = int(getenv('SESSION_DURATION'))
except Exception:
self.session_duration = 0
def create_session(self, user_id=None):
"""Create session
"""
session_id = super().create_session(user_id)
if session_id:
SessionAuth.user_id_by_session_id[session_id] = {
'user_id': user_id, 'created_at': datetime.now()}
return session_id
def user_id_for_session_id(self, session_id=None):
"""Get user ID from session
"""
if not session_id:
return None
session_dict = SessionExpAuth.user_id_by_session_id.get(session_id)
if not session_dict:
return None
if self.session_duration <= 0:
return session_dict['user_id']
if 'created_at' not in session_dict:
return None
delta = timedelta(seconds=self.session_duration)
if session_dict['created_at'] + delta < datetime.now():
return None
return session_dict['user_id']
```
#### File: v1/views/index.py
```python
from flask import jsonify, abort
from api.v1.views import app_views
@app_views.route('/status', methods=['GET'], strict_slashes=False)
def status() -> str:
""" GET /api/v1/status
Return:
- the status of the API
"""
return jsonify({"status": "OK"})
@app_views.route('/stats/', methods=['GET'], strict_slashes=False)
def stats() -> str:
""" GET /api/v1/stats
Return:
- the number of each objects
"""
from models.user import User
stats = {}
stats['users'] = User.count()
return jsonify(stats)
@app_views.route('/unauthorized/', methods=['GET'], strict_slashes=False)
def unauthorized() -> str:
""" GET /api/v1/unauthorized
Returns:
str: abort 401 status
"""
abort(401, description="Unauthorized")
@app_views.route('/forbidden/', methods=['GET'], strict_slashes=False)
def forbidden() -> str:
""" GET /api/v1/forbidden
Returns:
str: abort 403 status
"""
abort(403, description="Forbidden")
```
#### File: v1/views/session_auth.py
```python
from os import getenv
from api.v1.views.index import status
from api.v1.views import app_views
from flask import abort, jsonify, request
from models.user import User
@app_views.route('/auth_session/login/', methods=['POST'],
strict_slashes=False)
def Session_authentication() -> str:
"""Session authentication method."""
if request.form.get('email') is None:
return jsonify({"error": "email missing"}), 400
elif request.form.get('password') is None:
return jsonify({"error": "password missing"}), 400
users = User.search({'email': request.form.get('email')})
if len(users) == 0:
return jsonify({"error": "no user found for this email"}), 404
for user in users:
if not user.is_valid_password(request.form.get('password')):
return jsonify({"error": "wrong password"}), 401
from api.v1.app import auth
user_found = users[0]
session_id = auth.create_session(user_found.id)
response = jsonify(user_found.to_json())
response.set_cookie(getenv('SESSION_NAME'), session_id)
return response
@app_views.route('/auth_session/logout', methods=['DELETE'],
strict_slashes=False)
def logout():
""" DELETE /auth_session/logout
Return:
- Empty json
"""
from api.v1.app import auth
if not auth.destroy_session(request):
abort(404)
return jsonify({}), 200
```
#### File: holbertonschool-web_back_end/0x07-Session_authentication/main_3.py
```python
from flask import Flask, request
from api.v1.auth.auth import Auth
auth = Auth()
app = Flask(__name__)
@app.route('/', methods=['GET'], strict_slashes=False)
def root_path():
""" Root path
"""
return "Cookie value: {}\n".format(auth.session_cookie(request))
if __name__ == "__main__":
app.run(host="0.0.0.0", port="5000")
```
#### File: holbertonschool-web_back_end/0x07-Session_authentication/main_4.py
```python
from flask import Flask, request
from api.v1.auth.session_auth import SessionAuth
from models.user import User
""" Create a user test """
user_email = "<EMAIL>"
user_clear_pwd = "<PASSWORD>"
user = User()
user.email = user_email
user.password = <PASSWORD>
user.save()
""" Create a session ID """
sa = SessionAuth()
session_id = sa.create_session(user.id)
print("User with ID: {} has a Session ID: {}".format(user.id, session_id))
""" Create a Flask app """
app = Flask(__name__)
@app.route('/', methods=['GET'], strict_slashes=False)
def root_path():
""" Root path
"""
request_user = sa.current_user(request)
if request_user is None:
return "No user found\n"
return "User found: {}\n".format(request_user.id)
if __name__ == "__main__":
app.run(host="0.0.0.0", port="5000", debug=True)
```
#### File: holbertonschool-web_back_end/0x09-Unittests_and_integration_tests/test_utils.py
```python
from unittest import TestCase
from unittest.mock import patch
from utils import access_nested_map, get_json, memoize
from parameterized import parameterized
class TestAccessNestedMap(TestCase):
"""main access_nested_map testing."""
@parameterized.expand([
({"a": 1}, ['a'], 1),
({"a": {"b": 2}}, ['a'], {"b": 2}),
({"a": {"b": 2}}, ['a', 'b'], 2)
])
def test_access_nested_map(self, nested_map, path, expected):
"""method access_nested_map test."""
self.assertEqual(access_nested_map(nested_map, path), expected)
@parameterized.expand([
({}, ['a']),
({"a": 1}, ['a', 'b'])
])
def test_access_nested_map_exception(self, nested_map, path):
"""Test raise error method access_nested_map."""
with self.assertRaises(KeyError) as e:
access_nested_map(nested_map, path)
class TestGetJson(TestCase):
""" Class for Get Json Tests """
@parameterized.expand([
("http://example.com", {"payload": True}),
("http://holberton.io", {"payload": False})
])
def test_get_json(self, test_url, test_payload):
""" Test that utils.get_json returns the expected result."""
config = {'return_value.json.return_value': test_payload}
patcher = patch('requests.get', **config)
mock = patcher.start()
self.assertEqual(get_json(test_url), test_payload)
mock.assert_called_once()
patcher.stop()
class TestMemoize(TestCase):
""" Class for Memoize Tests """
def test_memoize(self):
""" Test that when calling a_property twice, the correct result
is returned but a_method is only called once using
assert_called_once
"""
class TestClass:
""" Test Class for wrapping with memoize """
def a_method(self):
return 42
@memoize
def a_property(self):
return self.a_method()
with patch.object(TestClass, 'a_method') as mock:
test_class = TestClass()
test_class.a_property()
test_class.a_property()
mock.assert_called_once()
```
#### File: holbertonschool-web_back_end/0x0B_redis_basic/web.py
```python
from functools import wraps
import redis
import requests
from typing import Callable
client = redis.Redis()
def count_requests(method: Callable) -> Callable:
""" Decortator to count how many request has been made"""
@wraps(method)
def wrapper(url):
""" Function wrapper """
client.incr(f"count:{url}")
cached_html = client.get(f"cached:{url}")
if cached_html:
return cached_html.decode('utf-8')
html = method(url)
client.setex(f"cached:{url}", 10, html)
return html
return wrapper
@count_requests
def get_page(url: str) -> str:
"""Gets the html content of a web page
"""
req = requests.get(url)
return req.text
```
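A demo sketch for the cached fetcher above (assumes a local Redis server and network access; the import path is assumed):
```python
from web import get_page, client

url = "http://example.com"
get_page(url)   # fetched over HTTP, then cached for 10 seconds
get_page(url)   # served from the Redis cache
print(client.get(f"count:{url}"))  # b'2' -- both calls were counted
```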
#### File: holbertonschool-web_back_end/0x0D-NoSQL/10-update_topics.py
```python
def update_topics(mongo_collection, name, topics):
"""Update collection based in name"""
mongo_collection.update_many({'name': name}, {'$set': {'topics': topics}})
```
#### File: holbertonschool-web_back_end/0x0D-NoSQL/11-schools_by_topic.py
```python
def schools_by_topic(mongo_collection, topic):
"""Main function to get list of collection."""
return mongo_collection.find({'topics': {'$in': [topic]}})
```
#### File: holbertonschool-web_back_end/0x0D-NoSQL/mongo_connect.py
```python
from pymongo import MongoClient
from os import environ
USER_MONGO = environ['USER_MONGO']
PASSWORD = environ['PASSWORD']
DB_MONGO = environ['DB_MONGO']
STRING_CONNECTION = F'mongodb+srv://{USER_MONGO}:{PASSWORD}@cluster0.idafl.mongodb.net/{DB_MONGO}?retryWrites=true&w=majority'
def mongo_client():
"""Create an conection to mongo atlas server."""
return MongoClient(STRING_CONNECTION)
``` |
{
"source": "joseb73/Stick2It",
"score": 3
} |
#### File: Stick2It/widgets/app.py
```python
from PyQt5 import QtWidgets
from widgets.MainMenu import MainMenuWindow
"""
Stick2it widgets: a lot of small productivity apps
The main menu is the default widget that is loaded. Other widgets that will be loaded are located in folders within.
each widget has:
- a design folder (with .ui or .qml designs)
- app.py for the main app class (the window)
"""
def run():
app = QtWidgets.QApplication([])
window = MainMenuWindow()
window.show()
app.exec_()
```
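The typical entry point (assumed; the repo's launcher script is not shown here):
```python
from widgets import app

app.run()
```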
#### File: widgets/Calendar/__init__.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets, uic
from PyQt5.QtWidgets import QMainWindow, QLabel, QGridLayout, QWidget
from PyQt5.QtCore import QSize
from widgets import global_vars as gv
qtcreator_file = "widgets/Calendar/Calendar.ui" # Enter file here.
Ui_CalendarWindow, QtBaseClass = uic.loadUiType(qtcreator_file)
class Window(QtWidgets.QMainWindow, Ui_CalendarWindow):
def __init__(self):
QtWidgets.QMainWindow.__init__(self)
Ui_CalendarWindow.__init__(self)
self.setupUi(self)
self.name = "widget2"
def closeEvent(self, event):
print("closing window...")
print(gv.open_widgets)
del gv.open_widgets[self.name]
print(gv.open_widgets)
``` |
{
"source": "josebadoe/keybender",
"score": 2
} |
#### File: keybender/keybender/config.py
```python
import configparser
import os
import re
from Xlib import X
from keybender.knox import Modifiers, KnoX, Waiter as Sleeper
from keybender.event import Event
from keybender.rctl import StreamSender
from types import GeneratorType
from collections.abc import Iterable
import functools
import shlex
import fnmatch
import subprocess
import time, datetime
import sys
import fcntl
import pickle, base64
class Step:
def __init__(self, *args):
self.actions = []
self.triggers = None
def execute(self, *args, **kwargs):
for a in self.actions:
a.execute(*args, **kwargs)
class Key:
def __init__(self, knox, descr, origin=None, mods=None, keysym=None):
"""if keysym is None, itt will take everything as modifier it can, and use the
first non-modifier one as a key. When keysym is False it will not
permit non-modifier keys and raise an exception if found one. When it
is True, and no keysym found so far, it will use the last key as
keysym, even if it could be used as a modifier.
"""
self.knox = knox
self._net_modifiers = None
self.keysym = None
self.negate = False
self.named_modifiers = None
if mods:
self.named_modifiers = Modifiers(self.knox, mods.all())
if self.named_modifiers is None:
self.named_modifiers = Modifiers(self.knox, None)
if descr:
descr = descr.strip()
if not descr:
return
while descr.startswith("not "):
descr = descr[4:].strip()
self.negate = not self.negate
l = list(filter(lambda s: len(s) > 0,
map(lambda s: s.strip(),
descr.split('+'))))
for (i,n) in enumerate(l):
mods = self.knox.modifiers.find(name=n)
force_keysym = (keysym is True and i == len(l) - 1 and not self.keysym)
if mods and not force_keysym:
for m in mods:
self.named_modifiers.add(m)
break
elif not self.keysym:
self.keysym = self.knox.string_to_keysym(n)
if not self.keysym:
raise Exception("Unrecognized key '%s' in %s" % (n, origin))
if keysym is None:
pass
elif keysym:
if self.keysym is None:
# maybe use the last one as keysym...
raise Exception("Missing non-modifier key in '%s' in %s" (descr, origin))
else:
if self.keysym is not None:
raise Exception("Non-modifier key '%s' in %s"
% (self.knox.keysym_to_string(self.keysym), origin))
@property
def modifiers(self):
if not self.negate:
return self.named_modifiers
elif self._net_modifiers is None:
self._net_modifiers = ~self.named_modifiers
return self._net_modifiers
def __str__(self):
l = [ str(m) for m in self.named_modifiers.all() ]
if self.keysym:
l.append(self.knox.keysym_to_string(self.keysym))
if self.negate:
return "not " + "+".join(l)
else:
return "+".join(l)
def __eq__(a, b):
return a.keysym == b.keysym and a.modifiers.bitmap == b.modifiers.bitmap
def __hash__(self):
        return hash((self.keysym, self.modifiers.bitmap))
class Listener(Step):
def __init__(self, config, section, name=None):
        super().__init__(config, section)
self.config = config
self.section = section
self._name = name
self._description = None
for e in section:
if e == 'triggers':
self.triggers = Config.Parser.trigger_list(config, section, e)
elif e == 'mask':
pass
elif e == 'execute':
action_name = "action:%s" % section[e]
self.actions.append(config.action(action_name))
elif e == 'description':
self._description = section[e]
elif e == 'comment':
pass
else:
raise Exception("Unrecognized entry '%s' in section '%s'"
% (e, section.name))
@property
def name(self):
if self._name is not None:
return self._name
elif self.section:
return self.section.name
else:
return "?"
class Start(Listener):
pass
class Action(Step):
def __new__(cls, config, section, *args, **kwargs):
actions = []
if cls == Action:
for e in section:
if e == 'select-windows':
# this goes first so it fills up the target list before other actions
actions[0:0] = [ WindowSelector ]
elif e == 'run':
actions.append(ShellCommandAction)
elif e == 'consult':
actions.append(ConsultCommandAction)
elif e == 'do':
actions.append(AutonomousCommandAction)
else:
print("Warning: Unrecognized entry '%s' in section '%s'"
% (e, section.name))
if len(actions) > 1:
# __init__ is called on the returned object...
return MultiAction(config, section, *actions)
elif actions:
return actions[0](config, section)
else:
return object.__new__(cls)
def __init__(self, config, section):
raise Exception("Unrecognized action in section '%s'" % section.name)
class MultiAction(Action):
def __init__(self, config, section, *actions):
if getattr(self, '_initialized', False):
# __init__ is called twice if this object is created and
# initialized and then returned from __new__
return
self._initialized = True
self.actions = []
assert actions
for action_class in actions:
self.actions.append(action_class(config, section))
def __repr__(self):
return "MultiAct(%s)" % ' + '.join(map(repr, self.actions))
def execute(self, *args, **kwargs):
r = None
for a in self.actions:
ra = a.execute(*args, **kwargs)
if ra is not None:
if r is None:
r = list()
r.extend(ra)
return r
class ShellCommandAction(Action):
def __init__(self, config, section, cmd=None):
self.config = config
self.section = section
if cmd is not None:
self.cmd = cmd.strip()
else:
self.cmd = self.section['run'].strip()
def execute(self, *args, **kwargs):
print("RUNNING %r" % self.cmd)
os.system(self.cmd)
def __repr__(self):
return "run(%r)" % self.cmd
class Consultant:
def call_toggle_fn(self, fn, w):
actions = {
'+': KnoX._NET_WM_STATE_ADD,
'-': KnoX._NET_WM_STATE_REMOVE,
'!': KnoX._NET_WM_STATE_TOGGLE,
}
if w and w[0] in actions:
action = actions[w[0]]
w = w[1:]
else:
action = None
return fn(int(w), action=action)
def toggle_frame(self, w):
frame_states = {
'+': True,
'-': False,
'!': None
}
if w and w[0] in frame_states:
return self.config.knox.toggle_frame(int(w[1:]), frame=frame_states[w[0]])
else:
return self.config.knox.toggle_frame(int(w))
def __init__(self, config):
self.config = config
self.commands = {
'select-windows': self.select_windows,
'close': lambda w: self.config.knox.close_window(int(w)),
'minimize': lambda w: self.config.knox.minimize_window(int(w)),
'frame': self.toggle_frame,
'raise': lambda w: self.config.knox.raise_window(int(w)),
'activate': lambda w: self.config.knox.active_window(int(w)),
'focus': lambda w: self.config.knox.set_focused_window(int(w)),
'below': functools.partial(
self.call_toggle_fn,
self.config.knox.below_window),
'fullscreen': functools.partial(
self.call_toggle_fn,
self.config.knox.fullscreen_window),
'sticky': functools.partial(
self.call_toggle_fn,
self.config.knox.sticky_window),
'skip_pager': functools.partial(
self.call_toggle_fn,
self.config.knox.skip_pager),
'skip_taskbar': functools.partial(
self.call_toggle_fn,
self.config.knox.skip_taskbar),
'maximize': functools.partial(
self.call_toggle_fn,
self.config.knox.maximize_window),
'geometry': self.geometry,
'save_state': self.save_state,
'restore_state': self.restore_state,
'action': self.call_action,
'key': self.send_keys,
'send_keys': self.send_keys,
'desktop': self.show_desktop,
'display_count': self.display_count
}
def incoming(self, lines, responder=None):
cnt = 0
if responder is None:
responder = lambda x: x
for s in lines:
cnt += 1
found=False
print("Incoming: %r" % s)
if s == 'bye':
responder([ "bye\n" ])
return 0
for k in self.commands.keys():
prefix = k + ":"
if s.startswith(prefix) or s == k:
a = s[len(prefix):]
r = self.commands[k](a.strip())
if isinstance(r, str):
responder([r + "\n"])
elif isinstance(r, Iterable):
responder(r)
elif r is None or r is True:
self.config.knox.flush()
responder("OK\n")
elif r is False:
responder("Failed\n")
found = True
break
if not found:
print("Bad command from external process: %r" % s)
return cnt
def call_action(self, s):
a = self.config.action("action:" + s)
a.execute()
def geometry(self, s):
        # TODO: support WxH, but also *Wx*H for (float) multiples of the
        # workarea size. Accept '!' after numbers (position and size) to use
        # screen space instead of the workarea.
# space instead of workarea
m = re.match(r'^\s*(?P<win_id>\d+)\s+'
r'('
r'(?P<w_op>[*]\s*)?(?P<width>[.\d]+)\s*(?P<sz_selector_w>[!])?'
r'\s*x\s*'
r'((?P<h_op>[*]\s*)?(?P<height>[.\d]+)\s*(?P<sz_selector_h>[!])?)'
r')?'
r'('
r'\s*(?P<x_sign>[+-]\s*)(?P<x>\d+)\s*(?P<sz_selector_x>[!])?'
r'\s*(?P<y_sign>[+-]\s*)(?P<y>\d+)\s*(?P<sz_selector_y>[!])?'
r')?\s*$', s)
if not m:
raise Exception("Syntax error in geometry string: %r'" % s)
win_id = int(m['win_id'])
args = dict()
wa = self.config.knox.usable_workarea()
ra = self.config.knox.get_geometry(self.config.knox.root)
w = self.config.knox.get_geometry(win_id)
f = self.config.knox.get_frame_extents(win_id)
print("GEOMETRYCA: workarea %r, window %r, frame %r" % (wa, w, f))
if m['width']:
sz = ra if m['sz_selector_w'] and m['sz_selector_w'] == '!' else wa
if m['w_op'] and m['w_op'][0] == '*':
args['width'] = int(sz.width * float(m['width']))
else:
args['width'] = int(float(m['width']))
if m['height']:
sz = ra if m['sz_selector_h'] and m['sz_selector_h'] == '!' else wa
if m['h_op'] and m['h_op'][0] == '*':
args['height'] = int(sz.height * float(m['height']))
else:
args['height'] = int(float(m['height']))
if m['x']:
sz = ra if m['sz_selector_x'] and m['sz_selector_x'] == '!' else wa
if m['x_sign'] and m['x_sign'].startswith('-'):
args['x'] = sz.width - args.get('width', w.width) - int(m['x'])
#args['x'] = sz.width - (args.get('width', w.width) + int(m['x']) + f.left + f.right)
else:
args['x'] = sz.x + int(m['x'])
if m['y']:
sz = ra if m['sz_selector_y'] and m['sz_selector_y'] == '!' else wa
if m['y_sign'] and m['y_sign'].startswith('-'):
# top and bottom seem to be included already
#args['y'] = sz.height - (args.get('height', w.height) + f.top + f.bottom) + int(m['y'])
args['y'] = sz.height - args.get('height', w.height) - int(m['y'])
else:
args['y'] = sz.y + int(m['y'])
self.config.knox.set_geometry(win_id, **args)
def show_desktop(self, s):
if s == '-':
self.config.knox.show_desktop(action=False)
elif s == '+':
self.config.knox.show_desktop(action=True)
elif s == '!':
self.config.knox.show_desktop(action=None)
else:
raise Exception("Syntax error in desktop command: %r" % s)
def select_windows(self, s):
prefix="select-windows:"
full_msg = prefix + s
(_, name, args, timeout) = Config.Parser.section_reference(
self.config, "remote control message", prefix, full_msg, timeout=True)
print("Selecting %r" % name)
if not name:
return
section_name = "match-window:%s" % name
if args:
section_backup = self.config.save_section(section_name)
for (entry_name, value) in args.items():
self.config.config.set(section_name, entry_name, value)
else:
section_backup = None
ws = WindowSelector.Worker(
parent=None,
finder=self.config.window_finder(section_name, use_cache=False),
destination=None)
first = True
if timeout:
started = datetime.datetime.now()
r = ""
while timeout or first:
first = False
r = ws.execute()
if r or not timeout:
break
now = datetime.datetime.now()
if (now - started).total_seconds() > timeout:
break
print("WAITING FOR %s" % (full_msg))
time.sleep(0.1)
if section_backup is not None:
self.config.restore_section(section_name, section_backup)
return "select-windows:%s %s" % (name, r or "")
def save_state(self, *args):
state = self.config.knox.save_state()
return "save_state %s" % base64.b64encode(pickle.dumps(state)).decode()
def restore_state(self, s):
if not s:
return
state = pickle.loads(base64.b64decode(s))
self.config.knox.restore_state(state)
def send_keys(self, s):
ps = s.split(maxsplit=1)
if len(ps) != 2:
print("Syntax error in key command, missing window id: %r" % s)
return False
window_id = int(ps[0])
descr_lst = list()
for v in shlex.shlex(ps[1], posix=True):
if (v == '+' and descr_lst) or (descr_lst and descr_lst[-1][-1] == '+'):
descr_lst[-1] += v
else:
descr_lst.append(v)
for descr in descr_lst:
k = Key(self.config.knox, descr, origin="incoming command key:%r" % s)
self.config.knox.send_key(window_id, k.keysym, k.modifiers)
self.config.knox.flush()
def display_count(self, s):
return "display_count %d" % self.config.knox.display_count
class ConsultCommandAction(Action):
def __init__(self, config, section):
self.config = config
self.section = section
def __repr__(self):
return "consult(%r)" % self.section.get('consult', '?', raw=True).strip()
def call_action(self, s):
a = self.config.action("action:" + s)
a.execute()
def incoming_command(self, event, event_loop):
n = event.consultant.incoming(
map(lambda l: l.decode().strip(),
event.child.stdout.readlines()))
if n == 0:
exit_code = event.child.wait()
if exit_code:
print("Child process exited with %r: %r" % (exit_code, event.command))
event_loop.unregister(event.key)
def execute(self, *args, **kwargs):
cmd = self.section['consult'].strip()
print("CONSULTING %r" % cmd)
child = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=sys.stderr)
print("GOT STREAMS #%r for talking and #%r for listening"
% (child.stdin.fileno(), child.stdout.fileno()))
fl = fcntl.fcntl(child.stdout, fcntl.F_GETFL)
fcntl.fcntl(child.stdout, fcntl.F_SETFL, fl | os.O_NONBLOCK)
fl = fcntl.fcntl(child.stdin, fcntl.F_GETFL)
fcntl.fcntl(child.stdin, fcntl.F_SETFL, fl | os.O_NONBLOCK)
k = self.config.event_loop.register(
Event.READABLE, self.control_message,
fd=child.stdout,
child=child,
command=cmd,
consultant=Consultant(self.config))
def control_message(self, event, event_loop):
data = event.fd.read()
if not data:
print("CLOSING incoming #%r and outgoing #%r"
% (event.child.stdout.fileno(), event.child.stdin.fileno()),
"==" * 20)
event.child.stdout.close()
event.child.stdin.close()
event_loop.unregister(event.key)
else:
event.consultant.incoming(
data.decode('utf-8').splitlines(),
responder=StreamSender(event.child.stdin, event_loop))
class AutonomousCommandAction(Action):
def __init__(self, config, section):
self.config = config
self.section = section
def __repr__(self):
return "do(%r)" % self.section.get('do', '?', raw=True).strip()
def execute(self, *args, **kwargs):
consultant = Consultant(self.config)
commands = self.section['do']
n = 0
for cmd in commands.split(';'):
n += 1
cmd = cmd.strip()
consultant.incoming([ cmd ],
responder=functools.partial(self.chatter, cmd))
def chatter(self, cmd, whatever):
print("Response to do %r: %r" % (cmd, whatever))
class WindowSelector(Action):
class Worker:
def __init__(self, parent, finder, destination):
self.parent = parent
self.finder = finder
self.destination = destination
def execute(self, *args, **kwargs):
v = " ".join(map(str, self.finder(*args, **kwargs)))
if self.destination is not None:
self.parent.section[self.destination] = v
else:
return v
def __init__(self, config, section):
self.config = config
self.section = section
self.workers = []
self.waits = dict()
e = 'select-windows'
for s in section[e].split(';'):
v = s.split()
if len(v) == 3 and v[1] == 'into':
self.workers.append(
self.Worker(self, self.config.window_finder("match-window:%s" % v[0]), v[2]))
else:
                raise Exception(
                    "Bad value in entry '%s' in section '%s'. Should be "
                    "'<section-name> into <entry-name>' but it's '%s'"
                    % (e, section.name, s))
self.process_waits()
def process_waits(self):
if 'wait' not in self.section:
return
for s in self.section['wait'].split(';'):
s = s.strip()
m = re.match(
r'^(?P<time>\d+)s\s+for\s+(?P<list>{name_chars}+)\s*$'
.format(name_chars=Config.name_chars), s)
if not m:
raise Exception("Syntax error in entry '%s' in section '%s', in %r"
% ('wait', section.name, s))
self.waits[m['list']] = int(m['time'])
def execute(self, *args, **kwargs):
waits = dict(**self.waits)
s = Sleeper(0)
while s.wait():
s.timeout = max(waits.values() or [ 0 ])
for w in self.workers:
w.execute(*args, **kwargs)
            for (name, patience) in list(waits.items()):
                if not self.section[name]:
                    print("Entry %s still empty" % name, datetime.datetime.now())
                else:
                    del waits[name]
if not waits:
break
if waits:
print("Nothing good came for %s" % ",".join(waits.keys()))
class Expression:
operators = "()!|&"
def __init__(self, txt, translator):
self.translator = translator
lst = self.cleanup(shlex.shlex(txt, posix=True, punctuation_chars=self.operators))
        (expr, rest) = self.compile(lst, sofar=None, expect_value=True)
        if rest:
            raise Exception("Syntax error, unconsumed tokens remain: %r" % (rest,))
self.expr = expr
def __call__(self, *args, **kwargs):
return self.expr(*args, **kwargs)
def __repr__(self):
return ("Expr(%r)" % self.expr)
def cleanup(self, token_groups):
r = []
for tg in token_groups:
if all(map(lambda c: c in self.operators, tg)):
r.extend(list(tg))
else:
r.append(tg)
return r
priorities = {
'|': 20,
'&': 40,
'!': 99,
}
def compile(self, lst, sofar=None, priority=0, expect_value=True):
if not lst and not expect_value:
return (sofar, lst)
elif not lst:
raise Exception("Missing value on the end of epression")
if lst[0] in self.priorities:
new_priority = self.priorities[lst[0]]
else:
new_priority = None
if lst[0] == '(' and expect_value:
(expr, rest) = self.compile(lst[1:], sofar=None, priority=0, expect_value=True)
if not (rest and rest[0] == ')'):
raise Exception("Missing closing parenthesis")
return (expr, rest[1:])
elif lst[0] == ')' and not expect_value:
return (sofar, lst)
elif lst[0] == '!' and expect_value:
(expr, rest) = self.compile(
lst[1:], sofar=None, priority=new_priority, expect_value=True)
r_expr = self.translator.compile_op(lst[0], expr)
return (r_expr, rest)
elif lst[0] in '&|' and not expect_value:
if priority > new_priority:
return (sofar, lst)
(expr, rest) = self.compile(
lst[1:], sofar=None, priority=new_priority, expect_value=True)
r_expr = self.translator.compile_op(lst[0], sofar, expr)
return self.compile(
rest, sofar=r_expr, priority=priority, expect_value=False)
elif lst[0] not in self.operators:
l_expr = self.translator.compile_token(lst[0])
(r_expr, rest) = self.compile(
lst[1:], sofar=l_expr, priority = priority, expect_value=False)
return (r_expr, rest)
raise Exception("Whatsgoinon, expect_value=%r, sofar=%r, lst=%r, priority=%r, new_priority=%r" %(expect_value, sofar, lst, priority, new_priority))
class And:
def __init__(self, a, b):
self.a = a
self.b = b
def __call__(self, *args, **kwargs):
return self.a(*args, **kwargs) and self.b(*args, **kwargs)
def __repr__(self):
return ("And(%r, %r)" % (self.a, self.b))
class Or:
def __init__(self, a, b):
self.a = a
self.b = b
def __repr__(self):
return ("Or(%r, %r)" % (self.a, self.b))
def __call__(self, *args, **kwargs):
return self.a(*args, **kwargs) or self.b(*args, **kwargs)
class Not:
def __init__(self, v):
self.v = v
def __repr__(self):
return ("Not(%r)" % (self.v))
def __call__(self, *args, **kwargs):
return not self.v(*args, **kwargs)
class TokenMatch:
def __init__(self, token):
self.token = token
def __repr__(self):
return ("Token:%r" % self.token)
class WindowFinder:
class MatchAttr:
class StringCompare:
def __init__(self, wanted):
self.wanted = wanted
def __call__(self, value):
if isinstance(self.wanted, str) and isinstance(value, str):
return fnmatch.fnmatch(value, self.wanted)
else:
return self.wanted == value
def __repr__(self):
return repr(self.wanted)
class Translator:
def compile_token(self, s):
return WindowFinder.MatchAttr.StringCompare(s)
def compile_op(self, op, *args):
ops = {
"|": Expression.Or,
"&": Expression.And,
"!": Expression.Not
}
return ops[op](*args)
def __init__(self, section, entry, getter):
self.getter = getter
self.expression = Expression(section[entry], self.Translator())
def __call__(self, window):
value = self.getter(window)
return self.expression(value)
def get_name(self, window):
return self.config.knox.get_wm_name(window)
def get_class(self, window):
cls = window.get_wm_class()
return cls[-1] if cls else None
def get_instance(self, window):
cls = window.get_wm_class()
return cls[0] if cls else None
def get_pid(self, window):
return str(self.config.knox.get_wm_pid(window))
def get_type(self, window):
type_details = self.config.knox.get_window_type(window)
separator = "."
if type_details is not None:
types = list(type_details)
types.sort()
# separator char on both sides should help to avoid false matches
return separator + separator.join(types) + separator
else:
return "?"
class MatchAll:
def __init__(self, finder, *matchers):
self.matchers = list(matchers)
def __call__(self, window):
for m in self.matchers:
if not m(window):
return False
return True
class MatchAny:
def __init__(self, finder, *matchers):
self.matchers = matchers
def __call__(self, window):
for m in self.matchers:
if m(window):
return True
return False
def __init__(self, config, section):
self.config = config
self.section = section
self.config.config.BOOLEAN_STATES['any'] = None
self.config.config.BOOLEAN_STATES['?'] = None
self.match = self.MatchAll(self)
self.toplevel = True
self.focused = None
getters = {
'title': self.get_name,
'name': self.get_name,
'class': self.get_class,
'instance': self.get_instance,
'pid': self.get_pid,
'type': self.get_type,
}
if 'title' in section and 'name' in section:
raise Exception(
"title and name refer to the same property in section '%s'"
% (section.name))
for e in section:
if e == 'match':
if section[e] == 'any':
self.match = self.MatchAny(self, *self.match.matchers)
elif section[e] == 'all':
self.match = self.MatchAll(self, *self.match.matchers)
else:
raise Exception(
"Unrecognized value in entry '%s' in section '%s'"
% (e, section.name))
elif e in getters:
self.match.matchers.append(
self.MatchAttr(section, e, getters[e]))
elif e == 'focused':
self.focused = section.getboolean(e)
elif e == 'toplevel':
self.toplevel = section.getboolean(e)
# if section[e] in ['0', 'no', 'false']:
# self.toplevel = False
# elif section[e] in ['1', 'yes', 'true']:
# self.toplevel = True
# elif section[e] in ['any']:
# self.toplevel = None
# else:
# raise Exception(
# "Unrecognized value in entry '%s' in section '%s'"
# % (e, section.name))
else:
raise Exception("Unrecognized entry '%s' in section '%s'"
% (e, section.name))
def get_focused_window(self):
n = "Focused Window"
if self.x_state and n in self.x_state:
if self.x_state[n] is not None:
f = set([self.x_state[n]])
else:
f = set()
else:
f = set([self.config.knox.get_focused_window()])
return f
def __call__(self, *args, x_state=None, **kwargs):
self.x_state = x_state
if self.toplevel is True and self.focused is True:
wls = (
set(self.config.knox.toplevel_windows(id_only=True) or [])
& self.get_focused_window())
elif self.toplevel is True and self.focused is False:
wls = (
set(self.config.knox.toplevel_windows(id_only=True))
- self.get_focused_window())
elif self.toplevel is True: # and self.focused is None
wls = set(self.config.knox.toplevel_windows(id_only=True) or [])
elif self.toplevel is False and self.focused is True:
wls = (
self.get_focused_window()
- set(self.config.knox.toplevel_windows(id_only=True) or []))
else:
wls = set()
for (window, _, _) in self.config.knox.window_tree():
wls.add(window.id)
if self.toplevel is False:
wls -= set(self.config.knox.toplevel_windows(id_only=True) or [])
if self.focused is True:
wls &= self.get_focused_window()
elif self.focused is False:
wls -= self.get_focused_window()
if self.x_state and "Ignore" in self.x_state:
wls -= self.x_state["Ignore"]
lst = []
for win_id in wls:
if self.match(self.config.knox.get_window(win_id)):
lst.append(win_id)
return lst
class TriggerList():
def __init__(self, config, section, entry, triggers=None):
self.triggers = []
self.section = section
self.entry = entry
if triggers is not None:
self.triggers = triggers
# else:
# for w in Config.Parser.waiter_list(config, section, entry):
# self.add(w)
def add(self, trigger):
for t in self.triggers:
if t.key == trigger.key:
raise Exception(
"Multiple waiters (%s and %s) in entry '%s' in section '%s' waiting for the same trigger: %s"
% (w.section.name, waiter.section.name,
self.entry, self.section.name, t.key))
self.triggers.append(trigger)
def __iter__(self):
return iter(self.triggers)
class Trigger(Step):
def __init__(self, trigger, mask, action=None, waiter=None):
super().__init__()
self.key = trigger
self.mask = mask
# self.waiter = waiter
if action:
self.actions.append(action)
self.waiter = waiter
def __repr__(self):
return ("Trigger(%s)<w:%r,x:%r>" % (self.key, self.waiter, self.actions))
class Waiter(Listener):
pass
class Config:
name_chars=r'[-\w$]'
def __init__(self, knox, filename, event_loop, extra_options=None, add_env=True):
self.knox = knox
self.event_loop = event_loop
self.config = configparser.ConfigParser(
interpolation=configparser.ExtendedInterpolation())
self.config.add_section("env")
self.extra_options = dict()
if extra_options:
for (name, value) in extra_options.items():
if isinstance(name, tuple):
self.extra_options[name] = value
else:
parts = name.split(':', maxsplit=1)
if len(parts) == 1:
self.extra_options[("cfg", parts[0])] = value
else:
self.extra_options[(parts[0], parts[1])] = value
self.config.read(filename)
self.add_extra_options(self.extra_options)
if add_env:
self.add_extra_options(os.environ, section="env")
self.waiters = dict()
self.actions = dict()
self.finders = dict()
self.start = Start(self, self.config['start'])
self.config_file = filename
self.config_id = os.stat(filename).st_mtime
def add_extra_options(self, options, section=None):
for (option_name, value) in options.items():
if isinstance(option_name, tuple):
(section_name, option_name) = option_name
else:
section_name = section
if not self.config.has_section(section_name):
self.config.add_section(section_name)
if not self.config.has_option(section_name, option_name):
self.config[section_name][option_name] = value
def changed(self):
return not (os.stat(self.config_file).st_mtime == self.config_id)
def reload(self):
return Config(self.knox, self.config_file, self.event_loop,
extra_options=self.extra_options, add_env=False)
def waiter(self, name):
if name in self.waiters:
return self.waiters[name]
else:
w = Waiter(self, self.config[name])
self.waiters[name] = w
return w
def action(self, name):
if name in self.actions:
return self.actions[name]
elif name in self.config:
a = Action(self, self.config[name])
self.actions[name] = a
return a
else:
raise Exception("Action %r not found" % name)
def window_finder(self, name, use_cache=True):
if name in self.finders and use_cache:
return self.finders[name]
else:
f = WindowFinder(self, self.config[name])
self.finders[name] = f
return f
def save_section(self, section):
return list(map(lambda e: (e, self.config[section][e]), self.config[section].keys()))
def restore_section(self, section, bkp):
cur_keys = set(self.config[section].keys())
orig_keys = set(map(lambda e: e[0], bkp))
for (e, v) in bkp:
self.config[section][e] = v
for k in cur_keys - orig_keys:
del self.config[section][k]
class Parser:
@classmethod
def waiter_list(cls, config, section, entry):
waiters = []
for w in section[entry].split(','):
w = w.strip()
if not w:
raise Exception("Syntax error in entry '%s' in section '%s'"
% (entry, section.name))
waiter_name = "waiter:%s" % w
waiters.append(config.waiter(waiter_name))
return waiters
@classmethod
def trigger_list(cls, config, section, entry):
triggers = TriggerList(config, section, entry)
mask = Key(config.knox, section.get('mask', ''),
origin="entry 'mask' in section '%s'" % section.name)
for t in section[entry].split(';'):
t = t.strip()
if not t:
continue
ps = t.split('::')
if len(ps) != 2:
raise Exception("Syntax error in entry '%s' in section '%ss'"
% (entry, section.name))
key_descr, step = [ s.strip() for s in ps ]
key = Key(config.knox, key_descr, keysym=True,
origin="entry '%s' in section '%s'" % (entry, section.name))
if step.startswith("run:"):
a = ShellCommandAction(config, section=None, cmd=step[4:])
triggers.add(
Trigger(trigger=key, mask=mask,
action=a))
elif step.startswith("action:"):
action_name = "action:%s" % step[7:].strip()
triggers.add(
Trigger(trigger=key, mask=mask,
action=config.action(action_name)))
elif step.startswith("waiter:"):
waiter_name = "waiter:%s" % step[7:].strip()
triggers.add(
Trigger(trigger=key, mask=mask,
waiter=config.waiter(waiter_name)))
else:
raise Exception("WTF: %r" % step)
return triggers
@classmethod
def action_list(cls, config, section, entry):
actions = []
for a in map(lambda s: s.strip(), section[entry].split(';')):
if not a:
continue
            (type_name, name, args) = cls.section_reference(
                config, section.name, entry, a)
            # The original source breaks off here; resolving the reference to
            # a configured Action is an assumed completion, not author code.
            actions.append(config.action("%s:%s" % (type_name or "action", name)))
        return actions
@classmethod
def section_reference(cls, config, section_name, entry_name, ref, timeout=False):
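        # Accepted reference forms (inferred from the regexes below; the names
        # are illustrative): "name", "type:name", "type:name with a=1 b", and,
        # when timeout parsing is enabled, an optional trailing "waiting 5s".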
type_name = None
name = None
args = dict()
if timeout:
arg_re = (
r"(\s+with\s+(?P<args>(.(?!waiting))+))?"
r"(\s+waiting\s+(?P<timeout>\d+)s?\s*)?$")
else:
            arg_re = (
                r"(\s+with\s+(?P<args>.*))?$")
m = re.match(
(r"^(\s*(?P<type_name>{name_chars}+)\s*:)?"
r"\s*(?P<name>{name_chars}+)"
+ arg_re)
.format(name_chars=Config.name_chars), ref)
if not m:
raise Exception(
"Syntax error in section options in %r in entry '%s', section '%s'"
% (ref, entry_name, section_name))
type_name = m['type_name']
section_name = m['name']
# shlex.split stops (infinite loop?) when called with None
if m['args'] is not None:
for (i, p) in enumerate(shlex.split(m['args'])):
am = re.match(
r'^\s*(?P<name>{name_chars}+)(=(?P<value>.*))?$'
.format(name_chars=Config.name_chars), p)
if am:
args[am['name']] = am['value']
else:
raise Exception(
"Syntax error in section options in %r in entry '%s', section '%s'"
% (p, entry_name, section_name))
if timeout:
if m['timeout']:
timeout = int(m['timeout'])
else:
timeout = None
return (type_name, section_name, args, timeout)
else:
return (type_name, section_name, args)
```
#### File: keybender/keybender/__main__.py
```python
from keybender import config
from keybender.knox import KnoX
from keybender.listener import Listener
from keybender.event import Event, EventLoop
from keybender.rctl import SocketMgr, SocketSender
import sys
import os
import argparse
import socket
import traceback
"""
argument parser
open named pipe for communication with external control
on startup a script can start urxvt and this inside of it, then send the request
to the pipe to find that PID's window, then ask for removing borders, removing it from
the taskbar, setting it to always under everything else, etc...
bindkeysequence -t urxvt
then run urxvt -e runme...
"""
# argumentparser,
# decide to start at start (1st level) or at a specific waiter or even an action
# or the same but at 2nd level (with opening tk root and exit on undefined key)
class Director:
def process_args(self):
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", metavar="FILENAME",
help="Configuration file",
dest="config", default=None, required=True)
parser.add_argument("-s", "--socket", metavar="SOCKET",
help="Socket path to listen on for commands.",
dest="socket_path", default=None)
parser.add_argument("-o", "--options", metavar="[SECTION:]OPTION=VALUE",
help=
"Option name and value to set in the opt section"
" in the configuration file.",
action="append",
dest="options", default=[])
self.options = parser.parse_args()
if not self.options:
parser.print_help()
sys.exit(1)
self.special_options = dict()
broken = False
for opt_str in self.options.options:
parts = opt_str.split('=')
if len(parts) != 2:
print("Bad option: %r" % opt_str, file=sys.stderr)
broken = True
continue
if parts[0] in self.special_options:
print("Repeated option name in: %r" % opt_str, file=sys.stderr)
broken = True
continue
self.special_options[parts[0]] = parts[1]
if broken:
sys.exit(2)
def __init__(self):
self.process_args()
self.knox = KnoX()
self.event_loop = EventLoop()
self.cfg = config.Config(self.knox,
self.options.config, self.event_loop,
extra_options=self.special_options)
self.cfg.start.execute()
if self.options.socket_path:
if os.path.exists(self.options.socket_path):
os.unlink(self.options.socket_path)
self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.socket.bind(self.options.socket_path)
self.socket.listen(0)
self.event_loop.register(
Event.READABLE, self.remote_control_connection, fd=self.socket)
else:
self.socket = None
self.event_loop.register(Event.IDLE, self.check_config, timeout=4)
self.ls = Listener(self.knox, self.event_loop, self.cfg.start.triggers)
def main(self):
while True:
self.ls.listen()
def remote_control_connection(self, event, event_loop):
(conn, _) = self.socket.accept()
print("Somebody connected on #%r" % conn.fileno())
conn = SocketMgr(conn)
self.event_loop.register(
Event.READABLE, self.remote_control_msg,
fd=conn, consultant=config.Consultant(self.cfg))
def remote_control_msg(self, event, event_loop):
data = event.fd.recv(1024)
if not data:
print("CLOSING #%r" % event.fd.fileno(), "==" * 30)
event.fd.close_rd()
#event.fd.close()
self.event_loop.unregister(event.key)
else:
r = event.consultant.incoming(data.decode().splitlines(),
responder=SocketSender(event.fd, event_loop))
def check_config(self, event, event_loop):
try:
if self.cfg.changed():
if self.ls.level > 1:
print("Config file changed, but cannot reload...")
else:
print("Config file changed, reloading...")
new_cfg = self.cfg.reload()
self.ls = Listener(self.knox, self.event_loop, new_cfg.start.triggers)
self.cfg = new_cfg
event_loop.quit()
except Exception as e:
traceback.print_exc(file=sys.stderr)
#print(e, file=sys.stderr)
Director().main()
``` |
{
"source": "josebadoe/trkm",
"score": 3
} |
#### File: trkm/sequencer/faker.py
```python
import configparser
import sys
from datetime import datetime, timedelta
import statistics, random
class RecordWrapper:
def __init__(self, name, time, idx, data):
self.name = name
self.time = time
self._idx = idx
self._data = data
@property
def hr(self):
return self._data['hr']
@property
def distance(self):
return self._data['total_distance']
@property
def speed(self):
return self._data['speed']
@property
def cadence(self):
return self._data['cadence']
@property
def temperature(self):
return None
class Fragment:
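    # A Fragment is a linear ramp from _start to _end over _length samples.
    # divide() splits it into child Fragments, making the profile piecewise
    # linear; force() overwrites a sub-range with a constant value (the pause
    # logic uses 0).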
def __init__(self, length, start, end, min=None, max=None, starting_at=None):
self._length = length
if start < 0:
raise Exception("Start %f" % start)
if end < 0:
raise Exception("End %f" % end)
self.starting_at = starting_at or 0
self._start = start
self._end = end
self._min = min
self._max = max
self._parts = None
self._step = None
def init_cache(self):
if self._parts is None:
if self._step is None:
self._step = (self._end - self._start) / len(self)
def __getitem__(self, at):
if at < 0:
at += len(self)
if self._parts is None:
self.init_cache()
v = self._start + self._step * at
if self._min is not None:
v = max(v, self._min)
if self._max is not None:
v = min(v, self._max)
return v
(elt, at, _) = self.element_at(at)
if elt is not None:
return elt[at]
return self[-1]
def element_at(self, at):
if self._parts is None:
return (None, None, None)
for (i, elt) in enumerate(self._parts):
if at < len(elt):
return (elt, at, i)
else:
at -= len(elt)
return (None, None, None)
def __len__(self):
if self._parts:
return sum(map(len, self._parts))
else:
return self._length
def divide(self, at, displacement=0, absolute=None):
if at == 0:
if absolute is not None:
                self._start = absolute
else:
self._start += displacement
elif at == self._length:
if absolute is not None:
                self._end = absolute
else:
self._end += displacement
elif self._parts is None:
if absolute is not None:
p = absolute
else:
step = (self._end - self._start) / len(self)
p = self._start + step * at + displacement
self._parts = [
Fragment(at, self._start, p,
min=self._min, max=self._max,
starting_at = self.starting_at),
Fragment(self._length - at, p, self._end,
min=self._min, max=self._max,
starting_at = self.starting_at + at)
]
else:
(elt, at, i) = self.element_at(at)
if elt and at != 0:
elt.divide(at, displacement, absolute)
# if at == 0 and i > 0:
# self._parts[i-1].divide(len(self._parts[i-1]), displacement, absolute)
def force(self, starting_at, length, value):
if starting_at > self._length:
pass
elif starting_at <= 0 and length >= self._length:
self._start = value
self._end = value
self._parts = None
self._step = None
else:
length = min(length, self._length - starting_at)
(s_elt, s_at, _) = self.element_at(starting_at)
if s_elt is None:
self.divide(starting_at)
(e_elt, e_at, _) = self.element_at(starting_at + length)
if e_elt is None:
self.divide(starting_at + length)
for elt in self._parts:
if starting_at < len(elt):
l = min(length, len(elt) - starting_at)
                    elt.force(starting_at, l, value)
if l >= length:
break
length -= l
starting_at = 0
else:
starting_at -= len(elt)
def __repr__(self):
if self._parts is None:
return ("Fragment[%r:%ds, %.2f, %.2f]"
% (self.starting_at, self._length, self._start, self._end))
else:
return ("Fragments %r:%ds[%s]"
% (self.starting_at, len(self), ", ".join(map(repr, self._parts))))
class Faker:
def __init__(self, name):
self.name = name
self.config = configparser.ConfigParser(interpolation=None, strict=True,
empty_lines_in_values=True)
self.config.read(self.name)
def parse_range(self, s, parser=int):
l = list(map(parser, s.split(',')))
return (l[0], l[-1])
def error(self, msg):
print(msg)
sys.exit(1)
def displacement(self, val, lo, hi):
return random.triangular(lo, hi, val) - val
def displace_midpoint(self, route, start, end, bounds, displacement_reduction):
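        # Classic midpoint-displacement roughening: pick a point near the
        # middle of [start, end], nudge the profile there by a bounded random
        # amount, then recurse on both halves with the bounds shrunk by
        # displacement_reduction.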
if end - start < self.min_frag_len:
return
at = int(random.triangular(start, end, (start + end) / 2))
v = route[at]
lo = v - bounds
hi = v + bounds
route.divide(at, self.displacement(v, lo, hi))
new_bounds = bounds * displacement_reduction
self.displace_midpoint(route, start, at, new_bounds, displacement_reduction)
self.displace_midpoint(route, at, end, new_bounds, displacement_reduction)
def add_pause(self, route, at, lead_in, length, lead_out):
start = max(0, at - int(length / 2))
end = min(len(route), start + length)
p1 = start
p2 = end
leadin_start = max(0, start - lead_in)
leadout_end = min(end + lead_out, len(route))
x_start_v = route[leadin_start]
x_end_v = route[leadout_end]
if start > 0:
p1 = leadin_start
route.divide(leadin_start, absolute=x_start_v)
if end < len(route):
p2 = leadout_end
route.divide(leadout_end, absolute=x_end_v)
if start > 0:
route.divide(start, 0)
else:
leadin_start = None
if end < len(route):
route.divide(end, absolute=0)
route.divide(leadout_end)
else:
leadout_end = None
# for i in range(p1, p2+1):
# print("Pause of %d going at %d: %r" % (length, i, route[i]))
route.force(start, length, 0)
# for i in range(p1, p2+1):
# print("Pause of %d went at %d: %r" % (length, i, route[i]))
return route
def print_route(self, route):
for n in range(0, len(route)):
print("%5d: %.2f" % (n, route[n]))
# def squash(self, route, correction_factor, c_med, c_min, c_max, w_med, w_min, w_max):
# # keeping shape
# f_lo = (w_med - w_min) / ((c_med - c_min) * correction_factor)
# f_hi = (w_max - w_med) / ((c_max - c_med) * correction_factor)
# for (i, v) in enumerate(route):
# if v < c_med:
# route[i] = c_med - ((c_med - v) * f_lo)
# elif v > c_med:
# route[i] = c_med + ((v - c_med) * f_hi)
# return route
def route(self, length, avg_speed, speed_range, pauses=[]):
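        # Build a speed profile: start from a rough baseline around 1000,
        # roughen it by midpoint displacement, zero out randomly placed
        # (Weibull-distributed) pauses, then rescale so the mean matches
        # avg_speed and clamp the result into speed_range.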
base = 1000
displacement_bounds = 500
decay_power = 1
displacement_reduction = 1 / (2 ** decay_power)
hi = base + displacement_bounds
lo = base - displacement_bounds
start = 1000 + self.displacement(1000, lo, hi)
end = 1000 + self.displacement(1000, lo, hi)
route = Fragment(length, start, end)
self.displace_midpoint(route, 0, length,
displacement_bounds,
displacement_reduction)
pp = sorted(map(lambda _: int(random.weibullvariate(length, 1.5)), pauses))
#print("BEFORE-APU: %r" % route)
for (i, p) in enumerate(pp):
self.add_pause(route, p, length=pauses[i], lead_in=2, lead_out=2)
#print("AFTER-APU: %r" % route)
r0 = list(map(lambda i: route[i], range(0, length)))
min_v = min(r0)
max_v = max(r0)
m = statistics.mean(r0)
f = avg_speed / m
# if min_v * f < speed_range[0] or max_v * f > speed_range[1]:
# r0 = self.squash(r0, f, m, min_v, max_v, avg_speed, *speed_range)
# m2 = statistics.mean(r0)
# print("Squashed, m0: %r, m2: %r" % (m, m2))
#r = list(map(lambda s: min(speed_range[1], max(speed_range[0], s * f)), r0))
#mr = statistics.mean(r)
#print("Cut, m0: %r, m2: %r" % (m, mr))
return [ min(max(s * f, speed_range[0]),
speed_range[1]) if s
else 0
for s in r0 ]
def all(self):
cfg = self.config['training']
cadence_range = self.parse_range(cfg['cadence'])
speed_range = self.parse_range(cfg['speed'], parser=float)
time_range = self.parse_range(cfg['time'],
parser=(lambda s:
datetime.strptime(s.strip(),
'%Y-%m-%d %H:%M:%S%z')))
base_hr = int(cfg['base_heart_rate'])
hr_range = self.parse_range(cfg['heart_rate'])
hr_effect_lasting = int(cfg['hr_effect_lasting'])
hr_effect_delay = int(cfg['hr_effect_delay'])
hr_factor0 = (hr_range[0] - base_hr) / (cadence_range[0])
hr_factor = (hr_range[1] - hr_range[0]) / (cadence_range[1] - cadence_range[0])
pauses = list(map(int, cfg['pauses'].split(',')))
# from km to meters
total_distance = float(cfg['distance']) * 1000
total_time = (time_range[1] - time_range[0]).seconds
avg_speed = (total_distance / 1000) / (total_time / 3600)
cadence_acc_factor = (
(cadence_range[1] - cadence_range[0])
/ (speed_range[1] - speed_range[0]))
if not speed_range[0] <= avg_speed <= speed_range[1]:
self.error("Required average speed %f is not in permitted range %f - %f"
% (avg_speed, *speed_range))
self.min_frag_len = 5 # seconds
route = self.route(total_time, avg_speed, speed_range, pauses)
distance_so_far = 0
hr_effect = hr_effect_delay + hr_effect_lasting
cadence_log = [ 0 ] * hr_effect
prev_t = 0
for t in range(0, total_time):
speed = route[t]
dist = speed * 1000 / 3600 * (t - prev_t)
cadence = (cadence_range[0]
+ (speed - speed_range[0]) * cadence_acc_factor)
cadence_log = cadence_log[1:] + [ cadence ]
cm = statistics.mean(cadence_log[0:hr_effect_lasting])
if cm >= cadence_range[0]:
hr = hr_range[0] + (cm - cadence_range[0]) * hr_factor
else:
hr = base_hr + hr_factor0 * cm
distance_so_far += dist
hr = round(hr)
cadence = round(cadence)
# print("At %d, speed: %.2f, dist: %.2f, total dist: %.2f, cadence: %.2f, cm: %.2f, hr: %.2f"
# % (t, speed, dist, distance_so_far, cadence, cm, hr))
data = {
'hr': hr,
'total_distance': distance_so_far,
'speed': speed,
'cadence': cadence
}
prev_t = t
yield RecordWrapper(self.name,
time_range[0] + timedelta(seconds=t), t, data)
def __iter__(self):
self._g = self.all()
return self
def __next__(self):
return next(self._g)
```
#### File: trkm/sequencer/interpolator.py
```python
import gpxdata
import math
from .zipper import ZipperWrapper
class InterpolatorPointWrapper:
def __init__(self, point, lst, name=None, time=None, real=True):
self.point = point
self.lst = lst
self.name = name or point.name
self.time = time or point.time
self.collected = {}
self.real = real
def __getattr__(self, key):
if self.real:
v = self.getattr_local(key)
if v is not None:
v0 = (self, v)
else:
v0 = None
v1 = None
else:
v0 = self.getattr(key, direction=-1)
if v0 and v0[1] == None:
v0 = None
if v0 and v0[0] == self:
return v0[1]
else:
v1 = self.getattr(key, direction=1)
if v1 and v1[1] == None:
v1 = None
if v0 and v1:
tdelta_t = (v1[0].time - v0[0].time).seconds
tdelta_c = (self.time - v0[0].time).seconds
if False and key in ['lat', 'lon']:
ratio = tdelta_c / tdelta_t
(lat, lon) = gpxdata.Util.interpolate(v0[0].lat, v0[0].lon,
v1[0].lat, v1[0].lon,
ratio)
if key == 'lat':
return lat
else:
return lon
else:
v = v0[1] + (v1[1] - v0[1]) / tdelta_t * tdelta_c
return v
elif v0:
return v0[1]
elif v1:
return v1[1]
else:
return None
def getattr(self, key, direction=0, force=False):
if (key, direction) in self.collected:
return self.collected[(key, direction)]
if force:
v = None
else:
v = self.getattr_local(key)
if v != None:
self.collected[(key, direction)] = (self, v)
return (self, v)
elif direction != 0:
pt = self.lst.neighbor(self, direction)
while pt != None:
v = pt.getattr(key)
if v != None:
self.collected[(key, direction)] = v
return v
else:
pt = self.lst.neighbor(pt, direction)
self.collected[(key, direction)] = (self, None)
return None
def getattr_local(self, key):
if key in self.__dict__:
v = self.__dict__[key]
elif key in type(self).__dict__:
v = type(self).__dict__[key]
elif self.point != None:
v = getattr(self.point, key)
if v != None and not math.isnan(v):
setattr(self, key, v)
else:
v = None
else:
v = None
return v
class Deduplicator(ZipperWrapper):
def __init__(self, name, points, lst, time=None):
super().__init__(name, points)
self.lst = lst
self.time = time or points[0].time
self.collected = {}
def getattr(self, key, direction=0, force=False):
if (key, direction) in self.collected:
return self.collected[(key, direction)]
if force:
v = None
else:
v = getattr(self, key)
if v != None:
self.collected[(key, direction)] = (self, v)
return (self, v)
elif direction != 0:
pt = self.lst.neighbor(self, direction)
while pt != None:
v = pt.getattr(key)
if v != None:
self.collected[(key, direction)] = v
return v
else:
pt = self.lst.neighbor(pt, direction)
self.collected[(key, direction)] = (self, None)
return None
class Interpolator:
def __init__(self, sequencer):
self.by_time_idx = dict()
self.last_used_time = None
self.last_used_time_idx = None
self.points = []
self.sequencer = iter(sequencer)
#self.name = sequencer.name + ':ipt'
def __getitem__(self, k):
return self.get(k)[0]
def get(self, k, return_empty=True):
"""Returns (point, found_flag)
"""
if type(k) == int:
try:
while len(self.points) <= k:
self.load_next()
except StopIteration:
pass
return (self.points[k], True)
else: # timestamp
if k in self.by_time_idx:
return (self.points[self.by_time_idx[k]], True)
if self.last_used_time and self.last_used_time <= k:
i = self.last_used_time_idx
else:
i = 0
first_match = None
# print("Getting %s" % (k,))
while True:
if i >= len(self.points) or self.points[i].time > k:
if first_match and first_match == i - 1:
# print(" true:A %s" % (k, ))
return (self.points[first_match], True)
elif first_match is not None:
# repeated timestamps
pt = Deduplicator("ddp", self.points[first_match:i], self, time=k)
self.points[first_match:i] = [pt]
self.last_used_time = k
self.last_used_time_idx = i
# print(" true:B %s" % (k, ))
return (pt, True)
else:
if return_empty:
pt = InterpolatorPointWrapper(None, self, name=self.name, time=k, real=False)
self.points[i:i] = [pt]
else:
pt = None
# #print("Inserting %r at %r in %s" % (pt.time, i, self.name))
# print(" false:C %s, %r" % (k, pt.distance if pt else None))
return (pt, False)
elif self.points[i].time == k:
self.by_time_idx[self.points[i].time] = i
if first_match == None:
first_match = i
i += 1
def load_next(self):
if self.sequencer == None:
raise StopIteration
try:
pt = next(self.sequencer)
self.name = pt.name + ':ipt'
#print("JAAAA %r" % pt.time)
except StopIteration:
self.sequencer = None
raise
if self.points and self.points[-1].time == pt.time:
self.points[-1] = InterpolatorPointWrapper(pt, self)
else:
self.points.append(InterpolatorPointWrapper(pt, self))
def first(self):
return self[0]
def neighbor(self, point, step=1):
for i in range(0, len(self.points)):
if self.points[i] == point:
if i + step < 0:
return None
try:
return self[i+step]
except IndexError:
return None
return None
```
#### File: trkm/trkm/trkm.py
```python
import argparse
import activityio as aio
import sys, os
import gpxpy, gpxpy.gpx
from gpxpy import gpxfield
from . import sequencer
import math
import datetime
# print("11 fields: %r" % (gpxpy.gpx.GPXTrackPoint.gpx_11_fields,))
# gpxpy.gpx.GPXTrackPoint.gpx_11_fields = [
# *gpxpy.gpx.GPXTrackPoint.gpx_11_fields,
# gpxfield.GPXField('speed', type=gpxfield.FLOAT_TYPE)
# ]
class GPXWriter:
def __init__(self, sequencer, options, multiplier=1):
self.seq = sequencer
self.options = options
self.multiplier = multiplier
def write(self, output_file):
gpx = gpxpy.gpx.GPX()
gpx.creator = "TrackMerge"
trk = gpxpy.gpx.GPXTrack()
gpx.tracks.append(trk)
seg = gpxpy.gpx.GPXTrackSegment()
trk.segments.append(seg)
ptcnt = 0
for t in self.seq:
ptcnt += 1
if self.options.progress and ptcnt % 27 == 0:
print("\r%8d" % ptcnt, end="")
sys.stdout.flush()
ext = {}
lat = getattr(t, 'lat', None)
lon = getattr(t, 'lon', None)
alt = getattr(t, 'alt', None)
spd = getattr(t, 'speed', None)
if spd:
spd = spd * self.multiplier
pt = gpxpy.gpx.GPXTrackPoint(latitude=lat,
longitude=lon,
elevation=alt,
time=t.time,
speed=spd)
if t.hr != None:
ext['gpxtpx:hr'] = int(round(t.hr))
if t.cadence != None:
ext['gpxtpx:cad'] = int(round(t.cadence))
if t.temperature != None:
ext['gpxtpx:atemp'] = int(round(t.temperature))
if spd != None:
ext['gpxtpx:speed'] = spd
if ext:
pt.extensions = { 'gpxtpx:TrackPointExtension': self.dict_to_xml(ext) }
seg.points.append(pt)
print("%8d" % ptcnt)
self.write_gpx(output_file, gpx)
def dict_to_xml(self, tab):
r = ""
for k in sorted(tab.keys(), reverse=True):
v = tab[k]
# for (k, v) in tab.items():
if type(v) == dict:
                s = self.dict_to_xml(v)
else:
s = str(v)
r += "<%s>%s</%s>" % (k, s, k)
return r
def write_gpx(self, output_file, gpx):
version='1.1'
v = '1/1'
xml_attributes = {
'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance',
'xmlns': 'http://www.topografix.com/GPX/%s' % v,
'xmlns:gpxtpx': 'http://www.garmin.com/xmlschemas/TrackPointExtension/v1',
'xsi:schemaLocation': 'http://www.topografix.com/GPX/%s http://www.topografix.com/GPX/%s/gpx.xsd' % (v, v) }
#gpxfield.GPXField('speed', type=gpxfield.FLOAT_TYPE) ]
content = gpxfield.gpx_fields_to_xml(gpx, 'gpx', version=version,
custom_attributes=xml_attributes)
xml = '<?xml version="1.0" encoding="UTF-8"?>\n' + content.strip() + "\n"
f = open(output_file, 'w')
f.write(xml)
f.close()
class TCXWriter:
def __init__(self, sequencer, options, multiplier=1):
self.seq = sequencer
self.options = options
self.multiplier = multiplier
def write(self, output_file):
tcx = None
lap = None
trk = None
start_time = None
end_time = None
total_distance = 0
max_speed = 0
prev_time = None
ptcnt = 0
for t in self.seq:
if t.time is None:
continue
ptcnt += 1
if self.options.progress and ptcnt % 27 == 0:
#print("\r%8d - %8.2f" % (ptcnt, t.distance), end="")
print(t.time)
sys.stdout.flush()
if tcx is None:
(tcx, lap, trk) = self.setup_tcx(t.time)
start_time = t.time
prev_time = end_time
end_time = t.time
if t.distance:
total_distance = t.distance
trkpt = tcx.createElement("Trackpoint")
trk.appendChild(trkpt)
tm = t.time - (t.time.utcoffset() or datetime.timedelta())
self.add_text_elt(tcx, trkpt, "Time",
tm.strftime("%Y-%m-%dT%H:%M:%SZ"))
lat = getattr(t, 'lat', None)
lon = getattr(t, 'lon', None)
alt = getattr(t, 'alt', None)
if lat and lon:
pos = tcx.createElement("Position")
self.add_text_elt(tcx, pos, "LatitudeDegrees", lat)
self.add_text_elt(tcx, pos, "LongitudeDegrees", lon)
trkpt.appendChild(pos)
if alt:
self.add_text_elt(tcx, trkpt, "AltitudeMeters", "55.8")
if t.hr != None:
hr = tcx.createElement("HeartRateBpm")
trkpt.appendChild(hr)
self.add_text_elt(tcx, hr, "Value", int(t.hr))
if t.cadence != None:
self.add_text_elt(tcx, trkpt, "Cadence", int(round(t.cadence)))
self.add_text_elt(tcx, trkpt,
"DistanceMeters",
"%.2f" % (total_distance * self.multiplier))
spd = getattr(t, 'speed', None)
if spd is not None:
max_speed = max(spd * self.multiplier, max_speed)
ext = tcx.createElement("Extensions")
trkpt.appendChild(ext)
tpx = tcx.createElement("TPX")
tpx.setAttribute("xmlns",
"http://www.garmin.com/xmlschemas/ActivityExtension/v2")
ext.appendChild(tpx)
self.add_text_elt(tcx, tpx, "Speed", round(spd, 1))
# <Watts>0</Watts>
self.add_text_elt(tcx, lap, "DistanceMeters",
"%.2f" % (total_distance * self.multiplier))
self.add_text_elt(tcx, lap, "Calories", 0)
self.add_text_elt(tcx, lap, "TriggerMethod", "Manual")
#self.add_text_elt(tcx, lap, "MaximumSpeed", max_speed)
# <AverageHeartRateBpm>
# <Value>158</Value>
# </AverageHeartRateBpm>
# <MaximumHeartRateBpm>
# <Value>195</Value>
# </MaximumHeartRateBpm>
# <Intensity>Active</Intensity>
# <Cadence>61</Cadence>
# <TriggerMethod>Manual</TriggerMethod>
with open(output_file, "wt") as writer:
tcx.writexml(writer, newl="\n", addindent=" ", encoding="UTF-8")
# gpx.creator = "TrackMerge"
# trk = gpxpy.gpx.GPXTrack()
# gpx.tracks.append(trk)
# seg = gpxpy.gpx.GPXTrackSegment()
# trk.segments.append(seg)
# ptcnt = 0
# for t in self.zip:
# ptcnt += 1
# if ptcnt % 27 == 0:
# print("\r%8d" % ptcnt, end="")
# sys.stdout.flush()
# ext = {}
# lat = getattr(t, 'lat', None)
# lon = getattr(t, 'lon', None)
# alt = getattr(t, 'alt', None)
# spd = getattr(t, 'speed', None)
# if spd:
# print(spd, spd * math.pi)
# spd = spd * math.pi
# pt = gpxpy.gpx.GPXTrackPoint(latitude=lat,
# longitude=lon,
# elevation=alt,
# time=t.time,
# speed=spd)
# if t.hr != None:
# ext['gpxtpx:hr'] = int(round(t.hr))
# if t.cad != None:
# ext['gpxtpx:cad'] = int(round(t.cad))
# if t.temperature != None:
# ext['gpxtpx:atemp'] = int(round(t.temperature))
# if spd != None:
# ext['gpxtpx:speed'] = spd
# if ext:
# pt.extensions = { 'gpxtpx:TrackPointExtension': self.dict_to_xml(ext) }
# seg.points.append(pt)
# print("%8d" % ptcnt)
# self.write_gpx(output_file, gpx)
def add_text_elt(self, doc, parent, name, value):
elt = doc.createElement(name)
elt.appendChild(doc.createTextNode(str(value)))
parent.appendChild(elt)
return elt
def setup_tcx(self, start_time):
from xml.dom.minidom import Document
start_time -= start_time.utcoffset() or datetime.timedelta()
start_time_str = start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
tcx = Document()
root = tcx.createElement("TrainingCenterDatabase")
root.setAttribute(
"xsi:schemaLocation",
"http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2 http://www.garmin.com/xmlschemas/TrainingCenterDatabasev2.xsd")
root.setAttribute(
"xmlns:ns5",
"http://www.garmin.com/xmlschemas/ActivityGoals/v1")
root.setAttribute(
"xmlns:ns3",
"http://www.garmin.com/xmlschemas/ActivityExtension/v2")
root.setAttribute(
"xmlns:ns2",
"http://www.garmin.com/xmlschemas/UserProfile/v2")
root.setAttribute(
"xmlns",
"http://www.garmin.com/xmlschemas/TrainingCenterDatabase/v2")
root.setAttribute(
"xmlns:xsi",
"http://www.w3.org/2001/XMLSchema-instance")
# root.setAttribute("xmlns:ns2", "http://www.garmin.com/xmlschemas/UserProfile/v2")
# root.setAttribute("xmlns:ns3",
# "http://www.garmin.com/xmlschemas/ActivityExtension/v2")
# root.setAttribute("xmlns:ns5", "http://www.garmin.com/xmlm")
tcx.appendChild(root)
acts = tcx.createElement("Activities")
root.appendChild(acts)
act = tcx.createElement("Activity")
act.setAttribute("Sport", "Biking")
acts.appendChild(act)
self.add_text_elt(tcx, act, "Id", start_time_str)
lap = tcx.createElement("Lap")
lap.setAttribute("StartTime", start_time_str)
act.appendChild(lap)
trk = tcx.createElement("Track")
lap.appendChild(trk)
return (tcx, lap, trk)
class Options:
def __init__(self, lst):
self.opts = dict()
for e in lst:
parts = e.split('=', 1)
name = parts[0]
if len(parts) > 1:
value = parts[1]
else:
value = "1"
self.opts[name] = value
def get_int(self, name):
s = self.opts.get(name, None)
if s is not None:
return int(s)
else:
return 0
def get_float(self, name):
s = self.opts.get(name, None)
if s is not None:
return float(s)
else:
return 0
def get_bool(self, name):
s = self.opts.get(name, None)
if s is not None:
return not not int(s)
else:
return False
class TrackMerge:
def main():
parser = argparse.ArgumentParser(description="GPS track merge")
parser.add_argument("-f", "--format", metavar="FORMAT",
help="Output file format, may be GPX or TCX",
dest="format", default="GPX")
parser.add_argument("-m", "--multiplier", metavar="N",
help="Constant to multiply speed and distance values with, "
"like PI or a floating point value, for cases like when you "
"used diameter in place of circumference on roller training.",
dest="multiplier", default=1)
parser.add_argument("-c", "--cleanup",
help="Check and clean up common some data errors",
dest="cleanup", default=False, action="store_true")
parser.add_argument("-o", "--cleanup-option", metavar="NAME=VALUE",
help="Set cleanup options: MaxSpeed=<kmh>",
dest="cleanup_opts", action="append")
parser.add_argument("-v", "--verbose",
help="Show processing details",
dest="verbose", default=False, action="store_true")
parser.add_argument("-p", "--progress",
help="Show progress info",
dest="progress", default=False, action="store_true")
parser.add_argument("input_files", metavar="TRACK", nargs="*",
help="Input file to process. One of these formats: "
"Garmin TCX (.tcx), FIT or Flexible and Interoperable "
"Data Transfer (.fit), or GPS Exchange Format (.gpx)")
parser.add_argument("output_file", metavar="OUTPUT.gpx",
help="The merged track is always in GPX format with "
"Garmin extensions")
args = parser.parse_args()
if args.format.upper() == "GPX":
writer = GPXWriter
elif args.format.upper() == "TCX":
writer = TCXWriter
else:
raise Exception("Format '%s' not recognized" % args.format)
if args.multiplier == "PI":
m = math.pi
else:
m = float(args.multiplier)
TrackMerge(args, m).merge(writer, args.output_file)
def __init__(self, options, multiplier=1):
self.sequencers = []
self.options = options
self.multiplier = multiplier
for file_name in self.options.input_files:
if os.path.splitext(file_name)[1] == '.ini':
seq = sequencer.Faker(file_name)
else:
data = aio.read(file_name)
seq = sequencer.AIO(file_name, data)
ipt = sequencer.Interpolator(seq)
self.sequencers.append(ipt)
self.seq = sequencer.Zipper(*self.sequencers)
if options.cleanup:
if self.options.verbose:
cleanup_opts = Options(self.options.cleanup_opts + ["verbose=1"])
else:
cleanup_opts = Options(self.options.cleanup_opts)
self.seq = sequencer.Cleanup(self.seq, cleanup_opts)
def merge(self, writer, output_file=None):
if not output_file:
output_file = self.options.output_file
writer(self.seq, self.options, multiplier=self.multiplier).write(output_file)
``` |
{
"source": "josebalius/go-spacemesh",
"score": 3
} |
#### File: app_engine/gcloud_tasks/add_task_to_queue.py
```python
import datetime
from google.cloud import tasks_v2
import json
def create_google_cloud_task(queue_params, payload, path='/', in_seconds=None, **kwargs):
"""
:param queue_params: dictionary, a dictionary with keys project_id, queue_name, queue_zone for resolving
gcloud queue path.
:param payload: dictionary, the arguments for the app engine task.
:param path: string, path to task relative url
:param in_seconds: int, number of seconds to delay task.
:param kwargs: dictionary, optional - additional arguments.
:return:
"""
# Create a client.
client = tasks_v2.CloudTasksClient()
    # these parameters are required for resolving the queue path and are
    # always validated before the task is built
gcloud_params_val = ["project_id", "queue_name", "queue_zone"]
for g_param in gcloud_params_val:
if g_param not in queue_params:
raise ValueError(f"missing {g_param} param in order to resolve queue")
# add optional arguments to payload (currently necessary only for dumping)
payload["dump_params"] = kwargs
# Construct the fully qualified queue name.
parent = client.queue_path(queue_params["project_id"], queue_params["queue_zone"], queue_params["queue_name"])
# Construct the request body.
task = {
'app_engine_http_request': { # Specify the type of request.
'http_method': tasks_v2.HttpMethod.POST,
'relative_uri': path
}
}
if payload is not None:
if isinstance(payload, dict):
# Convert dict to JSON string
payload = json.dumps(payload)
# specify http content-type to application/json
task["app_engine_http_request"]["headers"] = {"Content-type": "application/json"}
# The API expects a payload of type bytes.
converted_payload = payload.encode()
# Add the payload to the request.
task['app_engine_http_request']['body'] = converted_payload
if in_seconds is not None:
# Convert "seconds from now" into an rfc3339 datetime string.
timestamp = datetime.datetime.utcnow() + datetime.timedelta(seconds=in_seconds)
# Add the timestamp to the tasks.
task['schedule_time'] = timestamp
# Use the client to build and send the task.
response = client.create_task(parent=parent, task=task)
print('\nCreated task {}'.format(response.name))
return response
```
#### File: go-spacemesh/tests/k8s_handler.py
```python
from datetime import datetime
from kubernetes import client
from kubernetes.client.rest import ApiException
import os
import time
import yaml
from tests import config as conf
import tests.utils as ut
def remove_clusterrole_binding(shipper_name, crb_name):
# remove clusterrolebind
k8s_client = client.RbacAuthorizationV1Api()
try:
k8s_client.delete_cluster_role_binding(crb_name)
print(f"\nsuccessfully deleted: {crb_name}")
except Exception as e:
print(f"\n{shipper_name} cluster role binding deletion has failed, please manually delete {crb_name}:")
print(f"kubectl delete clusterrolebinding {crb_name}")
def filebeat_teardown(namespace):
# remove clusterrolebind
# TODO: find a solution for sharing the name both here and in the kube object
crb_name = f"filebeat-cluster-role-binding-{namespace}"
remove_clusterrole_binding("filebeat", crb_name)
def fluent_bit_teardown(namespace):
# remove clusterrolebind
# TODO: find a solution for sharing the name both here and in the kube object
crb_name = f"fluent-bit-clusterrole-binding-{namespace}"
remove_clusterrole_binding("fluent-bit", crb_name)
def add_elastic_cluster(namespace):
print("\nDeploying ElasticSearch\n")
add_deployment_dir(namespace, conf.ELASTIC_CONF_DIR)
def add_filebeat_cluster(namespace):
print("\nDeploying FileBeat\n")
add_deployment_dir(namespace, conf.FILEBEAT_CONF_DIR)
def add_fluent_bit_cluster(namespace):
print("\nDeploying Fluent-bit\n")
add_deployment_dir(namespace, conf.FLUENT_BIT_CONF_DIR)
def add_kibana_cluster(namespace):
print("\nDeploying Kibana\n")
add_deployment_dir(namespace, conf.KIBANA_CONF_DIR)
def add_logstash_cluster(namespace):
print("\nDeploying LogStash\n")
add_deployment_dir(namespace, conf.LOGSTASH_CONF_DIR)
def add_deployment_dir(namespace, dir_path, delete=False):
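    # Apply the manifests listed in dep_order.txt (comma-separated, in order):
    # substitute the NAMESPACE and Elasticsearch credential placeholders, then
    # dispatch each document to the matching kubernetes-client API by "kind".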
with open(os.path.join(dir_path, 'dep_order.txt')) as f:
dep_order = f.readline()
dep_lst = [x.strip() for x in dep_order.split(',')]
print(dep_lst)
phrases_to_replace = ["(?<!_)NAMESPACE", "REP_ES_USER", "REP_ES_PASS"]
values_for_replacement = [namespace, conf.ES_USER_LOCAL, conf.ES_PASS_LOCAL]
for filename in dep_lst:
# replace all phrases with the actual values if exists
modified_file_path, is_change = ut.duplicate_file_and_replace_phrases(
dir_path, filename, f"{namespace}_{filename}", phrases_to_replace, values_for_replacement
)
print(f"applying file: {filename}")
with open(modified_file_path) as f:
dep = yaml.safe_load(f)
if modified_file_path != os.path.join(dir_path, filename) and is_change:
# remove modified file
ut.delete_file(modified_file_path)
name = dep["metadata"]["name"]
if dep['kind'] == 'StatefulSet':
k8s_client = client.AppsV1Api()
if not delete:
k8s_client.create_namespaced_stateful_set(body=dep, namespace=namespace)
else:
k8s_client.delete_namespaced_stateful_set(name=name, namespace=namespace)
elif dep['kind'] == 'DaemonSet':
k8s_client = client.AppsV1Api()
k8s_client.create_namespaced_daemon_set(body=dep, namespace=namespace)
elif dep['kind'] == 'Deployment':
k8s_client = client.AppsV1Api()
k8s_client.create_namespaced_deployment(body=dep, namespace=namespace)
elif dep['kind'] == 'Service':
try:
k8s_client = client.CoreV1Api()
k8s_client.create_namespaced_service(body=dep, namespace=namespace)
except ApiException as e:
if e.status == 409:
print(f"Service exists: {dep['metadata']['name']}")
continue
raise e
elif dep['kind'] == 'PodDisruptionBudget':
k8s_client = client.PolicyV1beta1Api()
k8s_client.create_namespaced_pod_disruption_budget(body=dep, namespace=namespace)
elif dep["kind"] == 'Role':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.create_namespaced_role(body=dep, namespace=namespace)
elif dep["kind"] == 'ClusterRole':
try:
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.create_cluster_role(body=dep)
except ApiException as e:
if e.status == 409:
print(f"cluster role already exists")
continue
raise e
elif dep["kind"] == 'RoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
dep["subjects"][0]["namespace"] = namespace
k8s_client.create_namespaced_role_binding(body=dep, namespace=namespace)
elif dep["kind"] == 'ClusterRoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
try:
k8s_client.create_cluster_role_binding(body=dep)
except ApiException as e:
if e.status == 409:
print(f"cluster role binding already exists")
continue
raise e
elif dep["kind"] == 'ConfigMap':
k8s_client = client.CoreV1Api()
k8s_client.create_namespaced_config_map(body=dep, namespace=namespace)
elif dep["kind"] == 'ServiceAccount':
k8s_client = client.CoreV1Api()
k8s_client.create_namespaced_service_account(body=dep, namespace=namespace)
print("\nDone\n")
def remove_deployment_dir(namespace, dir_path):
with open(os.path.join(dir_path, 'dep_order.txt')) as f:
dep_order = f.readline()
dep_lst = [x.strip() for x in dep_order.split(',')]
print(dep_lst)
for filename in dep_lst:
print(f"deleting {filename}")
with open(os.path.join(dir_path, filename)) as f:
dep = yaml.safe_load(f)
name = dep["metadata"]["name"]
if dep['kind'] == 'StatefulSet':
k8s_client = client.AppsV1Api()
k8s_client.delete_namespaced_stateful_set(name=name, namespace=namespace)
elif dep['kind'] == 'DaemonSet':
k8s_client = client.AppsV1Api()
k8s_client.delete_namespaced_daemon_set(name=name, namespace=namespace)
elif dep['kind'] == 'Deployment':
k8s_client = client.AppsV1Api()
k8s_client.delete_namespaced_deployment(name=name, namespace=namespace)
elif dep['kind'] == 'Service':
k8s_client = client.CoreV1Api()
k8s_client.delete_namespaced_service(name=name, namespace=namespace, grace_period_seconds=0)
delete_func = k8s_client.delete_namespaced_service
list_func = k8s_client.list_namespaced_service
wait_for_namespaced_deletion(name, namespace, delete_func, list_func)
elif dep['kind'] == 'PodDisruptionBudget':
k8s_client = client.PolicyV1beta1Api()
k8s_client.delete_namespaced_pod_disruption_budget(name=name, namespace=namespace)
elif dep["kind"] == 'Role':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.delete_namespaced_role(name=name, namespace=namespace)
elif dep["kind"] == 'RoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.delete_namespaced_role_binding(name=name, namespace=namespace)
elif dep["kind"] == 'ClusterRoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.delete_cluster_role_binding(name=name)
elif dep["kind"] == 'ConfigMap':
k8s_client = client.CoreV1Api()
k8s_client.delete_namespaced_config_map(name=name, namespace=namespace)
elif dep["kind"] == 'ServiceAccount':
k8s_client = client.CoreV1Api()
k8s_client.delete_namespaced_service_account(name=name, namespace=namespace)
print("\nDone\n")
def wait_for_namespaced_deletion(name, namespace, deletion_func, list_func, timeout=15):
deleted = False
orig_timeout = timeout
while not deleted:
# find by name and delete requested item
for item in list_func(namespace).items:
if item.metadata.name == name:
if timeout < 0:
raise TimeoutError(f"{orig_timeout} was not enough for deleting item:\n{item}\n")
deletion_func(name=name, namespace=namespace)
print(f"service {name} was not deleted, retrying")
time.sleep(1)
timeout -= 1
# validate item was deleted
for item in list_func(namespace).items:
deleted = True
if item.metadata.name == name:
deleted = False
return deleted
def wait_for_daemonset_to_be_ready(name, namespace, timeout=None):
wait_for_to_be_ready("daemonset", name, namespace, timeout=timeout)
def resolve_read_status_func(obj_name):
if obj_name == "daemonset":
return client.AppsV1Api().read_namespaced_daemon_set_status
else:
raise ValueError(f"resolve_read_status_func: {obj_name} is not a valid value")
def wait_for_to_be_ready(obj_name, name, namespace, timeout=None):
start = datetime.now()
while True:
read_func = resolve_read_status_func(obj_name)
resp = read_func(name=name, namespace=namespace)
total_sleep_time = (datetime.now()-start).total_seconds()
number_ready = resp.status.number_ready
updated_number_scheduled = resp.status.updated_number_scheduled
if number_ready and updated_number_scheduled and number_ready == updated_number_scheduled:
print("Total time waiting for {3} {0} [size: {1}]: {2} sec".format(name, number_ready, total_sleep_time,
obj_name))
break
print("{0}/{1} pods ready {2} sec ".format(number_ready, updated_number_scheduled, total_sleep_time), end="\r")
time.sleep(1)
if timeout and total_sleep_time > timeout:
raise Exception(f"Timeout waiting for {obj_name} to be ready")
```
#### File: stress/blocks_stress/test_stress_blocks.py
```python
from pytest_testconfig import config as testconfig
import tests.analyse as analyse
from tests.queries import wait_for_latest_layer
from tests.setup_network import setup_network
def test_blocks_stress(init_session, setup_network):
epochs_to_wait = 4
layers_per_epoch = int(testconfig['client']['args']['layers-per-epoch'])
layer_avg_size = int(testconfig['client']['args']['layer-average-size'])
number_of_cl = int(testconfig['client']['replicas'])
number_of_cl += 1 # add bootstrap node
last_layer = layers_per_epoch * epochs_to_wait
wait_for_latest_layer(init_session, last_layer, layers_per_epoch, number_of_cl)
analyse.analyze_mining(init_session, epochs_to_wait, layers_per_epoch, layer_avg_size, number_of_cl)
```
#### File: stress/sync_stress/test_sync.py
```python
from datetime import datetime
import time
from pytest_testconfig import config as testconfig
from tests.convenience import convert_ts_to_datetime
import tests.queries as q
from tests.setup_utils import add_multi_clients
from tests.utils import get_conf
# ==============================================================================
# TESTS
# ==============================================================================
SYNC_DONE = "sync done"
START_SYNC = "start synchronize"
def test_sync_stress(init_session, setup_bootstrap, save_log_on_exit):
# currently the only data we have is for 2.5 days, ~700+ layers
max_time_in_mins = 20
max_time_for_sync_mins = max_time_in_mins
clients_num = testconfig["client"]["replicas"]
bs_info = setup_bootstrap.pods[0]
cspec = get_conf(bs_info, testconfig['client'], testconfig['genesis_delta'])
_ = add_multi_clients(testconfig, init_session, cspec, clients_num)
hits = []
number_of_pods = clients_num + 1 # add 1 for bootstrap pod
tts = 70
while len(hits) != number_of_pods:
print(f"waiting for all clients to finish downloading all files, sleeping for {tts} seconds")
time.sleep(tts)
hits = q.get_all_msg_containing(init_session, init_session, "Done downloading")
del cspec.args['remote-data']
cspec.args['data-folder'] = ""
# Adding a single new client
res_lst = add_multi_clients(testconfig, init_session, cspec, 1, 'client')
new_client = res_lst[0]
# wait for the new node to start syncing
while True:
start_sync_hits = q.get_all_msg_containing(init_session, new_client, START_SYNC, is_print=False)
if start_sync_hits:
print(f"new client started syncing\n")
break
tts = 60
print(f"new client did not start syncing yet sleeping for {tts} secs")
time.sleep(tts)
curr_try = 0
# longest run witnessed ~18:00 minutes (12:00 minutes is the shortest), 2.5 days data, 700+ layers
max_retries = max_time_in_mins
interval_time = 60
print("waiting for new client to be synced")
while True:
hits = q.get_all_msg_containing(init_session, new_client, SYNC_DONE, is_print=False)
if hits:
print(f"synced after {curr_try}/{max_retries} tries of {interval_time} seconds each\n")
break
print(f"not synced after {curr_try}/{max_retries} tries of {interval_time} secs each", end="\r")
time.sleep(interval_time)
curr_try += 1
assert curr_try <= max_retries, f"node failed syncing after waiting for {max_retries} minutes"
# There are several messages containing "start synchronize" according to Almog,
# this is due to a bug in the sync test binary.
# We would like the timestamp of the latest one.
start_sync_hits = q.get_all_msg_containing(init_session, new_client, START_SYNC, is_print=False)
last_sync_msg = start_sync_hits[-1]
# parsing sync start time
st = convert_ts_to_datetime(last_sync_msg["T"])
et = convert_ts_to_datetime(hits[0]["T"])
ass_err = f"it took too long for syncing: {str(et - st)}, max {max_retries} minutes"
passed_minutes = (et-st).seconds / 60
assert passed_minutes < max_time_for_sync_mins, ass_err
# total time since starting sync until finishing
print(f"new client is synced after {str(et - st)}")
assert 1
```
#### File: tests/tx_generator/aws_generate_txs.py
```python
import argparse
import os
import sys
import time
# this hack is for importing packages located above
# this file and it's imports files location
dir_path = os.getcwd()
print(f"adding {dir_path} to sys.path")
sys.path.insert(0, dir_path)
from tests.convenience import str2bool
from tests.tx_generator import actions
from tests.tx_generator import config as conf
from tests.tx_generator.models.wallet_api import WalletAPI
from tests.tx_generator.models.accountant import Accountant
def set_parser():
parser = argparse.ArgumentParser(description='This is a transactions generator program',
usage='%(prog)s [-h]',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--pod_ip', dest="pod_ip", metavar='',
help='miner ip to interact with', required=True)
parser.add_argument('-n', '--new_accounts', dest="new_accounts", default=100, type=int, metavar='',
help='number of new accounts to be created by sending coins from tap', required=False)
parser.add_argument('-t', '--tx_count', dest="tx_count", default=0, type=int, metavar='',
help='number of txs sent between newly created accounts', required=False)
parser.add_argument('-gp', '--gas_price', dest='gas_price', default=1, type=int, metavar='',
help='the sending account\'s private key', required=False)
parser.add_argument('-ld', '--layer_duration', dest='layer_duration', default=300, type=int, metavar='',
help='duration of each layer', required=False)
parser.add_argument('-w', '--wait_layers', dest='layer_wait', default=6, type=int, metavar='',
help='layers to wait until new state is processed', required=False)
parser.add_argument('-c', '--concurrent', dest='is_concurrent', type=str2bool, nargs='?', default=False, metavar='',
help='send transactions concurrently', required=False)
return parser.parse_args()
if __name__ == "__main__":
"""
This script relays on the fact that we have a tap in our cluster
with the public and private keys that are mentioned in the conf.py
file.
initially #new_accounts number of transactions will be sent and create
#new_accounts new accounts
Sleep for 4 layers, until the state is updated and new accounts created
send #tx_num transactions using different account for each.
in case of concurrency, after every full iteration where all accounts were involved,
run all processes at once using multiprocessing and continue accumulating
more txs for the next run.
"""
# Parse arguments
parsed_args = set_parser()
print(f"\narguments received:\n{parsed_args}\n")
new_acc_num = parsed_args.new_accounts
tx_count = parsed_args.tx_count
layer_duration = parsed_args.layer_duration
layer_wait = parsed_args.layer_wait
is_concurrent = parsed_args.is_concurrent
amount = 100
pod_ip = parsed_args.pod_ip
pod_lst = [{"pod_ip": pod_ip, "name": "AWS_GATEWAY"}]
my_wallet = WalletAPI(None, pod_lst)
# Get TAP initial values
tap_nonce = my_wallet.get_nonce_value(conf.acc_pub)
tap_balance = my_wallet.get_balance_value(conf.acc_pub)
if not tap_nonce or not tap_balance:
print(f"could not resolve nonce/balance, nonce={tap_nonce}, balance={tap_balance}")
# Create an accountant to follow state
tap_acc = Accountant.set_tap_acc(balance=tap_balance, nonce=tap_nonce)
acc = Accountant({conf.acc_pub: tap_acc}, tap_init_amount=tap_balance)
acc.tx_cost = parsed_args.gas_price
# Create new accounts by sending them coins
actions.send_coins_to_new_accounts(my_wallet, new_acc_num, amount, acc, parsed_args.gas_price)
if not tx_count:
print("\nbye bye!\n")
exit(0)
tts = layer_wait * layer_duration
print(f"sleeping for {tts} to enable new state to be processed")
sleep_interval = 60
while tts >= 0:
time.sleep(sleep_interval)
tts -= sleep_interval
print(f"{tts} seconds remain")
print("\n")
actions.send_tx_from_each_account(my_wallet, acc, tx_count, is_concurrent=is_concurrent, is_use_tap=False)
``` |
{
"source": "JoseBarreiros/RosslerAttractor_DynamicalSystems",
"score": 3
} |
#### File: JoseBarreiros/RosslerAttractor_DynamicalSystems/atractor3d.py
```python
import math, random
import matplotlib.pyplot as plt
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
# Step Size
h = .1
#Initial conditions for Rossler system
x = 0
y = 0
z = 0
# Parameters for the Rossler System
a = .41 #.3
b = 2 #2
c = 4 #4
# Functions that define the system
def f(x,y,z):
global a,b,c
dxdt = -y-z
return dxdt
def g(x,y,z):
global a,b,c
dydt = x + a * y
return dydt
def e(x,y,z):
global a,b,c
dzdt = b + z * (x - c)
return dzdt
# randomly perturb the initial conditions to create variable time series
x = x + random.random() / 2.0
y = y + random.random() / 2.0
z = z + random.random() / 2.0
dataX0 = []
dataY0 = []
dataZ0 = []
yList = []
xList = []
zList = []
tList = []
lamdaList = []
lyapunovList = []
t = 1
xList.append(x)
yList.append(y)
zList.append(z)
tList.append(t)
# Use the 4th order Runge-Kutta method
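# Each k-term below already includes the step size h, so the final update
# x_{n+1} = x_n + k1/6 + k2/3 + k3/3 + k4/6 is the standard RK4 weighting
# (k1 + 2*k2 + 2*k3 + k4) / 6.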
def rk4o(x, y, z):
global h
k1x = h*f(x, y, z)
k1y = h*g(x, y, z)
k1z = h*e(x, y, z)
k2x = h*f(x + k1x/2.0, y + k1y/2.0, z + k1z/2.0)
k2y = h*g(x + k1x/2.0, y + k1y/2.0, z + k1z/2.0)
k2z = h*e(x + k1x/2.0, y + k1y/2.0, z + k1z/2.0)
k3x = h*f(x + k2x/2.0, y + k2y/2.0, z + k2z/2.0)
k3y = h*g(x + k2x/2.0, y + k2y/2.0, z + k2z/2.0)
k3z = h*e(x + k2x/2.0, y + k2y/2.0, z + k2z/2.0)
k4x = h*f(x + k3x, y + k3y, z + k3z)
k4y = h*g(x + k3x, y + k3y, z + k3z)
k4z = h*e(x + k3x, y + k3y, z + k3z)
x = x + k1x/6.0 + k2x/3.0 + k3x/3.0 + k4x/6.0
y = y + k1y/6.0 + k2y/3.0 + k3y/3.0 + k4y/6.0
z = z + k1z/6.0 + k2z/3.0 + k3z/3.0 + k4z/6.0
return [x,y,z]
t = 1
changeInTime = h
startLE = True
perturb=0
while changeInTime < 2000: # Perform 2000 / h (= 20,000) iterations
[x,y,z] = rk4o(xList[t-1], yList[t-1], zList[t-1])
xList.append(x)
yList.append(y)
zList.append(z)
tList.append(t)
if 200 < changeInTime: # Remove the transient after 200 / h iterations
if startLE:
cx = xList[t-1] + perturb
cy = yList[t-1]
cz = zList[t-1]
startLE = False
t = t + 1
#tList.append(t)
#t = t + 1
changeInTime += h
print(len(tList))
print(len(xList))
#plt.plot(xList, yList, '-', linewidth=0.1) #2D
#f, axarr = plt.subplots(2, sharex=True)
fig = plt.figure(figsize=plt.figaspect(.4),facecolor='white')
fig.suptitle('Rossler attractor\n a=%s'%(str(a)),fontsize=18)
ax = fig.add_subplot(1,2,1)
ax.plot(xList, yList, '-', linewidth=0.1)
ax.set_xlabel('X', fontsize=14)
ax.set_ylabel('Y', fontsize=14)
ax = fig.add_subplot(1,2,2,projection='3d')
ax.plot(xList, yList, zList, '-', linewidth=0.1)
ax.set_xlabel('X', fontsize=14)
ax.set_ylabel('Y', fontsize=14)
ax.set_zlabel('Z', fontsize=14)
plt.show()
```
#### File: JoseBarreiros/RosslerAttractor_DynamicalSystems/quadratic_map.py
```python
import math, operator, random
import numpy as np
import matplotlib.pyplot as plt
def my_range(start, end, step):
while start <= end:
yield start
start += step
a_c=[]
l_max=[]
co=0
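# Sweep the parameter a over [0.3, 0.4]; for each value, integrate the system
# and collect successive local maxima of x. Plotting Xmax against a yields the
# bifurcation / quadratic-map diagram.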
for ii in my_range(0.3,0.4,0.0001):
co+=1
dataX0 = []
dataY0 = []
dataZ0 = []
yList = []
xList = []
zList = []
y_v=[]
z_v=[]
count = 1
t = 1
##
x = 0
y = 0
z = 0
xList.append(x)
yList.append(y)
zList.append(z)
h = .1
a = ii
b = 2
c = 4
def f(x,y,z):
global a,b,c
dxdt = -y-z
return dxdt
def g(x,y,z):
global a,b,c
dydt = x + a * y
return dydt
def e(x,y,z):
global a,b,c
dzdt = b + z * (x - c)
return dzdt
def rk4o(x, y, z):
global h
k1x = h*f(x, y, z)
k1y = h*g(x, y, z)
k1z = h*e(x, y, z)
k2x = h*f(x + k1x/2.0, y + k1y/2.0, z + k1z/2.0)
k2y = h*g(x + k1x/2.0, y + k1y/2.0, z + k1z/2.0)
k2z = h*e(x + k1x/2.0, y + k1y/2.0, z + k1z/2.0)
k3x = h*f(x + k2x/2.0, y + k2y/2.0, z + k2z/2.0)
k3y = h*g(x + k2x/2.0, y + k2y/2.0, z + k2z/2.0)
k3z = h*e(x + k2x/2.0, y + k2y/2.0, z + k2z/2.0)
k4x = h*f(x + k3x, y + k3y, z + k3z)
k4y = h*g(x + k3x, y + k3y, z + k3z)
k4z = h*e(x + k3x, y + k3y, z + k3z)
x = x + k1x/6.0 + k2x/3.0 + k3x/3.0 + k4x/6.0
y = y + k1y/6.0 + k2y/3.0 + k3y/3.0 + k4y/6.0
z = z + k1z/6.0 + k2z/3.0 + k3z/3.0 + k4z/6.0
return [x,y,z]
X0 = []
Y0 = []
Z0 = []
previousLocalMax = x
localMax = x
t = 1
changeInTime = h
avgChange = []
LastPoint = changeInTime
while changeInTime < 2000 and len(dataX0) < 100:
[x,y,z] = rk4o(xList[t-1], yList[t-1], zList[t-1])
xList.append(x)
yList.append(y)
zList.append(z)
if 201 < changeInTime:
            # Record successive local maxima of x: xList[t-1] is a local peak
            # when both of its neighbors are smaller.
if x < xList[t-1] and xList[t-2] < xList[t-1]:
previousLocalMax = localMax
localMax = xList[t-1]
dataX0.append(localMax)
dataY0.append(previousLocalMax)
# Calculate the change between this and last point
avgChange.append(changeInTime - LastPoint)
LastPoint = changeInTime
t = t + 1
changeInTime += h
#l_max[0]=dataX0
a_c.append(a)
l_max.append(dataX0)
#print(a_c)
#print(l_max)
ax=plt.figure(facecolor='white')
plt.plot(a_c,l_max,'b.',markersize=2)
ax.suptitle('Quadratic map - Rossler Attractor', fontsize=18)
plt.xlabel('a', fontsize=14)
plt.ylabel('Xmax', fontsize=14)
plt.show()
print('done')
``` |
{
"source": "josebasilioo/fastapi-template-base",
"score": 3
} |
#### File: src/controllers/movieController.py
```python
from typing import List
from fastapi import Header, APIRouter, HTTPException
from src.models.movies import MovieIn, MovieOut
from src import database_manager as db_manager
fake_movie_db = [
{
'name': 'Star Wars: Episode IX - The Rise of Skywalker',
'plot': 'The surviving members of the resistance face the First Order once again.',
'genres': ['Action', 'Adventure', 'Fantasy'],
'casts': ['<NAME>', '<NAME>']
}
]
movies = APIRouter()
@movies.get('/list', response_model=List[MovieOut])
async def index():
return await db_manager.get_all_movies()
@movies.post('/create', status_code=201)
async def add_movie(payload: MovieIn):
movie_id = await db_manager.add_movie(payload)
response = {
'id': movie_id,
**payload.dict()
}
return response
@movies.put('/update/{id}')
async def update_movie(id: int, payload: MovieIn):
movie = await db_manager.get_movie(id)
if not movie:
raise HTTPException(status_code=404, detail="Movie not found")
update_data = payload.dict(exclude_unset=True)
movie_in_db = MovieIn(**movie)
updated_movie = movie_in_db.copy(update=update_data)
return await db_manager.update_movie(id, updated_movie)
@movies.delete('/delete/{id}')
async def delete_movie(id: int):
movie = await db_manager.get_movie(id)
if not movie:
raise HTTPException(status_code=404, detail="Movie not found")
return await db_manager.delete_movie(id)
``` |
{
"source": "joseberlines/ipyflex",
"score": 2
} |
#### File: ipyflex/ipyflex/flex_layout.py
```python
import json
import os
from enum import Enum
from typing import Dict as TypeDict
from typing import List as TypeList
from typing import Union
from ipywidgets import DOMWidget, Widget, widget_serialization
from traitlets.traitlets import Bool, Dict, Instance, Unicode
from ._frontend import module_name, module_version
from .utils import get_nonexistant_path
import copy
class MESSAGE_ACTION(str, Enum):
SAVE_TEMPLATE = 'save_template'
UPDATE_CHILDREN = 'update_children'
class FlexLayout(DOMWidget):
RESERVED_NAME = {'Create new'}
_model_name = Unicode('FlexLayoutModel').tag(sync=True)
_model_module = Unicode(module_name).tag(sync=True)
_model_module_version = Unicode(module_version).tag(sync=True)
_view_name = Unicode('FlexLayoutView').tag(sync=True)
_view_module = Unicode(module_name).tag(sync=True)
_view_module_version = Unicode(module_version).tag(sync=True)
# children = TypedTuple(
# trait=Instance(Widget), help="List of widget children"
# ).tag(sync=True, **widget_serialization)
children = Dict(
key_trait=Unicode,
value_trait=Instance(Widget),
help='Dict of widget children',
).tag(sync=True, **widget_serialization)
layout_config = Dict(
{'borderLeft': False, 'borderRight': False},
help='Layout configuration',
).tag(sync=True)
style = Dict(
{},
help='Style configuration',
).tag(sync=True)
template = Unicode(
None, help='Path to template json file.', allow_none=True
).tag(sync=True)
template_json = Dict(
None, help='Template configuration', allow_none=True
).tag(sync=True)
editable = Bool(
True, help='Flag to activate/deactivate edit mode', config=True
).tag(sync=True)
def __init__(
self,
widgets: Union[TypeDict, TypeList] = [],
# layout_config: TypeDict,
**kwargs,
):
super().__init__(**kwargs)
if isinstance(widgets, dict):
self.children = widgets
elif isinstance(widgets, list):
self.children = {
f'Widget {i}': widgets[i] for i in range(0, len(widgets))
}
else:
raise TypeError('Invalid input!')
        if self.RESERVED_NAME & set(self.children):
            raise KeyError('Please do not use widget names from the reserved list!')
self.template_json = None
if self.template is not None:
try:
with open(self.template, 'r') as f:
self.template_json = json.load(f)
except FileNotFoundError:
self.log.warning(
f'Failed to read {self.template}! Using default template.'
)
self.template_json = None
self.on_msg(self._handle_frontend_msg)
def add(self, name: str, widget: Widget) -> None:
if not self.editable:
self.log.warning('Widget is in readonly mode!')
return
if name in self.RESERVED_NAME:
            raise KeyError('Please do not use widget names from the reserved list!')
old = copy.copy(self.children)
old[name] = widget
self.children = old
self.send(
{
'action': MESSAGE_ACTION.UPDATE_CHILDREN,
'payload': {'name': name},
}
)
def _handle_frontend_msg(
self, model: 'FlexLayout', msg: Dict, buffers: TypeList
) -> None:
action = msg.get('action')
payload = msg.get('payload', None)
if action == MESSAGE_ACTION.SAVE_TEMPLATE:
file_name = str(payload.get('file_name'))
json_data = payload.get('json_data')
if not file_name.endswith('.json'):
file_name += '.json'
if file_name != self.template:
file_path = get_nonexistant_path(
os.path.join(os.getcwd(), file_name)
)
else:
file_path = self.template
with open(file_path, 'w') as f:
json.dump(json_data, f)
self.template = file_path
``` |
{
"source": "JoseBonoko/ptavi-p3",
"score": 3
} |
#### File: JoseBonoko/ptavi-p3/karaoke.py
```python
import sys
import json
import smallsmilhandler
from xml.sax import make_parser
from urllib.request import urlretrieve
from xml.sax.handler import ContentHandler
class KaraokeLocal:
def __init__(self, file):
self.get_tags = []
self.dicc = {}
parser = make_parser()
cHandler = smallsmilhandler.SmallSMILHandler()
parser.setContentHandler(cHandler)
parser.parse(open(file))
self.get_tags = cHandler.get_tags()
def __str__(self):
line_etiqueta = ''
        for biblio in self.get_tags:
nom_etiqueta = biblio['tag']
line_etiqueta += biblio['tag']
biblio['tag'] = 'tag'
for atributo, valor in biblio.items():
if atributo != biblio['tag'] and valor != "":
line_etiqueta += '\t'+'{0}="{1}"'.format(atributo, valor)
line_etiqueta += '\n'
biblio['tag'] = nom_etiqueta
return line_etiqueta
def to_json(self, file):
smiltojson = file.replace('.smil', '.json')
with open(smiltojson, 'w') as fichjson:
json.dump(self.get_tags, fichjson, indent=4)
def do_local(self):
        for biblio in self.get_tags:
for atributo, valor in biblio.items():
if atributo == 'src':
if valor.startswith('http://'):
file_local = valor[valor.rfind('/'):]
                        urlretrieve(valor, file_local[1:])
biblio['src'] = file_local[1:]
if __name__ == "__main__":
try:
file = sys.argv[1]
except IndexError:
sys.exit("Usage:python3 karaoke.py file.smil.")
karaokelocal = KaraokeLocal(file)
print(karaokelocal)
karaokelocal.to_json(file)
karaokelocal.do_local()
karaokelocal.to_json('local.smil')
    print(karaokelocal)
``` |
{
"source": "joseboveda/simple-salesforce",
"score": 3
} |
#### File: simple_salesforce/tests/test_api.py
```python
import re
try:
# Python 2.6
import unittest2 as unittest
except ImportError:
import unittest
import httpretty
try:
# Python 2.6/2.7
import httplib as http
from mock import Mock, patch
except ImportError:
# Python 3
import http.client as http
from unittest.mock import Mock, patch
import requests
from simple_salesforce import tests
from simple_salesforce.api import (
_exception_handler,
Salesforce,
SalesforceMoreThanOneRecord,
SalesforceMalformedRequest,
SalesforceExpiredSession,
SalesforceRefusedRequest,
SalesforceResourceNotFound,
SalesforceGeneralError
)
class TestSalesforce(unittest.TestCase):
"""Tests for the Salesforce instance"""
def setUp(self):
"""Setup the SalesforceLogin tests"""
request_patcher = patch('simple_salesforce.api.requests')
self.mockrequest = request_patcher.start()
self.addCleanup(request_patcher.stop)
@httpretty.activate
def test_custom_session_success(self):
httpretty.register_uri(
httpretty.POST,
re.compile(r'^https://.*$'),
body=tests.LOGIN_RESPONSE_SUCCESS,
status=http.OK
)
session_state = {
'called': False,
}
def on_response(*args, **kwargs):
session_state['called'] = True
session = requests.Session()
session.hooks = {
'response': on_response,
}
sf = Salesforce(
session=session,
username='<EMAIL>',
password='password',
security_token='token')
self.assertEqual(tests.SESSION_ID, sf.session_id)
self.assertEqual(session, sf.request)
@httpretty.activate
def test_custom_version_success(self):
httpretty.register_uri(
httpretty.POST,
re.compile(r'^https://.*$'),
body=tests.LOGIN_RESPONSE_SUCCESS,
status=http.OK
)
# Use an invalid version that is guaranteed to never be used
expected_version = '4.2'
sf = Salesforce(
session=requests.Session(), username='<EMAIL>',
password='password', security_token='<PASSWORD>',
version=expected_version)
self.assertEqual(
sf.base_url.split('/')[-2], 'v%s' % expected_version)
class TestExceptionHandler(unittest.TestCase):
"""Test the exception router"""
def setUp(self):
"""Setup the exception router tests"""
self.mockresult = Mock()
self.mockresult.url = 'http://www.example.com/'
self.mockresult.json.return_value = 'Example Content'
def test_multiple_records_returned(self):
"""Test multiple records returned (a 300 code)"""
self.mockresult.status_code = 300
with self.assertRaises(SalesforceMoreThanOneRecord) as cm:
_exception_handler(self.mockresult)
self.assertEqual(str(cm.exception), (
'More than one record for '
'http://www.example.com/. Response content: Example Content'))
def test_malformed_request(self):
"""Test a malformed request (400 code)"""
self.mockresult.status_code = 400
with self.assertRaises(SalesforceMalformedRequest) as cm:
_exception_handler(self.mockresult)
self.assertEqual(str(cm.exception), (
'Malformed request '
'http://www.example.com/. Response content: Example Content'))
def test_expired_session(self):
"""Test an expired session (401 code)"""
self.mockresult.status_code = 401
with self.assertRaises(SalesforceExpiredSession) as cm:
_exception_handler(self.mockresult)
self.assertEqual(str(cm.exception), (
'Expired session for '
'http://www.example.com/. Response content: Example Content'))
def test_request_refused(self):
"""Test a refused request (403 code)"""
self.mockresult.status_code = 403
with self.assertRaises(SalesforceRefusedRequest) as cm:
_exception_handler(self.mockresult)
self.assertEqual(str(cm.exception), (
'Request refused for '
'http://www.example.com/. Response content: Example Content'))
def test_resource_not_found(self):
"""Test resource not found (404 code)"""
self.mockresult.status_code = 404
with self.assertRaises(SalesforceResourceNotFound) as cm:
_exception_handler(self.mockresult, 'SpecialContacts')
self.assertEqual(str(cm.exception), (
'Resource SpecialContacts Not'
' Found. Response content: Example Content'))
def test_generic_error_code(self):
"""Test an error code that is otherwise not caught"""
self.mockresult.status_code = 500
with self.assertRaises(SalesforceGeneralError) as cm:
_exception_handler(self.mockresult)
self.assertEqual(str(cm.exception), (
'Error Code 500. Response content'
': Example Content'))
``` |
{
"source": "josebrule/Ant",
"score": 2
} |
#### File: josebrule/Ant/app.py
```python
from flask import Flask, render_template, flash, redirect, url_for, session, request
from flask_mysqldb import MySQL
from wtforms import Form, StringField, PasswordField, validators, SelectField
from passlib.hash import sha256_crypt
from functools import wraps
import timeit
import datetime
import os
from wtforms.fields.html5 import EmailField
from firebase import firebase
IIDD=1
app = Flask(__name__)
app.secret_key = os.urandom(24)
app.config['UPLOADED_PHOTOS_DEST'] = 'static/image/product'
# Config MySQL
mysql = MySQL()
app.config['MYSQL_HOST'] = 'mysql-j0h5nn.alwaysdata.net'
app.config['MYSQL_USER'] = 'j0h5nn'
app.config['MYSQL_PASSWORD'] = '<PASSWORD>'
app.config['MYSQL_DB'] = 'j0h5nn_saludables'
app.config['MYSQL_CURSORCLASS'] = 'DictCursor'
# Initialize the app for use with this MySQL class
mysql.init_app(app)
# Initialize Firebase
firebase = firebase.FirebaseApplication("https://antojossaludable1234.firebaseio.com/",None)
import mysql.connector as mc
mydb = mc.connect(
host="mysql-j0h5nn.alwaysdata.net",
user="j0h5nn",
password="<PASSWORD>",
database="j0h5nn_saludables"
)
def is_logged_in(f):
@wraps(f)
def wrap(*args, **kwargs):
if 'logged_in' in session:
            return f(*args, **kwargs)
else:
return redirect(url_for('login'))
return wrap
def not_logged_in(f):
@wraps(f)
def wrap(*args, **kwargs):
if 'logged_in' in session:
return redirect(url_for('index'))
else:
            return f(*args, **kwargs)
return wrap
def is_admin_logged_in(f):
@wraps(f)
def wrap(*args, **kwargs):
if 'admin_logged_in' in session:
            return f(*args, **kwargs)
else:
return redirect(url_for('admin_login'))
return wrap
def not_admin_logged_in(f):
@wraps(f)
def wrap(*args, **kwargs):
if 'admin_logged_in' in session:
return redirect(url_for('admin'))
else:
            return f(*args, **kwargs)
return wrap
def wrappers(func, *args, **kwargs):
def wrapped():
return func(*args, **kwargs)
return wrapped
def content_based_filtering(product_id):
cur = mysql.connection.cursor()
cur.execute("SELECT * FROM products WHERE id=%s", (product_id,)) # getting id row
data = cur.fetchone() # get row info
data_cat = data['category'] # get id category ex shirt
print('Showing result for Product Id: ' + product_id)
category_matched = cur.execute("SELECT * FROM products WHERE category=%s", (data_cat,)) # get all shirt category
print('Total product matched: ' + str(category_matched))
cat_product = cur.fetchall() # get all row
cur.execute("SELECT * FROM product_level WHERE product_id=%s", (product_id,)) # id level info
id_level = cur.fetchone()
recommend_id = []
cate_level = ['v_shape', 'polo', 'clean_text', 'design', 'leather', 'color', 'formal', 'converse', 'loafer', 'hook',
'chain']
for product_f in cat_product:
cur.execute("SELECT * FROM product_level WHERE product_id=%s", (product_f['id'],))
f_level = cur.fetchone()
match_score = 0
try:
if f_level['product_id'] != int(product_id):
for cat_level in cate_level:
if f_level[cat_level] == id_level[cat_level]:
match_score += 1
if match_score == 11:
recommend_id.append(f_level['product_id'])
except:
recommend_id.append(11)
print('Total recommendation found: ' + str(recommend_id))
if recommend_id:
cur = mysql.connection.cursor()
placeholders = ','.join((str(n) for n in recommend_id))
query = 'SELECT * FROM products WHERE id IN (%s)' % placeholders
cur.execute(query)
recommend_list = cur.fetchall()
return recommend_list, recommend_id, category_matched, product_id
else:
return ''
@app.route('/')
def index():
form = OrderForm(request.form)
# Create cursor
cur = mysql.connection.cursor()
# Get message
values = 'tortas'
cur.execute("SELECT * FROM products WHERE category=%s ORDER BY RAND() LIMIT 4", (values,))
tortas = cur.fetchall()
values = 'Anchetas'
cur.execute("SELECT * FROM products WHERE category=%s ORDER BY RAND() LIMIT 4", (values,))
Anchetas = cur.fetchall()
values = 'BebidasMermeladas'
cur.execute("SELECT * FROM products WHERE category=%s ORDER BY RAND() LIMIT 4", (values,))
BebidasMermeladas = cur.fetchall()
values = 'Galletas'
cur.execute("SELECT * FROM products WHERE category=%s ORDER BY RAND() LIMIT 4", (values,))
Galletas = cur.fetchall()
# Close Connection
cur.close()
return render_template('home.html', tortas=tortas, Anchetas=Anchetas, BebidasMermeladas=BebidasMermeladas, Galletas=Galletas, form=form)
class LoginForm(Form): # Create Login Form
username = StringField('', [validators.length(min=1)],
render_kw={'autofocus': True, 'placeholder': 'Username'})
password = PasswordField('', [validators.length(min=3)],
render_kw={'placeholder': 'Password'})
# User Login
@app.route('/login', methods=['GET', 'POST'])
@not_logged_in
def login():
form = LoginForm(request.form)
if request.method == 'POST' and form.validate():
# GEt user form
username = form.username.data
# password_candidate = request.form['password']
password_candidate = form.password.data
# Create cursor
cur = mysql.connection.cursor()
# Get user by username
result = cur.execute("SELECT * FROM users WHERE username=%s", [username])
if result > 0:
# Get stored value
data = cur.fetchone()
password = data['password']
uid = data['id']
name = data['name']
# Compare password
if sha256_crypt.verify(password_candidate, password):
# passed
session['logged_in'] = True
session['uid'] = uid
session['s_name'] = name
x = '1'
cur.execute("UPDATE users SET online=%s WHERE id=%s", (x, uid))
return redirect(url_for('index'))
else:
flash('Incorrect password', 'danger')
return render_template('login.html', form=form)
else:
flash('Username not found', 'danger')
# Close connection
cur.close()
return render_template('login.html', form=form)
return render_template('login.html', form=form)
@app.route('/out')
def logout():
if 'uid' in session:
# Create cursor
cur = mysql.connection.cursor()
uid = session['uid']
x = '0'
cur.execute("UPDATE users SET online=%s WHERE id=%s", (x, uid))
session.clear()
flash('Cerraste sesión', 'success')
return redirect(url_for('index'))
return redirect(url_for('login'))
class RegisterForm(Form):
name = StringField('', [validators.length(min=3, max=50)],
render_kw={'autofocus': True, 'placeholder': 'Nombre Completo'})
username = StringField('', [validators.length(min=3, max=25)], render_kw={'placeholder': 'Usuario'})
email = EmailField('', [validators.DataRequired(), validators.Email(), validators.length(min=4, max=25)],
render_kw={'placeholder': 'Correo Electronico'})
password = PasswordField('', [validators.length(min=3)],
render_kw={'placeholder': 'Contraseña'})
mobile = StringField('', [validators.length(min=10, max=15)], render_kw={'placeholder': 'Celular'})
@app.route('/register', methods=['GET', 'POST'])
@not_logged_in
def register():
form = RegisterForm(request.form)
if request.method == 'POST' and form.validate():
name = form.name.data
email = form.email.data
username = form.username.data
        password = sha256_crypt.encrypt(str(form.password.data))
mobile = form.mobile.data
# Create Cursor
cur = mysql.connection.cursor()
cur.execute("INSERT INTO users(name, email, username, password, mobile) VALUES(%s, %s, %s, %s, %s)",
(name, email, username, password, mobile))
# Commit cursor
mysql.connection.commit()
# Close Connection
cur.close()
flash('Ahora estas registrado puedes iniciar sesión', 'success')
return redirect(url_for('index'))
return render_template('register.html', form=form)
class OrderForm(Form): # Create Order Form
name = StringField('', [validators.length(min=1), validators.DataRequired()],
render_kw={'autofocus': True, 'placeholder': 'Nombre Completo'})
mobile_num = StringField('', [validators.length(min=1), validators.DataRequired()],
render_kw={'autofocus': True, 'placeholder': 'Celular'})
quantity = SelectField('', [validators.DataRequired()],
choices=[('1', '1'), ('2', '2'), ('3', '3'), ('4', '4'), ('5', '5')])
order_place = StringField('', [validators.length(min=1), validators.DataRequired()],
render_kw={'placeholder': 'Lugar de entrega'})
@app.route('/tortas', methods=['GET', 'POST'])
def tortas():
form = OrderForm(request.form)
# Create cursor
cur = mysql.connection.cursor()
# Get message
values = 'tortas'
cur.execute("SELECT * FROM products WHERE category=%s ORDER BY id ASC", (values,))
products = cur.fetchall()
# Close Connection
cur.close()
if 'view' in request.args:
q = request.args['view']
productid = q
x = content_based_filtering(productid)
curso = mysql.connection.cursor()
curso.execute("SELECT * FROM products WHERE id=%s", [q])
product = curso.fetchall()
# print('Execution time: ' + str(execution_time) + ' usec')
if 'uid' in session:
uid = session['uid']
# Create cursor
cur = mysql.connection.cursor()
cur.execute("SELECT * FROM product_view WHERE user_id=%s AND product_id=%s", [uid, productid])
result = cur.fetchall()
if result:
now = datetime.datetime.now()
now_time = now.strftime("%y-%m-%d %H:%M:%S")
cur.execute("UPDATE product_view SET date=%s WHERE user_id=%s AND product_id=%s",
[now_time, uid, productid])
else:
cur.execute("INSERT INTO product_view(user_id, product_id) VALUES(%s, %s)", [uid, productid])
mysql.connection.commit()
return render_template('view_product.html', x=x, tortass=product)
elif 'order' in request.args:
productId = request.args['order']
cur = mysql.connection.cursor()
IDD = request.remote_addr
IDD = IDD.split(".")
userId = ((int(IDD[0]) * (10 ** 9)) + (int(IDD[1]) * (10 ** 6)) + (int(IDD[2]) * (10 ** 3)) + (int(IDD[3])))
try:
cur.execute("INSERT INTO kart0 (userId, productId) VALUES (%s, %s)", [userId, productId])
msg = "Added successfully"
except:
            mysql.connection.rollback()
msg = "Error occured"
cur.close()
return render_template('tortas.html', tortas=products, form=form)
@app.route('/Anchetas', methods=['GET', 'POST'])
def Anchetas():
form = OrderForm(request.form)
# Create cursor
cur = mysql.connection.cursor()
# Get message
values = 'Anchetas'
cur.execute("SELECT * FROM products WHERE category=%s ORDER BY id ASC", [values])
products = cur.fetchall()
# Close Connection
cur.close()
if 'view' in request.args:
q = request.args['view']
product_id = q
x = content_based_filtering(product_id)
curso = mysql.connection.cursor()
curso.execute("SELECT * FROM products WHERE id=%s", [q])
products = curso.fetchall()
return render_template('view_product.html', x=x, tortass=products)
elif 'order' in request.args:
productId = request.args['order']
cur = mysql.connection.cursor()
IDD = request.remote_addr
IDD = IDD.split(".")
userId = ((int(IDD[0]) * (10 ** 9)) + (int(IDD[1]) * (10 ** 6)) + (int(IDD[2]) * (10 ** 3)) + (int(IDD[3])))
try:
cur.execute("INSERT INTO kart0 (userId, productId) VALUES (%s, %s)", [userId, productId])
msg = "Added successfully"
except:
            mysql.connection.rollback()
msg = "Error occured"
cur.close()
return render_template('Anchetas.html', Anchetas=products, form=form)
@app.route('/BebidasMermeladas', methods=['GET', 'POST'])
def BebidasMermeladas():
form = OrderForm(request.form)
# Create cursor
cur = mysql.connection.cursor()
# Get message
values = 'BebidasMermeladas'
cur.execute("SELECT * FROM products WHERE category=%s ORDER BY id ASC", [values])
products = cur.fetchall()
# Close Connection
cur.close()
if 'view' in request.args:
q = request.args['view']
product_id = q
x = content_based_filtering(product_id)
curso = mysql.connection.cursor()
curso.execute("SELECT * FROM products WHERE id=%s", [q])
products = curso.fetchall()
return render_template('view_product.html', x=x, tortass=products)
elif 'order' in request.args:
productId = request.args['order']
cur = mysql.connection.cursor()
IDD = request.remote_addr
IDD = IDD.split(".")
userId = ((int(IDD[0]) * (10 ** 9)) + (int(IDD[1]) * (10 ** 6)) + (int(IDD[2]) * (10 ** 3)) + (int(IDD[3])))
try:
cur.execute("INSERT INTO kart0 (userId, productId) VALUES (%s, %s)", [userId, productId])
msg = "Added successfully"
except:
            mysql.connection.rollback()
msg = "Error occured"
cur.close()
return render_template('BebidasMermeladas.html', BebidasMermeladas=products, form=form)
@app.route('/Galletas', methods=['GET', 'POST'])
def Galletas():
form = OrderForm(request.form)
# Create cursor
cur = mysql.connection.cursor()
# Get message
values = 'Galletas'
cur.execute("SELECT * FROM products WHERE category=%s ORDER BY id ASC", [values])
products = cur.fetchall()
# Close Connection
cur.close()
if 'view' in request.args:
q = request.args['view']
product_id = q
x = content_based_filtering(product_id)
curso = mysql.connection.cursor()
curso.execute("SELECT * FROM products WHERE id=%s", [q])
products = curso.fetchall()
return render_template('view_product.html', x=x, tortass=products)
elif 'order' in request.args:
productId = request.args['order']
cur = mysql.connection.cursor()
        IDD = request.remote_addr
        IDD = IDD.split(".")
        userId = ((int(IDD[0]) * (10 ** 9)) + (int(IDD[1]) * (10 ** 6)) + (int(IDD[2]) * (10 ** 3)) + (int(IDD[3])))
try:
cur.execute("INSERT INTO kart0 (userId, productId) VALUES (%s, %s)", [userId, productId])
msg = "Added successfully"
except:
            mysql.connection.rollback()
msg = "Error occured"
cur.close()
return render_template('Galletas.html', Galletas=products, form=form)
@app.route('/admin_login', methods=['GET', 'POST'])
@not_admin_logged_in
def admin_login():
if request.method == 'POST':
# GEt user form
username = request.form['email']
password_candidate = request.form['password']
# Create cursor
cur = mysql.connection.cursor()
# Get user by username
result = cur.execute("SELECT * FROM admin WHERE email=%s", [username])
if result > 0:
# Get stored value
data = cur.fetchone()
password = data['password']
uid = data['id']
name = data['firstName']
# Compare password
if sha256_crypt.verify(password_candidate, password):
# passed
session['admin_logged_in'] = True
session['admin_uid'] = uid
session['admin_name'] = name
return redirect(url_for('admin'))
else:
flash('Incorrect password', 'danger')
return render_template('pages/login.html')
else:
flash('Username not found', 'danger')
# Close connection
cur.close()
return render_template('pages/login.html')
return render_template('pages/login.html')
@app.route('/admin_out')
def admin_logout():
if 'admin_logged_in' in session:
session.clear()
return redirect(url_for('admin_login'))
return redirect(url_for('admin'))
@app.route('/admin')
@is_admin_logged_in
def admin():
curso = mysql.connection.cursor()
num_rows = curso.execute("SELECT * FROM products")
result = curso.fetchall()
order_rows = curso.execute("SELECT * FROM orders")
users_rows = curso.execute("SELECT * FROM users")
return render_template('pages/index.html', result=result, row=num_rows, order_rows=order_rows,
users_rows=users_rows)
@app.route('/orders')
@is_admin_logged_in
def orders():
curso = mysql.connection.cursor()
num_rows = curso.execute("SELECT * FROM products")
order_rows = curso.execute("SELECT * FROM orders")
result = curso.fetchall()
users_rows = curso.execute("SELECT * FROM users")
return render_template('pages/all_orders.html', result=result, row=num_rows, order_rows=order_rows,
users_rows=users_rows)
@app.route('/users')
@is_admin_logged_in
def users():
curso = mysql.connection.cursor()
num_rows = curso.execute("SELECT * FROM products")
order_rows = curso.execute("SELECT * FROM orders")
users_rows = curso.execute("SELECT * FROM users")
result = curso.fetchall()
return render_template('pages/all_users.html', result=result, row=num_rows, order_rows=order_rows,
users_rows=users_rows)
@app.route('/search', methods=['POST', 'GET'])
def search():
form = OrderForm(request.form)
if 'q' in request.args:
q = request.args['q']
# Create cursor
cur = mysql.connection.cursor()
# Get message
query_string = "SELECT * FROM products WHERE pName LIKE %s ORDER BY id ASC"
cur.execute(query_string, ('%' + q + '%',))
products = cur.fetchall()
# Close Connection
cur.close()
flash('Showing result for: ' + q, 'success')
return render_template('search.html', products=products, form=form)
else:
flash('Search again', 'danger')
return render_template('search.html')
@app.route("/removeFromCart")
def removeFromCart():
productId = int(request.args.get('productId'))
print(productId)
cur = mysql.connection.cursor()
IDD = request.remote_addr
IDD = IDD.split(".")
userId = ((int(IDD[0]) * (10 ** 9)) + (int(IDD[1]) * (10 ** 6)) + (int(IDD[2]) * (10 ** 3)) + (int(IDD[3])))
try:
cur.execute("DELETE FROM kart0 WHERE userId = %s AND productId = %s", [userId, productId])
msg = "removed successfully"
except:
msg = "error occured"
print("hola")
cur.close()
cur = mydb.cursor()
cur.execute(
"SELECT products.id, products.pName, products.price, products.picture, products.category FROM products, kart0 WHERE products.id = kart0.productId AND kart0.userId = %s",
[userId])
products = cur.fetchall()
totalPrice = 0
for product in products:
totalPrice += product[2]
return render_template("cart.html", products=products, totalPrice=totalPrice)
@app.route("/productDescription")
def productDescription():
productId = request.args.get('productId')
cur = mysql.connection.cursor()
    cur.execute("SELECT productId, name, price, description, image, stock FROM products WHERE productId = %s", (productId,))
productData = cur.fetchone()
cur.close()
return render_template("productDescription.html", data=productData)
@app.route("/addToCart")
def addToCart():
productId = int(request.args.get('productId'))
cur = mysql.connection.cursor()
userId = IIDD
try:
cur.execute("INSERT INTO kart0 (userId, productId) VALUES (?, ?)", (userId, productId))
cur.commit()
msg = "Added successfully"
except:
        mysql.connection.rollback()
msg = "Error occured"
cur.close()
    return redirect(url_for('index'))
@app.route("/cart", methods=['GET', 'POST'])
def cart():
form = OrderForm(request.form)
cur = mydb.cursor()
IDD = request.remote_addr
IDD = IDD.split(".")
userId = ((int(IDD[0]) * (10 ** 9)) + (int(IDD[1]) * (10 ** 6)) + (int(IDD[2]) * (10 ** 3)) + (int(IDD[3])))
cur.execute(
"SELECT products.id, products.pName, products.price, products.picture, products.category FROM products, kart0 WHERE products.id = kart0.productId AND kart0.userId = %s",
[userId])
products = cur.fetchall()
totalPrice = 0
for product in products:
totalPrice += product[2]
if request.method == 'POST':
# Parse form data
email = request.form['email']
firstName = request.form['firstName']
lastName = request.form['lastName']
address1 = request.form['address1']
address2 = request.form['address2']
city = request.form['city']
phone = request.form['phone']
if request.method == 'POST' and "@" in email and phone.isdigit() and totalPrice!=0:
name = firstName+" "+lastName
mobile = phone
order_place = city+" "+address1+" "+address2
now = datetime.datetime.now()
week = datetime.timedelta(days=7)
delivery_date = now + week
now_time = delivery_date.strftime("%y-%m-%d %H:%M:%S")
# Create Cursor
curs = mysql.connection.cursor()
for MJS in range(1, 20):
avaible = firebase.get('/4/data/' + str(MJS - 1), 'available')
curs.execute("UPDATE products SET available=%s WHERE id=%s", [avaible, MJS])
for product in products:
quantity=1
if 'uid' in session:
uid = session['uid']
curs.execute("INSERT INTO orders(uid, pid, ofname, mobile, oplace, quantity, ddate, PName) "
"VALUES(%s, %s, %s, %s, %s, %s, %s, %s)",
[uid, product[0], name, mobile, order_place, quantity, now_time,product[1]])
curs.execute("UPDATE products SET available=available-%s WHERE id=%s", [quantity, product[0]])
curs.execute("SELECT available FROM products WHERE id=%s", [product[0]])
avaible = curs.fetchone().get('available')
P = '/4/data/' + str(int(product[0]) - 1)
firebase.put(P, 'available', avaible)
else:
curs.execute("INSERT INTO orders(pid, ofname, mobile, oplace, quantity, ddate, PName) "
"VALUES(%s, %s, %s, %s, %s, %s, %s)",
[product[0], name, mobile, order_place, quantity, now_time,product[1]])
curs.execute("UPDATE products SET available=available-%s WHERE id=%s", [quantity, product[0]])
curs.execute("SELECT available FROM products WHERE id=%s", [product[0]])
avaible = curs.fetchone().get('available')
P = '/4/data/' + str(int(product[0]) - 1)
firebase.put(P, 'available', avaible)
cur.execute(
"DELETE FROM kart0 WHERE kart0.userId = %s",
[userId])
# Commit cursor
mysql.connection.commit()
# Close Connection
cur.close()
flash('Enseguida nos comunicaremos contigo para efectos de pago', 'success')
return render_template("cart.html")
elif request.method == 'POST' and totalPrice!=0:
flash('Falta información de envío', 'danger')
elif request.method == 'POST':
flash('No se ha agregado ningún producto', 'danger')
return render_template("cart.html", products=products, totalPrice=totalPrice)
@app.route('/profile')
@is_logged_in
def profile():
if 'user' in request.args:
q = request.args['user']
curso = mysql.connection.cursor()
curso.execute("SELECT * FROM users WHERE id=%s", (q,))
result = curso.fetchone()
if result:
if result['id'] == session['uid']:
curso.execute("SELECT * FROM orders WHERE uid=%s ORDER BY id ASC", (session['uid'],))
res = curso.fetchall()
return render_template('profile.html', result=res)
else:
flash('Unauthorised', 'danger')
return redirect(url_for('login'))
else:
flash('Unauthorised! Please login', 'danger')
return redirect(url_for('login'))
else:
flash('Unauthorised', 'danger')
return redirect(url_for('login'))
class UpdateRegisterForm(Form):
name = StringField('Nombre Completo', [validators.length(min=3, max=50)],
render_kw={'autofocus': True, 'placeholder': 'Nombre Completo'})
email = EmailField('Email', [validators.DataRequired(), validators.Email(), validators.length(min=4, max=25)],
render_kw={'placeholder': 'Correo Electronico'})
password = PasswordField('Password', [validators.length(min=3)],
render_kw={'placeholder': 'Contraseña'})
mobile = StringField('Mobile', [validators.length(min=10, max=15)], render_kw={'placeholder': 'Celular'})
@app.route('/settings', methods=['POST', 'GET'])
@is_logged_in
def settings():
form = UpdateRegisterForm(request.form)
if 'user' in request.args:
q = request.args['user']
curso = mysql.connection.cursor()
curso.execute("SELECT * FROM users WHERE id=%s", (q,))
result = curso.fetchone()
if result:
if result['id'] == session['uid']:
if request.method == 'POST' and form.validate():
name = form.name.data
email = form.email.data
password = sha256_crypt.encrypt(str(form.password.data))
mobile = form.mobile.data
# Create Cursor
cur = mysql.connection.cursor()
exe = cur.execute("UPDATE users SET name=%s, email=%s, password=%s, mobile=%s WHERE id=%s",
(name, email, password, mobile, q))
if exe:
flash('Profile updated', 'success')
return render_template('user_settings.html', result=result, form=form)
else:
flash('Profile not updated', 'danger')
return render_template('user_settings.html', result=result, form=form)
else:
flash('Unauthorised', 'danger')
return redirect(url_for('login'))
else:
flash('Unauthorised! Please login', 'danger')
return redirect(url_for('login'))
else:
flash('Unauthorised', 'danger')
return redirect(url_for('login'))
class DeveloperForm(Form): #
id = StringField('', [validators.length(min=1)],
render_kw={'placeholder': 'Input a product id...'})
@app.route('/developer', methods=['POST', 'GET'])
def developer():
form = DeveloperForm(request.form)
if request.method == 'POST' and form.validate():
q = form.id.data
curso = mysql.connection.cursor()
result = curso.execute("SELECT * FROM products WHERE id=%s", (q,))
if result > 0:
x = content_based_filtering(q)
wrappered = wrappers(content_based_filtering, q)
            execution_time = timeit.timeit(wrappered, number=1)
            seconds = execution_time % 60
return render_template('developer.html', form=form, x=x, execution_time=seconds)
else:
nothing = 'Nothing found'
return render_template('developer.html', form=form, nothing=nothing)
else:
return render_template('developer.html', form=form)
# Routes to Render Something
@app.route('/about', strict_slashes=False)
def about():
return render_template("about.html")
@app.route('/productos', strict_slashes=False)
def productos():
return render_template("productos.html")
@app.route('/contacto', strict_slashes=False)
def contacto():
return render_template("contacto.html")
if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True)
``` |
{
"source": "josebrwn/python-interview",
"score": 3
} |
#### File: python-interview/project/chunk_file.py
```python
def read_file():
with open('./data/textfile.txt') as f:
        print(f.read())
def read_lines():
with open('./data/textfile.txt') as f:
for line in f:
print(line)
def chunk_file():
from functools import partial
blocks = []
with open('./data/textfile.txt') as f:
for block in iter(partial(f.read, 32), ''):
blocks.append(block.replace('\n', '##'))
print(blocks)
def hash_file():
from functools import partial
h = {}
with open('./data/textfile.txt') as f:
i = 0
for line in f:
i += 1
x, y = line.rstrip('\n').split(",")
h[i] = [int(x), int(y)]
print(h)
def read_csv_file():
import csv
    with open('./data/textfile.txt') as f:
csv_file = csv.reader(f, delimiter=',')
birth_years = []
death_years = []
for line in csv_file:
birth = line[0]
death = line[1]
birth_years.append(birth)
death_years.append(death)
print(birth_years)
print(death_years)
def read_json_file():
import json
    with open('./data/textfile.json', 'r') as f:
json_file = f.read()
json_dictionary = json.loads(json_file)
print(json_dictionary)
```
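`chunk_file` above uses the two-argument form of `iter`, which keeps calling `partial(f.read, 32)` until the empty-string sentinel signals end of file. The same idea reads naturally as a generator; this is an illustrative sketch rather than part of the original file, and the default chunk size is an arbitrary choice.
```python
def iter_chunks(path, size=32):
    """Yield successive fixed-size chunks of a text file.

    Equivalent to iter(partial(f.read, size), ''): f.read() returns ''
    at end of file, which terminates the loop.
    """
    with open(path) as f:
        while True:
            chunk = f.read(size)
            if not chunk:
                return
            yield chunk

# Usage mirroring chunk_file():
# blocks = [c.replace('\n', '##') for c in iter_chunks('./data/textfile.txt')]
```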
#### File: python-interview/project/fibonacci.py
```python
def fib(n):
"""the pythonic approach - consider the list index to begin at 0"""
x, y = 0, 1
for _ in range(n):
# print(_, x, y)
x, y = y, x + y
return x
def fib_recursive(n, memo={}):
"""memoized recursion using a dictionary. consider the index to begin at 1.
Note: some version of python reach a maximum recursion after 1000 calls"""
if n in memo:
return memo[n]
if n <= 2:
result = 1
else:
result = fib_recursive(n-1, memo)+fib_recursive(n-2, memo)
memo[n] = result
return result
val = 10
print(fib(val))
print()
print(fib_recursive(val))
# make a random assertion
assert fib(10) == fib_recursive(10) == 55
```
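The hand-rolled `memo` dictionary above can also be replaced by the standard library's cache decorator. A short sketch (not in the original file) using `functools.lru_cache`; the default recursion limit still applies, so very large `n` would need the iterative `fib` instead.
```python
from functools import lru_cache

@lru_cache(maxsize=None)
def fib_cached(n):
    """Memoized Fibonacci with the index beginning at 1, like fib_recursive."""
    if n <= 2:
        return 1
    return fib_cached(n - 1) + fib_cached(n - 2)

assert fib_cached(10) == 55
```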
#### File: python-interview/project/heapsort.py
```python
from random_list import random_list
# this is actually just a selection sort! the trick is to create a max heap
# and then pop off the root successively from 0 to n into a sorted array.
# we can do this by passing the length of the remaining heap as a parameter
# instead of looking at len(array) in max_heapify. That way we only re-heapify
# the unsorted part of the array as we successively swap the root and nth element
# (a self-contained sketch of that idea follows this file).
def build_max_heap(array):
"""reverse sort in place using a max heap"""
for index in reversed(range(0,len(array))):
print(index)
max_heapify(array, index)
def max_heapify(array, index):
left = index + 1 # this is just array sorting - in a heap the left and right children are 2i+1 and 2i+2
right = index + 2
biggest = index
if left < len(array):
if array[index] < array[left]:
biggest = left
if right < len(array):
if array[biggest] < array[right]:
biggest = right
if biggest != index:
array[index], array[biggest] = array[biggest], array[index]
max_heapify(array, biggest)
array = random_list(10)
print(array)
build_max_heap(array)
print(array)
array = random_list(10)
```
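The header comment describes the fix that turns the selection-style pass above into a real heapsort: use the true child indices 2i+1 and 2i+2, and pass the size of the still-unsorted prefix so the sorted tail is never re-heapified. A self-contained sketch of that idea (not part of the original file):
```python
def sift_down(a, i, size):
    """Restore the max-heap property for a[:size], starting at index i."""
    while True:
        left, right, biggest = 2 * i + 1, 2 * i + 2, i
        if left < size and a[left] > a[biggest]:
            biggest = left
        if right < size and a[right] > a[biggest]:
            biggest = right
        if biggest == i:
            return
        a[i], a[biggest] = a[biggest], a[i]
        i = biggest

def heapsort(a):
    """In-place ascending heapsort: build a max heap, then shrink it."""
    n = len(a)
    for i in reversed(range(n // 2)):   # heapify from the last parent up
        sift_down(a, i, n)
    for end in reversed(range(1, n)):   # pop the max into the sorted tail
        a[0], a[end] = a[end], a[0]
        sift_down(a, 0, end)

data = [5, 3, 8, 1, 9, 2]
heapsort(data)
assert data == sorted(data)
```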
#### File: python-interview/project/merge_sort.py
```python
def merge_sort(array):
n = len(array)
if n > 1:
mid = n//2
left = array[0:mid]
right = array[mid:n]
print(mid, left, right, array)
merge_sort(left)
merge_sort(right)
merge(left, right, array, n)
def merge(left, right, array, array_length):
right_length = len(right)
left_length = len(left)
left_index = right_index = 0
for array_index in range(0, array_length):
if right_index == right_length:
array[array_index:array_length] = left[left_index:left_length]
break
elif left_index == left_length:
array[array_index:array_length] = right[right_index:right_length]
break
elif left[left_index] <= right[right_index]:
array[array_index] = left[left_index]
left_index += 1
else:
array[array_index] = right[right_index]
right_index += 1
array = [99,2,3,3,12,4,5]
arr_len = len(array)
merge_sort(array)
print(array)
assert len(array) == arr_len
```
#### File: python-interview/project/nth_linked_list.py
```python
class Node:
def __init__(self, value, child=None):
self.value = value
self.child = child
# The string representation of this node.
def __str__(self):
return str(self.value)
# converts the given linked list into an easy-to-read string format.
def linked_list_to_string(head):
current = head
str_list = []
while current:
str_list.append(str(current.value))
current = current.child
str_list.append('(None)')
return ' -> '.join(str_list)
def nth_from_last(head, n):
"""using two pointers a distance of n apart, locate the nth from end"""
if head is None or type(head) is not Node or n is None or type(n) is not int or n < 0:
print("None")
return None
tail_pointer = head
nth_pointer = head
for _ in range(n):
if tail_pointer is None: # !
return None
tail_pointer = tail_pointer.child
while tail_pointer:
tail_pointer = tail_pointer.child
nth_pointer = nth_pointer.child
print(nth_pointer)
return nth_pointer
def run_tests():
current = Node(1)
for i in range(2, 8):
current = Node(i, current)
head = current
# head = 7 -> 6 -> 5 -> 4 -> 3 -> 2 -> 1 -> (None)
nth_from_last(head, 1) # should return 1.
nth_from_last(head, 5) # should return 5.
current = Node(4)
for i in reversed(range(1, 4)):
current = Node(i, current)
head = current
# head = 1 -> 2 -> 3 -> 4 -> (None)
nth_from_last(head, 2) # should return 3.
nth_from_last(head, 4) # should return 1.
nth_from_last(head, 5) # should return None.
nth_from_last(None, 1) # should return None.
# 5 invalid inputs:
# nth_from_last(head, 1.1)
# nth_from_last(head, -1)
# nth_from_last(None, -1)
# nth_from_last("head", 1)
# nth_from_last(head, "1")
run_tests()
```
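The two pointers above stay exactly `n` links apart, so when the lead pointer runs off the end the trailing pointer sits on the answer. An equivalent single-pass alternative (illustrative only; it reuses the `Node` class defined above) keeps a bounded window of the last `n` nodes:
```python
from collections import deque

def nth_from_last_deque(head, n):
    """Return the nth node from the end, or None if the list is too short."""
    if head is None or n <= 0:
        return None
    window = deque(maxlen=n)  # older nodes fall out automatically
    current = head
    while current:
        window.append(current)
        current = current.child
    return window[0] if len(window) == n else None
```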
#### File: python-interview/project/problems.py
```python
class Solution:
def twoSum(self, array, target):
"""find the list indices that sum to a target"""
results = {}
for index, val in enumerate(array):
complement = target - val
if complement in results:
return [results[complement], index]
else:
results[val] = index
return []
def reverse(self, x) -> int:
"""naive solution using a list.
# TODO: in place pairwise swap (n//2 steps)?
# NOTE: blah blah 32 bit if > 2**31 return 0"""
result = 0
is_negative = (x != abs(x))
if is_negative:
x = abs(x)
max_value = 2**31
else:
max_value = 2**31-1
array = list(str(x))
power = len(array)
for index in range(0, power):
result += (10**(index)*int(array[index]))
if result > max_value:
return 0
if is_negative:
result = - result
return result
def simple_reverse(self, x) -> int:
"""reverse an integer using numeric techniques"""
result = 0
is_negative = (x != abs(x))
if is_negative:
x = abs(x)
max_value = 2**31
else:
max_value = 2**31-1
power = -1
temp = x
while(temp > 0):
temp = temp // 10
power += 1
while x > 0:
result += x % 10 * 10**power
if result > max_value:
return 0
x = x // 10
power -= 1
if is_negative:
result = - result
return result
    def string_reverse(self, x) -> str:
"""reverses an integer by converting to string and taking a slice"""
is_negative = (x != abs(x))
if is_negative:
x = abs(x)
max_value = 2**31
else:
max_value = 2**31-1
# extended slice = all values in step order = -1
result = str(x)[::-1]
if int(result) > max_value:
return 0
if is_negative:
result = "-" + result
return result
# these are really bad variable names
def isPalindrome(self, x: int) -> bool:
"""without converting to string. TODO: try successively divide by 10
to get the length instead of using a list"""
assert type(x) == int
if x < 0:
return False
print(x)
loop = 0
digits = []
while x > 0:
digit = (x // 10**loop) % 10
digits.append(digit)
x = x - digit * 10**loop
print(digit, loop, x, digits)
loop += 1
length = len(digits)
for index in range(0, length//2):
if digits[index] != digits[length-1-index]:
return False
return True
def romanToInt(self, s: str) -> int:
# MCMLXIII
roman = ['I', 'V', 'X', 'L', 'C', 'D', 'M']
numerals = [1, 5, 10, 50, 100, 500, 1000]
roman_numerals = dict(zip(roman, numerals))
result = 0
characters = len(s)
skip_next = False
for index, letter in enumerate(list(s)):
if not skip_next:
value = roman_numerals[letter]
if index+1 <= characters-1:
if roman_numerals[s[index+1]] > roman_numerals[s[index]]:
value = roman_numerals[s[index+1]] - value
skip_next = True
result += value
else:
skip_next = False
return result
def longestCommonPrefix(self, strs: [str]) -> str:
if len(strs) == 0:
return ""
lengths = {item: len(item) for item in strs}
result = min(lengths, key=lengths.get)
for size in reversed(range(0, lengths[result])):
for word in strs:
if word[:size+1] != result:
result = result[:size]
break
if word[:size+1] == result:
break
return result
def find_repeated_words(self, s) -> dict:
'''returns any repeated words in a string'''
stop_chars = ',.'
# strip out stop characters
for char in stop_chars:
if char in s:
s = s.replace(char, '')
# set approach
unique = set()
repeated = set()
words = s.split()
# a set will automagically ignore repeated inserts
for word in words:
if word in unique:
repeated.add(word)
unique.add(word)
return repeated
def square_sorted_list(self, l) -> list:
'''takes a sorted int list and returns a sorted list of their squares'''
# since the square of a negative is positive, the squares may not be in order!
if len(l) == 0:
return []
result = []
pointer_left = 0
pointer_right = len(l)-1
while pointer_right - pointer_left >= 0:
# compare left and right items and insert the square of the larger
if abs(l[pointer_left]) >= abs(l[pointer_right]):
result.insert(0, l[pointer_left]**2)
pointer_left += 1
else:
result.insert(0, l[pointer_right]**2)
pointer_right -= 1
return result
    def recursive_square_sorted_list(self, l, memo=None):
        '''recursively takes a sorted list and returns a sorted list of their squares'''
        if memo is None:  # avoid sharing one mutable default list across calls
            memo = []
        if len(l) == 0:
            return []
# base case
if len(l) == 1:
memo.insert(0, l[0]**2)
return memo
# recursive case
else:
if abs(l[0]) >= abs(l[-1]):
memo.insert(0, l[0]**2)
self.recursive_square_sorted_list(l[1:], memo)
else:
memo.insert(0, l[-1]**2)
self.recursive_square_sorted_list(l[:-1], memo)
return memo
def two_sum(self, A, target):
'''Return the indices of two integers in a list, such that their sum equals a given target.'''
        # O(n^2): check every pair of indices
        for idx in range(len(A) - 1):
            for c in range(idx + 1, len(A)):
if A[idx] + A[c] == target:
return (idx, c)
return (0, 0)
def two_sum_fast(self, A, target):
'''Return the indices of two integers in a list, such that their sum equals a given target.
Keep a dict of the indices complement of all the visited elements.'''
# O(n)
complement = {}
for idx in range(len(A)):
if A[idx] in complement:
return (complement[A[idx]], idx)
complement[target - A[idx]] = idx # target-val:index
return (0,0)
# nums = [11, 2, 7, 15]
# target = 9
# result = Solution().twoSum(nums, target)
# print(result)
print(Solution().simple_reverse(321))
print(Solution().simple_reverse(-321))
print(Solution().reverse(321))
print(Solution().reverse(-321))
# # -1534236469
# # -2147483412
# assert Solution().reverse(-1534236469) == 0
# assert Solution().reverse(-2147483412) == -2143847412
# print(Solution().isPalindrome(121))
# print(Solution().romanToInt('MCMLXIII')) # 1963
# print(Solution().longestCommonPrefix(["flow","flog","flight"]))
# print(Solution().longestCommonPrefix(["flow","flog","flog"]))
# print(Solution().longestCommonPrefix([]))
s = 'here is a word, and here is another word.'
print(Solution().find_repeated_words(s))
print('square sorted list', Solution().square_sorted_list([-6, -4, 1, 2, 3, 7]))
# print('square sorted list', Solution().square_sorted_list([-6, -4, 1, 2, 3, 7, 9]))
# print('square sorted list', Solution().square_sorted_list([-6, -4]))
# print('square sorted list', Solution().square_sorted_list([-6]))
print('recursive square sorted list', Solution().recursive_square_sorted_list([-6, -4, 1, 2, 3, 7]))
print("two sum", Solution().two_sum([2, 8, 12, 15], 20))
print("two sum fast", Solution().two_sum_fast([2, 8, 12, 15], 20))
```
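The pairwise scan in `two_sum` is quadratic while `two_sum_fast`'s complement dictionary is linear. A quick standalone way to see the gap (a sketch, not part of the interview file; the function names here are local stand-ins for the two methods above) is to time both strategies on a worst case where the only matching pair sits at the very end:
```python
import timeit

def two_sum_pairs(nums, target):
    """O(n^2): check every pair of indices."""
    for i in range(len(nums) - 1):
        for j in range(i + 1, len(nums)):
            if nums[i] + nums[j] == target:
                return (i, j)
    return (0, 0)

def two_sum_dict(nums, target):
    """O(n): remember the complement of every value seen so far."""
    complement = {}
    for i, v in enumerate(nums):
        if v in complement:
            return (complement[v], i)
        complement[target - v] = i
    return (0, 0)

nums = list(range(5000))
target = nums[-1] + nums[-2]  # only the last two elements match
print('pairs:', timeit.timeit(lambda: two_sum_pairs(nums, target), number=1))
print('dict: ', timeit.timeit(lambda: two_sum_dict(nums, target), number=1))
```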
#### File: python-interview/project/rotate_matrix.py
```python
import numpy as np
def rotateMatrix(d):
loops = int(d/2)
print("loops {}".format(loops))
    m = np.arange(0, d**2).reshape(d, d)  # some sample input
print(m)
    # extract the outermost ring (a full solution would repeat this for each of the 'loops' layers)
rows, cols = m.shape
top = m.copy()[:1, :]
right = m.copy()[:, cols-1:]
bottom = m.copy()[rows-1:, :]
left = m.copy()[:, :1]
# swap everything out, and assign the new values back into the matrix
# when it flips depends on if you are going clockwise or counter clockwise
print("counter clockwise")
# top->left
(r, c) = top.shape
m[:, :1] = np.flip(top.reshape(c, r))
# left->bottom
(r, c) = left.shape
m[rows-1:, :] = left.reshape(c, r)
# bottom->right
(r, c) = bottom.shape
m[:, cols-1:] = np.flip(bottom.reshape(c, r))
# right->top
(r, c) = right.shape
m[:1, :] = right.reshape(c, r)
# print ("\n{}\n\n{}\n\n{}\n\n{}\n\n{}\n".format(top,left,bottom,right,m))
print("\n{}\n".format(m))
print ("clockwise")
# top->right
(r,c) = top.shape
m[:,cols-1:] = top.reshape(c,r)
# left-> top
(r,c) = left.shape
m[:1,:] = np.flip(left.reshape(c,r))
# bottom->left
(r,c) = bottom.shape
m[:,:1] = bottom.reshape(c,r)
# right->bottom
(r,c) = right.shape
m[rows-1:,:] = np.flip(right.reshape(c,r))
print ("\n{}\n".format(m))
rotateMatrix(9)
```
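The function above rotates only the outermost ring by swapping its four edges, with `loops` counting how many concentric layers a full rotation would need. When the whole matrix can be rotated at once, the standard dense trick is a transpose followed by a flip, which is exactly what `numpy.rot90` does; a brief sketch (not in the original file):
```python
import numpy as np

m = np.arange(16).reshape(4, 4)

# Counter-clockwise 90 degrees: transpose, then reverse the row order.
ccw = np.flipud(m.T)
assert np.array_equal(ccw, np.rot90(m))  # rot90 rotates CCW by default

# Clockwise 90 degrees: transpose, then reverse each row.
cw = np.fliplr(m.T)
assert np.array_equal(cw, np.rot90(m, k=-1))
print(cw)
```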
#### File: python-interview/project/validate_ip.py
```python
def validate_ip(ip_address):
"""An IP address consists of 32 bits, shown as 4 terms
of numbers from 0-255 represented in decimal form """
terms = ip_address.split(".")
if len(terms) != 4:
return False
for octet in range(0,4):
if not terms[octet].isdecimal():
return False
elif (int(terms[octet]) < 0 or int(terms[octet]) > 255):
return False
return True
print(validate_ip('127.0.0.1'))
print(validate_ip('127.0.0.1.4'))
print(validate_ip('127.b.0.1'))
assert validate_ip('127.0.0.1') == True
assert validate_ip('127.0.0.256') == False
assert validate_ip('127.0.a.1') == False
# filter-based approach (works, but harder to read)
def filter_octects(ip_address):
"""functional approach filters the list"""
terms = ip_address.split(".")
if not len([*filter(lambda octet: octet.isdecimal(), terms)])==4:
return False
elif not len([*filter(lambda octet: 0<=int(octet)<=255, terms)])==4:
return False
else:
return True
print(filter_octects('127.0.0.1'))
assert filter_octects('127.0.0.1') == True
assert filter_octects('127.0.0.256') == False
assert filter_octects('127.0.a.1') == False
# list comprehension (best!)
def all_valid(ip_address):
"""
`all` uses list comprehension to filter
https://docs.python.org/3/library/functions.html#all
"""
terms = ip_address.split(".")
if not all(octet.isdecimal() for octet in terms):
return False
elif not all(0 <= int(octet) <= 255 for octet in terms):
return False
else:
return True
print(all_valid('127.0.0.1'))
assert all_valid('127.0.0.1') == True
assert all_valid('127.0.0.256') == False
assert all_valid('127.0.a.1') == False
```
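All three variants above hand-roll the four-octet, 0-255 checks. For completeness, the standard library's `ipaddress` module performs the same validation; a brief sketch (not part of the original file):
```python
import ipaddress

def is_valid_ipv4(address):
    """Delegate octet parsing and range checks to the stdlib."""
    try:
        ipaddress.IPv4Address(address)
        return True
    except ipaddress.AddressValueError:
        return False

assert is_valid_ipv4('127.0.0.1') is True
assert is_valid_ipv4('127.0.0.256') is False
assert is_valid_ipv4('127.0.a.1') is False
```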
#### File: josebrwn/python-interview/test_um_nose.py
```python
from project.unnecessary_math import multiply
def test_numbers_3_4():
assert multiply(3,4) == 12
def test_strings_a_3():
assert multiply('a',3) == 'aaa'
test_numbers_3_4()
test_strings_a_3()
``` |
{
"source": "josebsalazar/RTlive",
"score": 3
} |
#### File: RTlive/rtlive/assumptions.py
```python
import logging
import numpy
import os
import pandas
import pathlib
import requests
import scipy.stats
import tarfile
_log = logging.getLogger(__file__)
_DP_DATA = pathlib.Path(pathlib.Path(__file__).parent.parent, "data")
if not _DP_DATA.exists():
_log.warning("Data directory at %s does not exist yet. Creating...")
_DP_DATA.mkdir()
__all__ = [
"delay_distribution",
"generation_time",
]
def _download_patient_data(file_path=None):
""" Downloads patient data to data directory
from: https://stackoverflow.com/questions/16694907/ """
if not file_path:
        file_path = pathlib.Path(_DP_DATA, "patients.tar.gz")
url = "https://github.com/beoutbreakprepared/nCoV2019/raw/master/latest_data/latestdata.tar.gz"
with requests.get(url, stream=True) as r:
r.raise_for_status()
with open(file_path, "wb") as f:
for chunk in r.iter_content(chunk_size=8192):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
def _read_patient_data(file_path=None, max_delay=60) -> pandas.DataFrame:
""" Finds every valid delay between symptom onset and report confirmation
from the patient line list and returns all the delay samples. """
if not file_path:
        file_path = pathlib.Path(_DP_DATA, "patients.tar.gz")
if not file_path.exists():
_download_patient_data()
_log.info("Reading patient data")
with tarfile.open(file_path, "r:*") as tar:
csv_path = tar.getnames()[0]
patients = pandas.read_csv(
tar.extractfile(csv_path),
parse_dates=False,
usecols=["country", "date_onset_symptoms", "date_confirmation"],
low_memory=False,
)
patients.columns = ["Country", "Onset", "Confirmed"]
patients.Country = patients.Country.astype("category")
# There's an errant reversed date
patients = patients.replace("01.31.2020", "31.01.2020")
patients = patients.replace("31.04.2020", "01.05.2020")
# Only keep if both values are present
patients = patients.dropna()
# Must have strings that look like individual dates
# "2020.03.09" is 10 chars long
is_ten_char = lambda x: x.str.len().eq(10)
patients = patients[is_ten_char(patients.Confirmed) & is_ten_char(patients.Onset)]
# Convert both to datetimes
patients.Confirmed = pandas.to_datetime(
patients.Confirmed, format="%d.%m.%Y", errors="coerce"
)
patients.Onset = pandas.to_datetime(patients.Onset, format="%d.%m.%Y", errors="coerce")
# Only keep records where confirmed > onset
patients = patients[patients.Confirmed > patients.Onset]
# Mexico has many cases that are all confirmed on the same day regardless
# of onset date, so we filter it out.
patients = patients[patients.Country.ne("Mexico")]
# Remove any onset dates from the last two weeks to account for all the
# people who haven't been confirmed yet.
patients = patients[patients.Onset < patients.Onset.max() - pandas.Timedelta(days=14)]
return patients
def _extract_test_delays_from_patient_data(file_path=None, max_delay=60):
patients = _read_patient_data(file_path=file_path, max_delay=max_delay)
delays = (patients.Confirmed - patients.Onset).dt.days
delays = delays.reset_index(drop=True)
delays = delays[delays.le(max_delay)]
return delays
def delay_distribution(incubation_days=5) -> numpy.ndarray:
""" Returns the empirical delay distribution between symptom onset and confirmed positive case.
Parameters
----------
incubation_days : int
number of days with 0 probability of confirmation, to insert at the beginning
Returns
-------
p_delay : numpy.ndarray
distribution that describes the probability of positive tests since the day of infection
"""
    # The literature suggests roughly 5 days of incubation before symptom
    # onset. See:
    # https://www.ncbi.nlm.nih.gov/pmc/articles/PMC7081172/
p_delay_path = pathlib.Path(_DP_DATA, "p_delay.csv")
if p_delay_path.exists():
_log.info("Loading precomputed p_delay distribution from %s", p_delay_path)
p_delay = pandas.read_csv(p_delay_path, squeeze=True)
else:
_log.info("Precomputing testing delay distribution from patient data")
delays = _extract_test_delays_from_patient_data()
p_delay = delays.value_counts().sort_index()
new_range = numpy.arange(0, p_delay.index.max() + 1)
p_delay = p_delay.reindex(new_range, fill_value=0)
p_delay /= p_delay.sum()
p_delay = (
pandas.Series(numpy.zeros(incubation_days))
.append(p_delay, ignore_index=True)
.rename("p_delay")
)
p_delay.to_csv(pathlib.Path(_DP_DATA, "p_delay.csv"), index=False)
return p_delay.values
def generation_time(n_days: int=20) -> numpy.ndarray:
""" Create a discrete P(Generation Interval)
Source: https://www.ijidonline.com/article/S1201-9712(20)30119-3/pdf
Parameters
----------
n_days : int
number of days to consider (cuts the tail)
Returns
-------
p_generation_time : numpy.ndarray
distribution that describes the probability of causing a secondary infection
by days since the primary infection
"""
mean_si = 4.7
std_si = 2.9
mu_si = numpy.log(mean_si ** 2 / numpy.sqrt(std_si ** 2 + mean_si ** 2))
sigma_si = numpy.sqrt(numpy.log(std_si ** 2 / mean_si ** 2 + 1))
dist = scipy.stats.lognorm(scale=numpy.exp(mu_si), s=sigma_si)
    # Discretize the generation interval, cutting the tail at n_days
g_range = numpy.arange(0, n_days)
gt = pandas.Series(dist.cdf(g_range), index=g_range)
gt = gt.diff().fillna(0)
gt /= gt.sum()
gt = gt.values
return gt
```
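Both helpers above return discrete kernels that the model convolves with a daily infection curve: `generation_time` spreads secondary infections over the days after a primary one, and `delay_distribution` maps infections onto the days their positive tests appear. As an illustrative sketch only (the real model does this inside its probabilistic graph; it assumes the `rtlive` package is importable, and the reproduction number and seed value are arbitrary demo choices), a toy renewal-equation loop shows how the generation-interval kernel is used:
```python
import numpy

from rtlive.assumptions import generation_time

gt = generation_time()  # P(secondary infection s days after the primary)

r_t = 1.5           # assumed constant reproduction number for the demo
infections = [1.0]  # assumed single seed infection on day 0
for _ in range(30):
    # Most recent day first, so it lines up with gt[0], gt[1], ...
    recent = numpy.array(infections[-len(gt):][::-1])
    pressure = float(numpy.sum(recent * gt[:len(recent)]))
    infections.append(r_t * pressure)
print(numpy.round(infections, 3))
```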
#### File: RTlive/rtlive/data.py
```python
import dataclasses
import datetime
import enum
import iso3166
import logging
import numpy
import typing
import pandas
from . import preprocessing
_log = logging.getLogger(__file__)
LoadFunction = typing.Callable[
[pandas.Timestamp],
pandas.DataFrame
]
ProcessFunction = typing.Callable[
[pandas.DataFrame],
typing.Tuple[
# input: result of LoadFunction
pandas.DataFrame,
# output: dictionary of forecasting results for all regions
typing.Dict[str, preprocessing.ForecastingResult]
]
]
class Zone(enum.Enum):
Asia = "Asia"
Europe = "Europe"
America = "America"
@dataclasses.dataclass
class SupportedCountry:
alpha2: str
compute_zone: Zone
region_name: typing.Dict[str, str]
region_short_name: typing.Dict[str, str]
region_population: typing.Dict[str, int]
fn_load: LoadFunction
fn_process: ProcessFunction
SUPPORTED_COUNTRIES: typing.Dict[str, SupportedCountry] = {}
def set_country_support(
country_alpha2: str,
*,
compute_zone: Zone,
region_name: typing.Dict[str, str],
region_short_name: typing.Optional[typing.Dict[str, str]]=None,
region_population: typing.Dict[str, int],
fn_load: LoadFunction,
fn_process: ProcessFunction,
):
""" Function to set support for a country.
Parameters
----------
country_alpha2 : str
ISO-3166 alpha-2 short code of the country (key in SUPPORTED_COUNTRIES dict)
compute_zone : Zone
used to group countries by timezone for automated computing
region_name : dict
dictionary of { region_code : str }
to map machine-readable region codes to human-readable names
region_short_name : optional, dict
dictionary of { region_code : str }
to map machine-readable region codes to short human-readable names (falls back to [region_name])
region_population : dict
dictionary of { region_code : int }
to map machine-readable region codes to number of inhabitants
fn_load : callable
A function that takes one date argument `run_date` and returns a DataFrame
indexed by ["region", "date"] with columns ["new_cases", "new_tests"].
Use NaN to indicate missing data (e.g. in new_tests).
Ideally the function should return data "as it was on `run_date`", meaning that information
such as corrections that became available after `run_date` should not be taken into account.
This is important to realistically back-test how the model would have performed at `run_date`.
fn_process : callable
A processing function that takes the ["region", "date"]-indexed DataFrame
returned by the load function as the input.
The return value must be a dictionary (by region code) of forecasting results.
"""
if country_alpha2 not in iso3166.countries_by_alpha2:
raise KeyError(f"Unknown ISO-3166 alpha 2 country code '{country_alpha2}'.")
if not region_short_name:
# default to region codes
region_short_name = { rc : rc for rc in region_name }
# register loading functions
SUPPORTED_COUNTRIES[country_alpha2] = SupportedCountry(
country_alpha2,
compute_zone,
region_name,
region_short_name,
region_population,
fn_load,
fn_process,
)
return
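# Illustrative registration sketch; this mirrors the real call made at the
# bottom of rtlive/sources/data_be.py further below:
#
#   set_country_support(
#       country_alpha2='BE',
#       compute_zone=Zone.Europe,
#       region_name=BE_REGION_NAMES,
#       region_population=BE_REGION_POPULATION,
#       fn_load=get_data_BE,
#       fn_process=forecast_BE,
#   )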
def get_data(country: str, run_date: pandas.Timestamp) -> pandas.DataFrame:
""" Retrieves data for a country using the registered data loader method.
Parameters
----------
country : str
ISO-3166 alpha-2 short code of the country (key in SUPPORTED_COUNTRIES dict)
run_date : pandas.Timestamp
date when the analysis is performed
Returns
-------
model_input : pandas.DataFrame
Data as returned by data loader function.
"""
country = country.upper()
if country not in SUPPORTED_COUNTRIES:
raise KeyError(
f"The country '{country}' is not in the collection of supported countries."
)
scountry = SUPPORTED_COUNTRIES[country]
result = scountry.fn_load(run_date)
assert isinstance(result, pandas.DataFrame)
assert result.index.names == ("region", "date"), f"Index names were: {result.index.names}"
missing_names = set(result.reset_index().region) - set(scountry.region_name.keys())
missing_pop = set(result.reset_index().region) - set(
scountry.region_population.keys()
)
if missing_names:
raise Exception(
f"Data contains regions {missing_names} for which no names were registered."
)
if missing_pop:
raise Exception(
f"Data contains regions {missing_pop} for which no population were registered."
)
assert "new_cases" in result.columns, f"Columns were: {result.columns}"
assert "new_tests" in result.columns, f"Columns were: {result.columns}"
for col in ["new_cases", "new_tests", "new_deaths"]:
if col in result and any(result[col] < 0):
_log.warning(
f"Column '%s' has %i negative entries!! Overriding with NaN...",
col,
sum(result[col] < 0),
)
result.loc[result[col] < 0, col] = numpy.nan
return result
def _insert_future(df_raw: pandas.DataFrame, *, future_days: int):
""" Inserts new rows for dates that go beyond what's already in the index.
Parameters
----------
df_raw : pandas.DataFrame
a [region, date]-indexed dataframe
future_days : int
number of days to append after the last date (in every region)
Returns
-------
df : pandas.DataFrame
a new DataFrame that has rows of NaN for the new dates
"""
_log.info("Inserting %i future_days.", future_days)
dfs_with_future = []
regions = df_raw.reset_index().region.unique()
for region in regions:
latest = df_raw.xs(region).index[-1]
df_region = df_raw.xs(region)
new_index = pandas.date_range(
df_region.index[0],
latest + datetime.timedelta(days=future_days),
freq="D",
name="date"
)
dfs_with_future.append(df_region.reindex(new_index, fill_value=numpy.nan))
return pandas.concat(dfs_with_future, keys=regions, names=["region", "date"])
def process_testcounts(
country: str, df_raw: pandas.DataFrame,
future_days: int=0,
) -> typing.Tuple[pandas.DataFrame, typing.Dict[str, preprocessing.ForecastingResult]]:
""" Fills and forecasts test counts with country-specific logic.
Parameters
----------
country : str
ISO-3166 alpha-2 short code of the country (key in FORECASTERS dict)
df_raw : pandas.DataFrame
Data as returned by data loader function.
future_days : int
Number of days to append after the last date (in every region).
Can be used to predict testcounts for days that are not yet covered by any data.
Returns
-------
df_result : pandas.DataFrame
Input dataframe with a new column "predicted_new_tests"
forecasting_results : dict
the fbprophet results by region
"""
country = country.upper()
if country not in SUPPORTED_COUNTRIES:
raise KeyError(
f"The country '{country}' is not in the collection of supported countries."
)
# insert date index for prediction into the future
if future_days > 0:
df_raw = _insert_future(df_raw, future_days=future_days)
# make testcount forecast
df, results = SUPPORTED_COUNTRIES[country].fn_process(df_raw.copy())
assert isinstance(df, pandas.DataFrame)
assert df.index.names == ("region", "date")
assert "predicted_new_tests" in df.columns, f"Columns were: {df.columns}"
return df, results
def iter_countries_by_zone() -> typing.Iterator[
typing.Tuple[Zone, typing.List[SupportedCountry]]
]:
""" Iterates over supported countries, grouped by Zone.
Yields
------
zone : Zone
the compute zone
countries_in_zone : list
the supported countries in the respective zone
"""
for zone in Zone:
countries_in_zone = [
country
for _, country in SUPPORTED_COUNTRIES.items()
if country.compute_zone == zone
]
yield zone, countries_in_zone
return
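# Illustrative usage sketch (assumption, not in the original file):
#
#   for zone, countries in iter_countries_by_zone():
#       _log.info("Zone %s has %i supported countries", zone.value, len(countries))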
```
#### File: rtlive/sources/data_be.py
```python
import logging
import numpy
import pandas
import datetime
import requests
import io
from typing import Dict, Tuple, Union
from .. import preprocessing
_log = logging.getLogger(__file__)
# From https://en.wikipedia.org/wiki/Provinces_of_Belgium
BE_REGION_NAMES = {
'all': 'Belgium',
'FLA': 'Vlaanderen',
'WAL': 'Wallonie',
'BRU': 'Brussel',
'ANT': 'Antwerpen',
'LIM': 'Limburg',
'EFL': 'Oost-Vlaanderen',
'FBR': 'Vlaams-Brabant',
'WFL': 'West-Vlaanderen',
'HAI': 'Hainaut',
'LIE': 'Liège',
'LUX': 'Luxembourg',
'NAM': 'Namur',
'WBR': 'Brabant wallon',
}
# Province and region codes
# [ISO 3166-2:BE](https://en.wikipedia.org/wiki/ISO_3166-2:BE#Provinces) has no english codes
# Mapping of the keys in columns 'REGION' and 'PROVINCE' in the input file to a short code.
BE_REGION_INPUT_ABBR = {
'all': 'all',
'Flanders': 'FLA',
'Wallonia': 'WAL',
'Brussels': 'BRU',
'Antwerpen': 'ANT',
'Limburg': 'LIM',
'OostVlaanderen': 'EFL',
'VlaamsBrabant': 'FBR',
'WestVlaanderen': 'WFL',
'Hainaut': 'HAI',
'Liège': 'LIE',
'Luxembourg': 'LUX',
'Namur': 'NAM',
'BrabantWallon': 'WBR',
}
BE_REGION_CODES = {
v : k
for k, v in BE_REGION_NAMES.items()
}
# Source: https://www.ibz.rrn.fgov.be/fileadmin/user_upload/fr/pop/statistiques/population-bevolking-20200101.pdf
BE_REGION_POPULATION = {
'all': 11_476_279, # Belgium
'FLA': 6_623_505,
'WAL': 3_641_748,
'BRU': 1_211_026,
'ANT': 1_867_366,
'LIM': 876_785,
'EFL': 1_524_077,
'FBR': 1_155_148,
'WFL': 1_200_129,
'HAI': 1_345_270,
'LIE': 1_108_481,
'LUX': 286_571,
'NAM': 495_474,
'WBR': 405_952
}
def get_data_BE(run_date: pandas.Timestamp) -> pandas.DataFrame:
"""
Retrieve daily (run_date) regions and append national data (key 'all') to it
Parameters
----------
run_date : pandas.Timestamp
date for which the data shall be downloaded
Returns
-------
df : pandas.DataFrame
table with columns as required by rtlive/data.py API
"""
def redistribute(group: pandas.DataFrame, col: str) -> pandas.Series:
gdata = group.groupby('REGION')[col].sum()
gdata.loc['Brussels'] += gdata.loc['Nan'] * (gdata.loc['Brussels']/(gdata.loc['Brussels'] + gdata.loc['Flanders'] + gdata.loc['Wallonia']))
gdata.loc['Flanders'] += gdata.loc['Nan'] * (gdata.loc['Flanders']/(gdata.loc['Brussels'] + gdata.loc['Flanders'] + gdata.loc['Wallonia']))
gdata.loc['Wallonia'] += gdata.loc['Nan'] * (gdata.loc['Wallonia']/(gdata.loc['Brussels'] + gdata.loc['Flanders'] + gdata.loc['Wallonia']))
gdata.drop(index='Nan', inplace=True)
gdata = gdata.fillna(0).round(0).astype(int)
return gdata
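    # Note on the redistribute helper above (added comment): the unassigned
    # 'Nan' bucket is shared across Brussels/Flanders/Wallonia roughly in
    # proportion to their counts; since each line recomputes its denominator
    # after the previous in-place update, the split is only approximately
    # proportional to the original values.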
if run_date.date() > datetime.date.today():
raise ValueError('Run date is in the future. Nice try.')
if run_date.date() < datetime.date.today():
# TODO: implement downloading of historic data
raise NotImplementedError(
'Downloading with a run_date is not yet supported. '
f'Today: {datetime.date.today()}, run_date: {run_date}'
)
# Download data from Sciensano
content = requests.get('https://epistat.sciensano.be/Data/COVID19BE_tests.csv', verify=False,).content
df_tests = pandas.read_csv(
io.StringIO(content.decode('utf-8')),
sep=',',
parse_dates=['DATE'],
usecols=['DATE', 'REGION', 'PROVINCE', 'TESTS_ALL_POS', 'TESTS_ALL']
).rename(columns={
'DATE': 'date'
})
# Reformat data into Rtlive.de format at country level all
df_tests_per_all_day = (df_tests
.assign(region='all')
.groupby('date', as_index=True)
.agg(new_cases=('TESTS_ALL_POS', 'sum'), new_tests=('TESTS_ALL', 'sum'), region=('region', 'first'))
)
df_tests_per_all_day = (df_tests_per_all_day
.reset_index()
.set_index(['region', "date"])
.sort_index()
)
# Redistribute the nan for the column TESTS_ALL_POS for regions Flanders, Wallonia and Brussels
df_tests_positive = (df_tests
.fillna('Nan')
.groupby(['date'])
.apply(redistribute, 'TESTS_ALL_POS')
.stack()
.reset_index()
.rename(columns={'REGION':'region', 0:'new_cases'})
)
# Redistribute the nan for the column TESTS_ALL for regions Flanders, Wallonia and Brussels
df_tests_all = (df_tests
.fillna('Nan')
.groupby(['date'])
.apply(redistribute, 'TESTS_ALL')
.stack()
.reset_index()
.rename(columns={'REGION':'region', 0:'new_tests'})
)
# Combine the total number of tests and the number of positive tests into a basetable
df_tests_per_region_day = pandas.concat([df_tests_all, df_tests_positive['new_cases']], axis=1).set_index(['region', 'date'])
# Test per province (Ignore the nan's for the moment)
df_tests_per_province_day = (df_tests[df_tests['REGION'] != 'Brussels']
.groupby(['PROVINCE', 'date'], as_index=False)
.agg(new_cases=('TESTS_ALL_POS', 'sum'), new_tests=('TESTS_ALL', 'sum'))
.rename(columns={'PROVINCE':'region'})
.set_index(['region', 'date'])
)
df_tests_per_province_day.index.name = ('region', 'date')
# Combine the results at country level with region level
data = pandas.concat([df_tests_per_all_day, df_tests_per_region_day, df_tests_per_province_day], axis=0).sort_index()
data.index = data.index.set_levels(data.index.levels[0].map(BE_REGION_INPUT_ABBR.get), 'region')
assert isinstance(data, pandas.DataFrame)
assert data.index.names == ('region', 'date')
assert 'new_cases' in data.columns, f'Columns were: {data.columns}'
assert 'new_tests' in data.columns, f'Columns were: {data.columns}'
for col in ['new_cases', 'new_tests']:
if any(data[col] < 0):
_log.warning(
f'Column {col} has {sum(data[col] < 0)} negative entries!! Overriding with NaN...'
)
data.loc[data[col] < 0, col] = numpy.nan
return data
def forecast_BE(df: pandas.DataFrame) -> Tuple[pandas.DataFrame, dict]:
"""
Applies test count interpolation/extrapolation to Belgium data.
Parameters
----------
df : pandas.DataFrame
Data as returned by data loader function.
Returns
-------
df : pandas.DataFrame
Input dataframe with a new column "predicted_new_tests" and an index expanded back to
01/01/2020 (filled with zeros until 13/05/2020) to account for the absence of tests in this
period.
results : dict
The fbprophet results by region
"""
# forecast with existing data
df["predicted_new_tests"], results = preprocessing.predict_testcounts_all_regions(
df, "BE"
)
# interpolate the initial testing ramp-up to account for missing data
df_list = []
for region in df.index.get_level_values(level='region').unique():
df_region = df.xs(region).copy()
df_complement = pandas.DataFrame(
index=pandas.date_range(
start='2020-01-01',
end=df_region.index.get_level_values(level='date')[0]
- pandas.DateOffset(1, 'D'),
freq="D",
),
columns=df_region.columns,
)
df_complement['predicted_new_tests'] = 0
df_region = df_complement.append(df_region)
df_region.index.name = 'date'
df_region.predicted_new_tests = df_region.predicted_new_tests.interpolate(
"linear"
)
df_region['region'] = region
df_list.append(df_region.reset_index().set_index(['region', 'date']))
return pandas.concat(df_list), results
from .. import data
data.set_country_support(
country_alpha2='BE',
compute_zone=data.Zone.Europe,
region_name=BE_REGION_NAMES,
region_population=BE_REGION_POPULATION,
fn_load=get_data_BE,
fn_process=forecast_BE,
)
``` |
{
"source": "jose-caballero/oasis-server",
"score": 3
} |
#### File: oasis-server/oasispackage/oasisexcpetions.py
```python
class ConfigurationFailure(Exception):
"""
config file can not be read
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
```
#### File: plugins/distribution/cvmfs.py
```python
import commands
import datetime
import logging
import os
import shutil
import subprocess
import time
#
# base class for cvmfsXY plugins
#
from oasispackage.interfaces import BaseDistribution
class cvmfs(BaseDistribution):
def __init__(self, project):
super(cvmfs, self).__init__(project)
self.src = '%s/%s' %(self.project.repository_src_dir, self.project.project_src_dir)
self.dest = '/cvmfs/%s/%s' %(self.project.repository_dest_dir, self.project.project_dest_dir)
def checkrepository(self):
check = os.path.isdir('/cvmfs/%s' %self.project.repositoryname)
self.log.info('repository /cvmfs/%s exists = %s' %(self.project.repositoryname, check))
return check
def checkproject(self):
check = os.path.isdir('/cvmfs/%s' %self.project.projectname)
self.log.info('project /cvmfs/%s exists = %s' %(self.project.projectname, check))
return check
def synchronize_back(self):
"""
ensure the user scratch area has a perfect copy of what
is currently in the final destination area
"""
self.log.debug('Starting.')
# FIXME temporary solution ??
# maybe it should be implemented in the distribution plugin?
# for example, to allow easier sync from remote host
#
cmd = 'rsync --stats -a --delete %s/ %s/' %(self.dest, self.src)
self.log.debug('synchronization cmd = %s' %cmd)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
(out, err) = p.communicate()
rc = p.returncode
self.log.debug('Output of synchronization cmd = %s' %out)
self.log.debug('Leaving with RC=%s' %rc)
return rc
```
#### File: oasispackage/probes/filesize.py
```python
from oasispackage.interfaces import BaseProbe
import commands
import getopt
import sys
class filesize(BaseProbe):
def __init__(self, options):
super(filesize, self).__init__(options)
def run(self):
cmd = 'find %s -size +1G -type f -exec ls -lh {} \;' %self.rootdir
out = commands.getoutput(cmd)
if out == '':
# FIXME for the time being, it is just a print
print 'Probe passed OK. Output of cmd "%s" was\n %s' %(cmd, out)
return 0
else:
# FIXME for the time being, it is just a print
print 'Probe failed. Output of cmd "%s" was\n %s' %(cmd, out)
return 1
if __name__ == '__main__':
probe = filesize(sys.argv[1:])
rc = probe.run()
sys.exit(rc)
```
#### File: oasispackage/probes/no.py
```python
from oasispackage.interfaces import BaseProbe
import sys
class no(BaseProbe):
"""
Fake probe, just for testing purpopses.
Always return 1
"""
def __init__(self, options):
super(no, self).__init__(options)
def run(self):
return 1
if __name__ == '__main__':
probe = no(sys.argv[1:])
rc = probe.run()
sys.exit(rc)
```
#### File: oasispackage/probes/numberfiles.py
```python
from oasispackage.interfaces import BaseProbe
import sys
class numberfiles(BaseProbe):
def __init__(self, options):
super(numberfiles, self).__init__(options)
def run(self):
# ?? Maybe some min and max limits ??
# ?? Maybe insert .cvmfscatalogsdir if needed ??
return 0
if __name__ == '__main__':
probe = numberfiles(sys.argv[1:])
rc = probe.run()
sys.exit(rc)
```
#### File: oasispackage/probes/quota.py
```python
from oasispackage.interfaces import BaseProbe
import commands
import getopt
import sys
class quota(BaseProbe):
def __init__(self, options):
super(quota, self).__init__(options)
opts, args = getopt.getopt(self.options, '', ['limit='])
for o, a in opts:
if o == '--limit':
self.limit = int(a)
def run(self):
# ?? maybe use df instead of du ??
cmd = 'du -s %s | awk \'{print $1}\'' %self.rootdir
out = commands.getoutput(cmd)
out = int(out)
if out < self.limit:
# FIXME for the time being, it is just a print
print 'Probe passed OK. Quota limit is %s and used space is %s' %(self.limit, out )
return 0
else:
# FIXME for the time being, it is just a print
print 'Probe failed. Quota limit is %s and used space is %s' %(self.limit, out )
return 1
if __name__ == '__main__':
probe = quota(sys.argv[1:])
rc = probe.run()
sys.exit(rc)
```
#### File: oasispackage/probes/readable.py
```python
from oasispackage.interfaces import BaseProbe
import sys
class readable(BaseProbe):
def __init__(self, options):
super(readable, self).__init__(options)
def run(self):
return 0
if __name__ == '__main__':
probe = readable(sys.argv[1:])
rc = probe.run()
sys.exit(rc)
``` |
{
"source": "JoseCaliz/XOR",
"score": 3
} |
#### File: src/data/make_xor_problem_dataset.py
```python
import click
from time import time
import numpy as np
from datetime import timedelta
import logging
import csv
import random
from pathlib import Path
@click.command()
@click.option('-s', '--size', required=True, default=100_000)
@click.option('--variable_length/--no_variable_length', default=False)
@click.argument('output_filepath', type=click.Path())
def main(size, variable_length, output_filepath):
""" Randomly generates a sample of `size` binary strings than are
cleaned data ready to be analyzed (saved in ../processed).
"""
logger = logging.getLogger(__name__)
start_time = time()
logger.info('Starting Generation')
generated_strings = list()
possible_length_values = [50] if not(variable_length) else range(1, 50)
np.random.seed(0)
for _ in range(size):
length = np.random.choice(possible_length_values)
generated_strings.append(
format(random.getrandbits(length), 'b')
)
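    # Note (added comment): format(random.getrandbits(length), 'b') drops
    # leading zeros, so a generated string may be shorter than `length`; the
    # parity label row.count('1') % 2 (the XOR of all bits) is unaffected.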
with open(output_filepath, 'w') as file:
writer = csv.writer(file)
for row in generated_strings:
if not(variable_length):
writer.writerow([row, row.count('1') % 2])
else:
writer.writerow([row.rjust(50, "0"), row.count('1') % 2])
    end_time = time()
    execution_time = str(timedelta(seconds=end_time - start_time))
logger.info(f'Ending Generation. Time taken: {execution_time}')
if __name__ == '__main__':
log_fmt = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(level=logging.INFO, format=log_fmt)
# not used in this stub but often useful for finding various files
project_dir = Path(__file__).resolve().parents[2]
main()
``` |
{
"source": "Josecalm/seguridadgdl",
"score": 2
} |
#### File: seguridadgdl/app/routes.py
```python
from flask import render_template, flash, redirect, url_for, request
from app import app, db
from app.forms import LoginForm, RegistrationForm, CreateReportForm, EditProfileForm, EditStatusReport
from flask_login import current_user, login_user, logout_user, login_required
from app.models import Person, User, CrimeList, HourList, \
ReferenceInfoList, Report, SexList, Zone
from datetime import date
from app.fuzzyLogic import generate_fuzzy_values
import json
@app.route('/')
@app.route('/index')
def index():
reports = Report.query.all()
zone_count = Zone.query.count()
crime_list_count = CrimeList.query.count()
zone_arr = []
crime_arr = []
for i in range(crime_list_count + 1):
crime_arr.append(0)
for i in range(zone_count):
zone_arr.append(list(crime_arr))
# TODO: Add conditional to only count accepted reports in array
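    # zone_arr holds one row per zone: columns 0..crime_list_count-1 count the
    # reports per crime type (crime ids start at 2, hence the -2 offset below)
    # and the extra final column stores the zone's fuzzy insecurity value.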
for r in reports:
zone_arr[int(r.zone)-1][int(r.crime_id)-2] += 1
for z in zone_arr:
z[crime_list_count] = generate_fuzzy_values(z[0], z[1],
z[2], z[3])
with open('app/static/js/zones.js', 'w') as f:
f.write('var zone_data_json = ')
json.dump({'zones': zone_arr}, f)
f.write(";")
return render_template('index.html', title='Inicio', active='maps')
@app.route('/login', methods=['GET', 'POST'])
def login():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.username.data).first()
if user is None or not user.check_password(form.password.data):
flash('Nombre de usuario o contrasena no validos!')
return redirect(url_for('login'))
login_user(user)
if user.is_admin:
return redirect(url_for('admin_reports'))
return redirect(url_for('index'))
return render_template('login.html', title='Ingresar', form=form)
@app.route('/logout')
def logout():
logout_user()
return redirect(url_for('index'))
@app.route('/register', methods=['GET', 'POST'])
def register():
if current_user.is_authenticated:
return redirect(url_for('index'))
form = RegistrationForm()
if form.validate_on_submit():
user = User(username=form.username.data, email=form.email.data)
user.set_password(form.password.data)
db.session.add(user)
db.session.commit()
person = Person(name=form.name.data, user_id=user.id, sex_id=form.sex.data, birthdate=form.birthdate.data)
db.session.add(person)
db.session.commit()
flash('El usuario ha sido registrado exitosamente!')
return redirect(url_for('login'))
return render_template('register.html', form=form)
@app.route('/create_report', methods=['GET', 'POST'])
@login_required
def create_report():
form = CreateReportForm()
if form.validate_on_submit():
lat = float(form.coordinates_lat.data)
lng = float(form.coordinates_lng.data)
zone = int(form.zone.data)
report = Report(user_id=current_user.id, date=form.date.data, crime_id=form.crime.data,
crime_hour_id=form.hour.data, latitude=lat, longitude=lng, zone=zone, details=form.details.data, reference_id=form.reference.data)
db.session.add(report)
db.session.commit()
return redirect(url_for('user_reports'))
return render_template('create_report.html', title='Crear Reporte', active='add_report',
form=form)
@app.route('/user_reports', methods=['GET', 'POST'])
@login_required
def user_reports():
reports = Report.query.filter_by(user_id=current_user.id)
crimes = ['n/a', 'n/a', 'Asalto', 'Homicidio', 'Violación', 'Posesión Armas/Drogas']
statuses = ['n/a', 'Nuevo', 'En revision', 'Aceptado', 'Rechazado']
return render_template('user_reports.html', title='Mis reportes', active='user_reports', statuses=statuses, reports=reports, crimes=crimes)
@app.route('/user/<username>')
@login_required
def user(username):
user = User.query.filter_by(id=current_user.id).first_or_404()
person = Person.query.filter_by(user_id=current_user.id).first_or_404()
sex = SexList.query.filter_by(id=person.sex_id).first()
return render_template('profile.html', title='Perfil de usuario', active='user_profile',
person=person, user=user, sex=sex.description)
@app.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm()
person = Person.query.filter_by(user_id = current_user.id).first_or_404()
if form.validate_on_submit():
current_user.username = form.username.data
person.name = form.name.data
person.sex_id = form.sex.data
db.session.commit()
flash('Tus cambios han sido guardados.')
return redirect(url_for( 'user', username=current_user.username ) )
elif request.method == 'GET':
form.username.data = current_user.username
form.name.data = person.name
return render_template('edit_profile.html', title='Editar Perfil',
form=form)
@app.route('/admin_reports', methods=['GET', 'POST'])
@login_required
def admin_reports():
form = EditStatusReport()
reports = Report.query.all()
crimes = ['n/a', 'n/a', 'Asalto', 'Homicidio', 'Violación', 'Posesión Armas/Drogas']
statuses = ['n/a', 'Nuevo', 'En revision', 'Aceptado', 'Rechazado']
if form.validate_on_submit():
report = Report.query.get(form.report_id.data)
report.status_id = form.status.data
report.status_details = form.status_details.data
db.session.commit()
return redirect(url_for('admin_reports'))
return render_template('reports.html', title='Operador', active='admin_reports', reports=reports, statuses=statuses, crimes=crimes, form=form)
```
#### File: seguridadgdl/app/sistemaFuzzy.py
```python
from app.funcionesMembresia import *
conjAsaltoRobo = ["Baja", "Media", "Alta"]
conjHomic = ["Baja", "Media", "Alta"]
conjSecVioAcos = ["Baja", "Media", "Alta"]
conjDrogArmas = ["Baja", "Media", "Alta"]
conjInseguridad = ["Baja", "Media", "Alta", "Critica"]
nivsMemAsaltoRobo = [0.0, 0.0, 0.0]
nivsMemHomic = [0.0, 0.0, 0.0]
nivsMemSecVioAcos = [0.0, 0.0, 0.0]
nivsMemDrogArmas = [0.0, 0.0, 0.0]
nivsMemInseguridad = [0.0, 0.0, 0.0, 0.0]
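# Assumption (added note): the membership functions imported from
# app.funcionesMembresia are not shown here; judging by their names we take
# curva_z to be a decreasing Z-curve, curva_s an increasing S-curve, and
# triangular_suave a smoothed triangular membership function, each returning
# a membership degree in [0, 1].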
def posNivMemMayor(nivelesMem):
posMayor = 0
mayor = 0.0
for i in range(len(nivelesMem)):
if nivelesMem[i] > mayor:
mayor = nivelesMem[i]
posMayor = i
return posMayor
def prodMembsAsaltoRobo(valorAsRob):
nivsMemAsaltoRobo[0] = curva_z(valorAsRob, 10, 30)
nivsMemAsaltoRobo[1] = triangular_suave(valorAsRob, 15, 50, 75)
nivsMemAsaltoRobo[2] = curva_s(valorAsRob, 60, 80)
def prodMembsHomic(valorHomic):
nivsMemHomic[0] = curva_z(valorHomic, 2, 4)
nivsMemHomic[1] = triangular_suave(valorHomic, 2, 5, 8)
nivsMemHomic[2] = curva_s(valorHomic, 6, 9)
print(nivsMemHomic)
def prodMembsSecVioAcos(valorSeViAc):
nivsMemSecVioAcos[0] = curva_z(valorSeViAc, 2, 5)
nivsMemSecVioAcos[1] = triangular_suave(valorSeViAc, 3, 6, 9)
nivsMemSecVioAcos[2] = curva_s(valorSeViAc, 6, 8)
def prodMembsDrogArmas(valorDroArm):
nivsMemDrogArmas[0] = curva_z(valorDroArm, 10, 20)
nivsMemDrogArmas[1] = triangular_suave(valorDroArm, 10, 25, 45)
nivsMemDrogArmas[2] = curva_s(valorDroArm, 30, 40)
def fuzzyAsaltoRobo(valorAsRob):
prodMembsAsaltoRobo(valorAsRob)
membresia = conjAsaltoRobo[posNivMemMayor(nivsMemAsaltoRobo)]
return membresia
def fuzzyHomic(valorHomic):
prodMembsHomic(valorHomic)
membresia = conjHomic[posNivMemMayor(nivsMemHomic)]
return membresia
def fuzzySecVioAcos(valorSeViAc):
prodMembsSecVioAcos(valorSeViAc)
membresia = conjSecVioAcos[posNivMemMayor(nivsMemSecVioAcos)]
return membresia
def fuzzyDrogArmas(valorDroArm):
prodMembsDrogArmas(valorDroArm)
membresia = conjDrogArmas[posNivMemMayor(nivsMemDrogArmas)]
return membresia
# Hand-coded rules
def inferenciaCualitativa(AsaltoRoboDif, homicidioDif, secVioAcoDif,
drogArmasDif):
if ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Baja'):
inseguridadDifuso = "Baja"
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Baja'):
inseguridadDifuso = "Media"
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Baja'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Baja'):
inseguridadDifuso = "Baja"
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Baja'):
inseguridadDifuso = "Media"
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Baja'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Baja'):
inseguridadDifuso = "Media"
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Baja'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Baja'):
inseguridadDifuso = "Critica"
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Media'):
inseguridadDifuso = "Baja"
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Media'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Media'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Media'):
inseguridadDifuso = "Media"
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Media'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Media'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Media'):
inseguridadDifuso = "Media"
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Media'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Media'):
inseguridadDifuso = "Critica"
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Alta'):
inseguridadDifuso = "Media"
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Alta'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Alta'):
inseguridadDifuso = "Critica"
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Alta'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Alta'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Alta'):
inseguridadDifuso = "Critica"
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Alta'):
inseguridadDifuso = "Alta"
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Alta'):
inseguridadDifuso = "Critica"
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Alta'):
inseguridadDifuso = "Critica"
return inseguridadDifuso
def inferenciaCuantitativa(AsaltoRoboDif, homicidioDif, secVioAcoDif,
drogArmasDif):
nivMemAsaltoRobo = nivsMemAsaltoRobo[posNivMemMayor(nivsMemAsaltoRobo)]
nivMemHomic = nivsMemHomic[posNivMemMayor(nivsMemHomic)]
nivMemSecVioAcos = nivsMemSecVioAcos[posNivMemMayor(nivsMemSecVioAcos)]
nivMemDrogArmas = nivsMemDrogArmas[posNivMemMayor(nivsMemDrogArmas)]
nivMemInseguridad = compAnd(compAnd(compOr(nivMemDrogArmas, nivMemHomic), nivMemSecVioAcos), nivMemAsaltoRobo)
if ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Baja'):
nivsMemInseguridad[0] = nivMemInseguridad
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Baja'):
nivsMemInseguridad[1] = nivMemInseguridad
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Baja'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Baja'):
nivsMemInseguridad[0] = nivMemInseguridad
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Baja'):
nivsMemInseguridad[1] = nivMemInseguridad
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Baja'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Baja'):
nivsMemInseguridad[1] = nivMemInseguridad
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Baja'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Baja'):
nivsMemInseguridad[3] = nivMemInseguridad
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Media'):
nivsMemInseguridad[0] = nivMemInseguridad
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Media'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Media'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Media'):
nivsMemInseguridad[1] = nivMemInseguridad
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Media'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Media'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Media'):
nivsMemInseguridad[1] = nivMemInseguridad
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Media'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Media'):
nivsMemInseguridad[3] = nivMemInseguridad
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Alta'):
nivsMemInseguridad[1] = nivMemInseguridad
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Alta'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Baja' and AsaltoRoboDif == 'Alta'):
nivsMemInseguridad[3] = nivMemInseguridad
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Alta'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Alta'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Media' and AsaltoRoboDif == 'Alta'):
nivsMemInseguridad[3] = nivMemInseguridad
elif ((drogArmasDif == 'Baja' or homicidioDif == 'Baja') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Alta'):
nivsMemInseguridad[2] = nivMemInseguridad
elif ((drogArmasDif == 'Media' or homicidioDif == 'Media') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Alta'):
nivsMemInseguridad[3] = nivMemInseguridad
elif ((drogArmasDif == 'Alta' or homicidioDif == 'Alta') and secVioAcoDif == 'Alta' and AsaltoRoboDif == 'Alta'):
nivsMemInseguridad[3] = nivMemInseguridad
return nivsMemInseguridad[posNivMemMayor(nivsMemInseguridad)]
def desfuzzificar(inseguridadDifuso, nivMemInseguridad):
if inseguridadDifuso == "Baja":
return nivMemInseguridad * 25
elif inseguridadDifuso == "Media":
return nivMemInseguridad * 50
elif inseguridadDifuso == "Alta":
return nivMemInseguridad * 75
elif inseguridadDifuso == "Critica":
return nivMemInseguridad * 100
else:
return 0.0
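# Worked example (illustrative): a zone fuzzified as "Alta" with a membership
# level of 0.8 defuzzifies to 0.8 * 75 = 60.0 on the 0-100 insecurity scale.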
``` |
{
"source": "josecarb/ciff_2017_af3_jbcmjc",
"score": 3
} |
#### File: ciff_2017_af3_jbcmjc/ciff_2017_af3_jbcmjc/grafica.py
```python
import numpy as np
from pandas_datareader.data import DataReader
from bokeh.layouts import gridplot
from bokeh.plotting import figure, show, output_file
from flask import render_template

def grafica(shortticker):
def datetime(x):
return np.array(x, dtype=np.datetime64)
symbol = shortticker #"GOOG"
df = DataReader(symbol, "google", '01/01/2016', '08/03/2017')
df['date'] = df.index
p1 = figure(x_axis_type="datetime", title="Stock Closing Prices")
p1.grid.grid_line_alpha=0.3
p1.xaxis.axis_label = 'Date'
p1.yaxis.axis_label = 'Price'
p1.line(datetime(df['date']), df['Close'], color='#A6CEE3', legend=symbol)
#p1.line(datetime(GOOG['date']), GOOG['adj_close'], color='#B2DF8A', legend='GOOG')
#p1.line(datetime(IBM['date']), IBM['adj_close'], color='#33A02C', legend='IBM')
#p1.line(datetime(MSFT['date']), MSFT['adj_close'], color='#FB9A99', legend='MSFT')
#p1.legend.location = "top_left"
df_array = np.array(df['Close'])
df_dates = np.array(df['date'], dtype=np.datetime64)
window_size = 30
window = np.ones(window_size)/float(window_size)
aapl_avg = np.convolve(df_array, window, 'same')
p2 = figure(x_axis_type="datetime", title="One-Month Average")
p2.grid.grid_line_alpha = 0
p2.xaxis.axis_label = 'Date'
p2.yaxis.axis_label = 'Price'
p2.ygrid.band_fill_color = "olive"
p2.ygrid.band_fill_alpha = 0.1
p2.circle(df_dates, df_array, size=4, legend='close',
color='darkgrey', alpha=0.2)
p2.line(df_dates, aapl_avg, legend='avg', color='navy')
p2.legend.location = "top_left"
output_file("./templates/stocks.html", title="My Own Bokeh Example")
show(gridplot([[p1,p2]], plot_width=400, plot_height=400)) # open a browser
return render_template('stocks.html')
```
#### File: ciff_2017_af3_jbcmjc/ciff_2017_af3_jbcmjc/main.py
```python
from flask import Flask, request, render_template, jsonify
import pandas.io.sql as sql
import sqlite3
import platform
from datetime import datetime
import numpy as np
import pandas as pd
import json
#from pandas.io.data import DataReader
from pandas_datareader import data, wb
from sklearn.linear_model import LogisticRegression
from sklearn.lda import LDA
from sklearn.qda import QDA
from bokeh.layouts import gridplot
from bokeh.plotting import figure, show, output_file
from scipy.stats import norm
from hist_bokeh import hist_bokeh
from hist_json import hist_json
app = Flask(__name__)
@app.route('/')
def main():
p1 = hist_bokeh (5,'GOOG')
p2 = hist_json (5,'GOOG')
return render_template('histogram.html',p1=p1,p2=p2)
if __name__ == '__main__':
app.run(
#host="0.0.0.0",
#port=int("80")
# , processes=9
debug=True
)
``` |
{
"source": "josecarlosah25/sistop-2020-2",
"score": 3
} |
#### File: 3/BarreroPatricio-EspinoHector/patomap.py
```python
import re
import argparse
import os
from termcolor import cprint, colored # Hay que instalarla
from functools import reduce
# Define some helper functions
def crear_parser():
"""
    Creates a parser to receive, via the command line, the smaps file or the PID of the process we want
"""
parser = argparse.ArgumentParser(description="""Pato map, un programa que igual y te mapea la memoria como te la rompe.\n
Se debe utilizar ya sea la opción [-p] o [-s] Ej:
python3 patomap.py -p 53
python3 patomap.py -s /proc/34/maps
python3 patomap.py -s /home/gwolf/Downloads/smapdump""",
epilog="En caso de bugs reportar a <EMAIL>",formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('-s','--smap',dest='smap',type=argparse.FileType('r'),
help='Ruta del archivo smap')
parser.add_argument('-p','--pid',dest='pid',type=int,
help='Pid del proceso.Se buscará en: /proc/{PID]}/ \n ->Esta opcion hace que se ignoren las demas')
return parser.parse_args()
def revisar_opciones(args):
"""
    Checks that the user used the command-line options correctly
    ----
    returns
    str => path to the smaps file
"""
    if not (args.smap or args.pid):  # no option was used
print("Utilize al menos una de las opciones:")
print("Consulte: python3 patomap.py -h")
exit()
    elif args.pid:  # the PID option was used
        base = "/proc/{}".format(args.pid)
        ruta = base + "/smaps"
    elif args.smap:  # the smaps file option was used
        ruta = args.smap
    else:  # something went wrong
print("No se que hiciste")
print(args)
exit()
return ruta
def leer_archivo(ubicacion):
"""
    Reads the contents of a file given its location
    ----
    returns
    str => the contents of the file
    ----
    may raise
    FileNotFoundError => if the file is not found
    PermissionError => if we lack permission to read the file
"""
archivo = open(ubicacion, 'r')
texto = archivo.read()
archivo.close()
return texto
# extract the important information from the file
def obtener_info_smaps(texto_smaps):
"""
    From the contents of smaps we extract the important elements of each segment
    ---
    returns
    list(dict) => list of segments; each segment is a dictionary whose keys give access to the values we want
"""
    # Using a regular expression we analyze each memory segment and extract the important information from each one
expresion_regular = r"([\da-f]+)\-([\da-f]+) (r|-)(w|-)(x|-)(s|p) [\da-f]+ [\da-f]+:[\da-f]+ [\da-f]+ +([\[\]\/\-\.\w]*)\n?Size: +(\d+) ([MkGgKm]B)\n?KernelPageSize: +(\d+) ([MkGgKm]B)\n?MMUPageSize: +(\d+) ([MkGgKm]B)[\n: \w\t]*VmFlags:(( \w*)+)"
patron = re.compile(expresion_regular)
s = patron.findall(texto_smaps)
lista_segmentos = []
for v in s:
segmento = dict()
segmento["inicio"] = v[0]
segmento["final"] = v[1]
segmento["leer"] = v[2]
segmento["escribir"] = v[3]
segmento["ejecutar"] = v[4]
segmento["compartido"] = v[5]
segmento["mapping"] = v[6]
segmento["Tamano"] =(v[7], v[8]) # (valor, unidad)
segmento["TamanoMMUPagina"] = (v[9], v[10]) # (valor, unidad)
segmento["RSS"] = (v[11], v[12]) # (valor, unidad)
lista_segmentos.append(segmento)
return lista_segmentos
def convertir(unidad):
"""
    Converts size-unit prefixes to numeric multipliers
"""
if unidad.upper() == "KB":
return 1000
if unidad.upper() == "MB":
return 1000000
return 1000000000
def organizar_informacion_unitario(segmento):
"""
    Analyzes a segment and determines its properties
    ---
    returns
    tuple(str) => segment with its parts identified
"""
    # access the most important values
mapeo = segmento["mapping"]
tamano = segmento["Tamano"][0] + " " + segmento["Tamano"][1]
num_paginas = str(float(segmento["Tamano"][0]) * convertir(segmento["Tamano"][1]) / (float(segmento["TamanoMMUPagina"][0]) * convertir(segmento["TamanoMMUPagina"][1])))
de = segmento["inicio"]
a = segmento["final"]
permisos = segmento["leer"] + segmento["escribir"] + segmento["ejecutar"]
    # figure out its use
uso = "???"
if mapeo == '[stack]':
uso = "Stack"
elif mapeo == '[heap]':
uso = "Heap"
elif mapeo == '[anon]':
uso = 'Mapeo Anonimo'
elif mapeo in ('[vdso]', '[vsyscall]', '[vectors]'):
uso = "Llamada al Sistema"
elif mapeo == '[vvar]':
uso = 'Var Kernel'
elif mapeo == "":
uso = mapeo = "vacio"
elif segmento["leer"].lower() == 'r' and segmento["ejecutar"].lower() == "x" and "lib" in mapeo:
uso = "Bib→Texto"
elif segmento["leer"].lower() == 'r' and "lib" in mapeo:
uso = "Bib→Datos"
elif segmento["leer"].lower() == 'r' and segmento["ejecutar"].lower() == "x" and "/usr/bin" in mapeo:
uso = "Texto"
elif segmento["leer"].lower() == 'r' and "/usr/bin" in mapeo:
uso = "Datos"
return (uso, de, a, tamano, num_paginas, permisos, mapeo)
def organizar_informacion(lista_segmentos):
lista_identificados = []
for segmento in lista_segmentos:
segmento_identificado = organizar_informacion_unitario(segmento)
lista_identificados.append(segmento_identificado)
return lista_identificados
def imprimir_salida(lista_segmentos):
def calcula_maximo(lista_segmentos, i, cadena):
"""
        Computes the maximum length over a list of strings plus one extra string not in the list
        ---
        returns
        int => maximum string length
"""
return len(reduce(lambda s1, s2 : s1 if len(s1) >= len(s2) else s2, map(lambda l : l[i], lista_segmentos), cadena))
def agregar_espacios(cadena, n):
"""
        Adds enough spaces so that the string has length n
        ----
        returns
        str => string of length n
"""
if n <= len(cadena):
return cadena
resta = n - len(cadena)
agregar = " " * (resta // 2)
agregar_final = "" if resta % 2 == 0 else " "
return agregar + cadena + agregar + agregar_final
nombres = ['USO', 'DE PAG.', 'A PAG.', 'TAMAÑO', 'NUM. PAG.', 'PERMISOS', 'MAPEO']
colores = ['red', 'green', 'green', 'magenta', 'blue', 'cyan', 'yellow']
maximos = [calcula_maximo(lista_segmentos, i, cadena) for i, cadena in enumerate(nombres)]
    # print the headers
for nombre, color, maximo in zip(nombres, colores, maximos):
print(" | ", end ="")
cprint(agregar_espacios(nombre, maximo), color, 'on_grey', attrs = ['bold'], end = '')
print()
    # print the contents
for i, segmento in enumerate(lista_segmentos):
color_fondo = "on_white" if i % 2 == 0 else "on_grey"
for j, (color, maximo, elemento) in enumerate(zip(colores, maximos, segmento)):
simbolo = "|" if j != 2 else "-"
print(" {} ".format(simbolo), end ="")
cprint(agregar_espacios(elemento, maximo), color, color_fondo, end = '')
print()
if __name__ == "__main__":
try:
        # get the location of the smaps file
        ubicacion_smaps = revisar_opciones(crear_parser())
        # get the contents of the smaps file
        texto_smaps = leer_archivo(ubicacion_smaps)
        lista_segmentos = obtener_info_smaps(texto_smaps)
        # determine what each segment corresponds to
        segmentos_organizados = organizar_informacion(lista_segmentos)
        # display on screen
imprimir_salida(segmentos_organizados)
    except PermissionError as e:  # if we lack permission to read a file
print("No tienes permisos para leer ese proceso/archivo :(")
print(e)
exit()
    except FileNotFoundError as e:  # if the file is not found
print("No se ha encontrado el archivo")
print(e)
exit()
    except Exception as e:  # any other error
print("No tengo permiso para leer ese proceso o no existe.\n\
Si es un pid, verifique que el proceso este corriendo")
print(e)
exit()
```
#### File: 3/MurrietaAlfonso-ValdespinoJoaquin/Myapp.py
```python
import sys
import os
from lines.LineComplete import LineComplete
from PyQt5 import uic, QtWidgets
qtCreatorFile = "maps.ui" # Nombre del archivo aquí.
Ui_MainWindow, QtBaseClass = uic.loadUiType(qtCreatorFile)
class MyApp(QtWidgets.QMainWindow, Ui_MainWindow):
def __init__(self):
QtWidgets.QMainWindow.__init__(self)
Ui_MainWindow.__init__(self)
self.setupUi(self)
self.btn_pid.clicked.connect(self.addPID)
###########################################
    # code #
    # click event for the window's P_ID button #
def addPID(self):
print("adquirio p_id")
self.map_tx.setText("");
try:
pidx = int(self.pid_tx.toPlainText())
print(pidx)
self.map_tx.setText(" mapeo del P_ID: "+str(pidx)+"\n");
if(pidx >0):
self.mapping(pidx)
else:
raise ValueError("p_id no valido")
except ValueError:
self.map_tx.setText("mapeo no realizado")
#####################################################
def mapping(self,pid):
        ##### dump maps into a txt file ###
maps = 'cat /proc/' + str(pid) + '/maps > maps'+str(pid)+'.txt'
os.system(maps)
### files ###
filemaps = open("maps"+str(pid)+".txt","r")
filemap = open("MUVAMAP_"+str(pid).upper()+".txt","w")
        ### read the lines ###
linesimp=[]
Filelines = filemaps.readlines()
for line in Filelines :
segmentedline = line.split(" ")
page = segmentedline[0]
perm = segmentedline[1]
use = segmentedline[len(segmentedline)-1]
typeUse = ""
if (use.find("[") != -1 and use.find("]") != -1):
typeUse = use
typeUse = typeUse.replace("[","")
typeUse = typeUse.replace("]\n","")
elif (use.find("home") != -1 and perm.find("r") != -1 and perm.find("x") != -1):
typeUse = "Texto"
elif (use.find("home") != -1 and perm.find("r") != -1 and perm.find("w") != -1):
typeUse = "Datos"
elif (use.find("lib") != -1 and perm.find("r") != -1 and perm.find("x") != -1):
typeUse = "Bib-Texto"
elif (use.find("lib") != -1 and perm.find("r") != -1 and perm.find("w") != -1):
typeUse = "Bib-Datos"
elif (use == "" or use =="\n"):
use ="vacio"
linesimp.append(LineComplete(page,perm,typeUse,use))
self.toPrint(linesimp,filemap)
filemaps.close()
filemap.close()
    ##### print to screen #####
def toPrint(self,linesimp,file):
print("entro toprint")
print(len(linesimp))
string = "| {:_^11} | {:_^16} | {:_^16} | {:_^16} | {:_^12} | {:_^4} | {} \n".format(" use"," De pag"," A pag"," Tamaño"," Num paginas"," Perm","Uso o mapeo")
file.write(string)
self.map_tx.append(string+"\n")
for line in linesimp:
string = line.toString()
file.write(string)
if(line.gettypeUse() == "stack"):
Text = "<span style=\" font-size:9pt; font-weight:600; color:#ff0000;\" >"
Text += string
Text +="</span>"
self.map_tx.append(Text+"\n")
elif(line.gettypeUse() == "vvar"):
Text = "<span style=\" font-size:9pt; font-weight:600; color:#fca71a;\" >"
Text += string
Text +="</span>"
self.map_tx.append(Text+"\n")
elif(line.gettypeUse() == "vdso"):
Text = "<span style=\" font-size:9pt; font-weight:600; color:#a387a5;\" >"
Text += string
Text +="</span>"
self.map_tx.append(Text+"\n")
elif(line.gettypeUse() == "vsyscall"):
Text = "<span style=\" font-size:9pt; font-weight:600; color:#87c725;\" >"
Text += string
Text +="</span>"
self.map_tx.append(Text+"\n")
elif(line.gettypeUse() == "Texto"):
Text = "<span style=\" font-size:9pt; font-weight:600; color:#11a000;\" >"
Text += string
Text +="</span>"
self.map_tx.append(Text+"\n")
elif(line.gettypeUse() == "Datos"):
Text = "<span style=\" font-size:9pt; font-weight:600; color:#ff8b08;\" >"
Text += string
Text +="</span>"
self.map_tx.append(Text+"\n")
elif(line.gettypeUse() == "Bib-Texto"):
Text = "<span style=\" font-size:9pt; font-weight:600; color:#00bcd6;\" >"
Text += string
Text +="</span>"
self.map_tx.append(Text+"\n")
elif(line.gettypeUse() == "Bib-Datos"):
Text = "<span style=\" font-size:9pt; font-weight:600; color:#6100d6;\" >"
Text += string
Text +="</span>"
self.map_tx.append(Text+"\n")
elif(line.gettypeUse() == "heap"):
Text = "<span style=\" font-size:9pt; font-weight:600; color:#950000;\" >"
Text += string
Text +="</span>"
self.map_tx.append(Text+"\n")
else:
blackText = "<span style=\" font-size:9pt; font-weight:600; color:#000000;\" >"
blackText +=string
blackText +="</span>"
self.map_tx.append(blackText+"\n")
###########################################
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
window = MyApp()
window.show()
sys.exit(app.exec_())
``` |
{
"source": "jose-carlos-code/CursoEmvideo-python",
"score": 3
} |
#### File: EX_CursoEmVideo/ex109/moeda.py
```python
def metade(numero, form=False):
n = numero/2
if form == True:
return moeda(n)
else:
return n
def dobro(num,form=False):
n = num*2
if form == True:
return moeda(n)
else:
return n
def aumentar(valor, p=0, form=False):
a = (valor*p)/100
n = valor+a
if form == True:
return moeda(n)
else:
return n
def diminuir(numero, p=0, form=False):
a = (numero*p)/100
n = (numero - a)
if form == True:
return moeda(n)
else:
return n
def moeda(preço, moeda='R$'):
return f'{moeda}{preço:.2f}'.replace('.', ',')
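# Illustrative usage (assuming the default 'R$' currency symbol):
#   moeda(1234.5)            # -> 'R$1234,50'
#   aumentar(100, 10, True)  # -> 'R$110,00'
#   metade(50, True)         # -> 'R$25,00'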
```
#### File: exercícios/EX_CursoEmVideo/main.py
```python
import os
e=0
dic = {'123456':['erick', 'masc', 22]}
def inserir(chave):
chave = chave #str(input("Informe a matricula do aluno: "))
nome=str(input("Informe o Nome do aluno:"))
sexo=str(input("Informe o sexo do Aluno: "))
idade=int(input("Informe a idade do Aluno: "))
dic[chave]=[nome,sexo,idade]
def verifica(chave):
if chave not in dic:
return chave
else:
        return 1  # key already registered
def exclui(chave):
dic.pop(chave)
def consultar(chave):
item = dic[chave]
matricula = chave
nome = item[0]
sexo = item[1]
idade = item[2]
return matricula,nome,sexo,idade
while e != 4:
lost = input("Informe a matricula: ")
if verifica(lost) == 1:
print("Já existe um aluno com essa matricula.")
else:
inserir(verifica(lost))
e += 1
print(dic)
while e != 3:
lost = input("Informe uma matricula para excluir: ")
if verifica(lost) == 1:
exclui(lost)
print("{} excluida com sucesso.".format(lost))
print(dic)
else:
print("Essa matricula ainda não foi casdastrada.")
print(dic)
while e != 4:
lost=input("Informe a matricula: ")
if verifica(lost) == 1:
print("Matricula: {}".format(consultar(lost)[0]))
print("Nome: {}".format(consultar(lost)[1]))
print("Sexo: {}".format(consultar(lost)[2]))
print("Idade: {}".format(consultar(lost)[3]))
else:
print("Essa Matricula ainda nao foi cadastrada.")
``` |
{
"source": "JoseCarlosGarcia95/Mathics",
"score": 2
} |
#### File: mathics/builtin/graphics.py
```python
from math import floor, ceil, log10, sin, cos, pi, sqrt, atan2, degrees, radians, exp
import json
import base64
from itertools import chain
from mathics.builtin.base import (
Builtin, InstancableBuiltin, BoxConstruct, BoxConstructError)
from mathics.builtin.options import options_to_rules
from mathics.core.expression import (
Expression, Integer, Rational, Real, String, Symbol, strip_context,
system_symbols, system_symbols_dict, from_python)
from mathics.builtin.colors import convert as convert_color
from mathics.core.numbers import machine_epsilon
class CoordinatesError(BoxConstructError):
pass
class ColorError(BoxConstructError):
pass
def get_class(name):
from mathics.builtin.graphics3d import GLOBALS3D
c = GLOBALS.get(name)
if c is None:
return GLOBALS3D.get(name)
else:
return c
# globals() does not work with Cython, otherwise one could use something
# like return globals().get(name)
def coords(value):
if value.has_form('List', 2):
x, y = value.leaves[0].round_to_float(), value.leaves[1].round_to_float()
if x is None or y is None:
raise CoordinatesError
return (x, y)
raise CoordinatesError
class Coords(object):
def __init__(self, graphics, expr=None, pos=None, d=None):
self.graphics = graphics
self.p = pos
self.d = d
if expr is not None:
if expr.has_form('Offset', 1, 2):
self.d = coords(expr.leaves[0])
if len(expr.leaves) > 1:
self.p = coords(expr.leaves[1])
else:
self.p = None
else:
self.p = coords(expr)
def pos(self):
p = self.graphics.translate(self.p)
p = (cut(p[0]), cut(p[1]))
if self.d is not None:
d = self.graphics.translate_absolute(self.d)
return (p[0] + d[0], p[1] + d[1])
return p
def add(self, x, y):
p = (self.p[0] + x, self.p[1] + y)
return Coords(self.graphics, pos=p, d=self.d)
def cut(value):
"Cut values in graphics primitives (not displayed otherwise in SVG)"
border = 10 ** 8
if value < -border:
value = -border
elif value > border:
value = border
return value
def create_css(edge_color=None, face_color=None, stroke_width=None,
font_color=None):
css = []
if edge_color is not None:
color, opacity = edge_color.to_css()
css.append('stroke: %s' % color)
css.append('stroke-opacity: %s' % opacity)
else:
css.append('stroke: none')
if stroke_width is not None:
css.append('stroke-width: %fpx' % stroke_width)
if face_color is not None:
color, opacity = face_color.to_css()
css.append('fill: %s' % color)
css.append('fill-opacity: %s' % opacity)
else:
css.append('fill: none')
if font_color is not None:
color, opacity = font_color.to_css()
css.append('color: %s' % color)
return '; '.join(css)
def asy_number(value):
return '%.5g' % value
def _to_float(x):
x = x.round_to_float()
if x is None:
raise BoxConstructError
return x
def create_pens(edge_color=None, face_color=None, stroke_width=None,
is_face_element=False):
result = []
if face_color is not None:
brush, opacity = face_color.to_asy()
if opacity != 1:
brush += '+opacity(%s)' % asy_number(opacity)
result.append(brush)
elif is_face_element:
result.append('nullpen')
if edge_color is not None:
pen, opacity = edge_color.to_asy()
if opacity != 1:
pen += '+opacity(%s)' % asy_number(opacity)
if stroke_width is not None:
pen += '+linewidth(%s)' % asy_number(stroke_width)
result.append(pen)
elif is_face_element:
result.append('nullpen')
return ', '.join(result)
def _data_and_options(leaves, defined_options):
data = []
options = defined_options.copy()
for leaf in leaves:
if leaf.get_head_name() == 'System`Rule':
if len(leaf.leaves) != 2:
raise BoxConstructError
name, value = leaf.leaves
name_head = name.get_head_name()
if name_head == 'System`Symbol':
py_name = name.get_name()
elif name_head == 'System`String':
py_name = 'System`' + name.get_string_value()
else: # unsupported name type
raise BoxConstructError
options[py_name] = value
else:
data.append(leaf)
return data, options
def _euclidean_distance(a, b):
return sqrt(sum((x1 - x2) * (x1 - x2) for x1, x2 in zip(a, b)))
def _component_distance(a, b, i):
return abs(a[i] - b[i])
def _cie2000_distance(lab1, lab2):
#reference: https://en.wikipedia.org/wiki/Color_difference#CIEDE2000
e = machine_epsilon
kL = kC = kH = 1 #common values
L1, L2 = lab1[0], lab2[0]
a1, a2 = lab1[1], lab2[1]
b1, b2 = lab1[2], lab2[2]
dL = L2 - L1
Lm = (L1 + L2)/2
C1 = sqrt(a1**2 + b1**2)
C2 = sqrt(a2**2 + b2**2)
    Cm = (C1 + C2)/2
a1 = a1 * (1 + (1 - sqrt(Cm**7/(Cm**7 + 25**7)))/2)
a2 = a2 * (1 + (1 - sqrt(Cm**7/(Cm**7 + 25**7)))/2)
C1 = sqrt(a1**2 + b1**2)
C2 = sqrt(a2**2 + b2**2)
Cm = (C1 + C2)/2
dC = C2 - C1
h1 = (180 * atan2(b1, a1 + e))/pi % 360
h2 = (180 * atan2(b2, a2 + e))/pi % 360
if abs(h2 - h1) <= 180:
dh = h2 - h1
elif abs(h2 - h1) > 180 and h2 <= h1:
dh = h2 - h1 + 360
elif abs(h2 - h1) > 180 and h2 > h1:
dh = h2 - h1 - 360
dH = 2*sqrt(C1*C2)*sin(radians(dh)/2)
Hm = (h1 + h2)/2 if abs(h2 - h1) <= 180 else (h1 + h2 + 360)/2
T = 1 - 0.17*cos(radians(Hm - 30)) + 0.24*cos(radians(2*Hm)) + 0.32*cos(radians(3*Hm + 6)) - 0.2*cos(radians(4*Hm - 63))
SL = 1 + (0.015*(Lm - 50)**2)/sqrt(20 + (Lm - 50)**2)
SC = 1 + 0.045*Cm
SH = 1 + 0.015*Cm*T
rT = -2 * sqrt(Cm**7/(Cm**7 + 25**7))*sin(radians(60*exp(-((Hm - 275)**2 / 25**2))))
return sqrt((dL/(SL*kL))**2 + (dC/(SC*kC))**2 + (dH/(SH*kH))**2 + rT*(dC/(SC*kC))*(dH/(SH*kH)))
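# Illustrative usage (assumption, not in the original file): both arguments
# are colors in CIE L*a*b* coordinates, e.g.
#   _cie2000_distance([50.0, 10.0, -20.0], [50.0, 10.0, -20.0])  # -> 0.0
# with larger return values meaning more perceptually distinct colors.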
def _CMC_distance(lab1, lab2, l, c):
    # reference: https://en.wikipedia.org/wiki/Color_difference#CMC_l:c_.281984.29
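    # the l:c weights select the use case: 2:1 is the usual "acceptability"
    # setting, 1:1 the "perceptibility" setting (see the DistanceFunction
    # handling in ColorDistance below)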
L1, L2 = lab1[0], lab2[0]
a1, a2 = lab1[1], lab2[1]
b1, b2 = lab1[2], lab2[2]
dL, da, db = L2-L1, a2-a1, b2-b1
e = machine_epsilon
    C1 = sqrt(a1**2 + b1**2)
    C2 = sqrt(a2**2 + b2**2)
    h1 = (180 * atan2(b1, a1 + e))/pi % 360
    dC = C2 - C1
    dH2 = da**2 + db**2 - dC**2
    F = C1**2/sqrt(C1**4 + 1900)
    T = 0.56 + abs(0.2*cos(radians(h1 + 168))) if 164 <= h1 <= 345 else 0.36 + abs(0.4*cos(radians(h1 + 35)))
    SL = 0.511 if L1 < 16 else (0.040975*L1)/(1 + 0.01765*L1)
    SC = (0.0638*C1)/(1 + 0.0131*C1) + 0.638
    SH = SC*(F*T + 1 - F)
return sqrt((dL/(l*SL))**2 + (dC/(c*SC))**2 + dH2/SH**2)
def _extract_graphics(graphics, format, evaluation):
graphics_box = Expression('MakeBoxes', graphics).evaluate(evaluation)
builtin = GraphicsBox(expression=False)
elements, calc_dimensions = builtin._prepare_elements(
graphics_box.leaves, {'evaluation': evaluation}, neg_y=True)
xmin, xmax, ymin, ymax, _, _, _, _ = calc_dimensions()
# xmin, xmax have always been moved to 0 here. the untransformed
# and unscaled bounds are found in elements.xmin, elements.ymin,
# elements.extent_width, elements.extent_height.
# now compute the position of origin (0, 0) in the transformed
# coordinate space.
ex = elements.extent_width
ey = elements.extent_height
sx = (xmax - xmin) / ex
sy = (ymax - ymin) / ey
ox = -elements.xmin * sx + xmin
oy = -elements.ymin * sy + ymin
# generate code for svg or asy.
if format == 'asy':
code = '\n'.join(element.to_asy() for element in elements.elements)
elif format == 'svg':
code = elements.to_svg()
else:
raise NotImplementedError
return xmin, xmax, ymin, ymax, ox, oy, ex, ey, code
class _SVGTransform(object):
def __init__(self):
self.transforms = []
def matrix(self, a, b, c, d, e, f):
# a c e
# b d f
# 0 0 1
self.transforms.append('matrix(%f, %f, %f, %f, %f, %f)' % (a, b, c, d, e, f))
def translate(self, x, y):
self.transforms.append('translate(%f, %f)' % (x, y))
def scale(self, x, y):
self.transforms.append('scale(%f, %f)' % (x, y))
def rotate(self, x):
self.transforms.append('rotate(%f)' % x)
def apply(self, svg):
return '<g transform="%s">%s</g>' % (' '.join(self.transforms), svg)
class _ASYTransform():
_template = """
add(%s * (new picture() {
picture saved = currentpicture;
picture transformed = new picture;
currentpicture = transformed;
%s
currentpicture = saved;
return transformed;
})());
"""
def __init__(self):
self.transforms = []
def matrix(self, a, b, c, d, e, f):
# a c e
# b d f
# 0 0 1
# see http://asymptote.sourceforge.net/doc/Transforms.html#Transforms
self.transforms.append('(%f, %f, %f, %f, %f, %f)' % (e, f, a, c, b, d))
def translate(self, x, y):
self.transforms.append('shift(%f, %f)' % (x, y))
def scale(self, x, y):
self.transforms.append('scale(%f, %f)' % (x, y))
def rotate(self, x):
self.transforms.append('rotate(%f)' % x)
def apply(self, asy):
return self._template % (' * '.join(self.transforms), asy)
class Graphics(Builtin):
r"""
<dl>
<dt>'Graphics[$primitives$, $options$]'
<dd>represents a graphic.
</dl>
>> Graphics[{Blue, Line[{{0,0}, {1,1}}]}]
= -Graphics-
'Graphics' supports 'PlotRange':
>> Graphics[{Rectangle[{1, 1}]}, Axes -> True, PlotRange -> {{-2, 1.5}, {-1, 1.5}}]
= -Graphics-
>> Graphics[{Rectangle[],Red,Disk[{1,0}]},PlotRange->{{0,1},{0,1}}]
= -Graphics-
'Graphics' produces 'GraphicsBox' boxes:
>> Graphics[Rectangle[]] // ToBoxes // Head
= GraphicsBox
In 'TeXForm', 'Graphics' produces Asymptote figures:
>> Graphics[Circle[]] // TeXForm
=
. \begin{asy}
. size(5.8556cm, 5.8333cm);
. draw(ellipse((175,175),175,175), rgb(0, 0, 0)+linewidth(0.66667));
. clip(box((-0.33333,0.33333), (350.33,349.67)));
. \end{asy}
Invalid graphics directives yield invalid box structures:
>> Graphics[Circle[{a, b}]]
: GraphicsBox[CircleBox[List[a, b]], Rule[AspectRatio, Automatic], Rule[Axes, False], Rule[AxesStyle, List[]], Rule[Background, Automatic], Rule[ImageSize, Automatic], Rule[LabelStyle, List[]], Rule[PlotRange, Automatic], Rule[PlotRangePadding, Automatic], Rule[TicksStyle, List[]]] is not a valid box structure.
"""
options = {
'Axes': 'False',
'TicksStyle': '{}',
'AxesStyle': '{}',
'LabelStyle': '{}',
'AspectRatio': 'Automatic',
'PlotRange': 'Automatic',
'PlotRangePadding': 'Automatic',
'ImageSize': 'Automatic',
'Background': 'Automatic',
}
box_suffix = 'Box'
def apply_makeboxes(self, content, evaluation, options):
'''MakeBoxes[%(name)s[content_, OptionsPattern[%(name)s]],
StandardForm|TraditionalForm|OutputForm]'''
def convert(content):
head = content.get_head_name()
if head == 'System`List':
return Expression('List', *[convert(item) for item in content.leaves])
elif head == 'System`Style':
return Expression('StyleBox', *[convert(item) for item in content.leaves])
if head in element_heads:
if head == 'System`Text':
head = 'System`Inset'
atoms = content.get_atoms(include_heads=False)
if any(not isinstance(atom, (Integer, Real)) and
                       atom.get_name() not in GRAPHICS_SYMBOLS
for atom in atoms):
if head == 'System`Inset':
n_leaves = [content.leaves[0]] + [
Expression('N', leaf).evaluate(evaluation)
for leaf in content.leaves[1:]]
else:
n_leaves = (Expression('N', leaf).evaluate(
evaluation) for leaf in content.leaves)
else:
n_leaves = content.leaves
return Expression(head + self.box_suffix, *n_leaves)
return content
for option in options:
if option not in ('System`ImageSize',):
options[option] = Expression(
'N', options[option]).evaluate(evaluation)
box_name = 'Graphics' + self.box_suffix
return Expression(box_name, convert(content),
*options_to_rules(options))
class _GraphicsElement(InstancableBuiltin):
def init(self, graphics, item=None, style=None):
if item is not None and not item.has_form(self.get_name(), None):
raise BoxConstructError
self.graphics = graphics
self.style = style
self.is_completely_visible = False # True for axis elements
@staticmethod
def create_as_style(klass, graphics, item):
return klass(graphics, item)
class _Color(_GraphicsElement):
formats = {
# we are adding ImageSizeMultipliers in the rule below, because we do _not_ want color boxes to
# diminish in size when they appear in lists or rows. we only want the display of colors this
# way in the notebook, so we restrict the rule to StandardForm.
(('StandardForm', ), '%(name)s[x__?(NumericQ[#] && 0 <= # <= 1&)]'):
'Style[Graphics[{EdgeForm[Black], %(name)s[x], Rectangle[]}, ImageSize -> 16], ' +
'ImageSizeMultipliers -> {1, 1}]'
}
rules = {
'%(name)s[x_List]': 'Apply[%(name)s, x]',
}
components_sizes = []
default_components = []
def init(self, item=None, components=None):
super(_Color, self).init(None, item)
if item is not None:
leaves = item.leaves
if len(leaves) in self.components_sizes:
# we must not clip here; we copy the components, without clipping,
# e.g. RGBColor[-1, 0, 0] stays RGBColor[-1, 0, 0]. this is especially
# important for color spaces like LAB that have negative components.
components = [value.round_to_float() for value in leaves]
if None in components:
raise ColorError
                    # specs with fewer than three components are padded from
                    # default_components (e.g. Hue[h] becomes Hue[h, 1, 1, 1]);
                    # specs that already have three or more components are
                    # left as given, so RGBColor[0, 0, 0] keeps three components.
if len(components) < 3:
components.extend(self.default_components[len(components):])
self.components = components
else:
raise ColorError
elif components is not None:
self.components = components
@staticmethod
def create(expr):
head = expr.get_head_name()
cls = get_class(head)
if cls is None:
raise ColorError
return cls(expr)
@staticmethod
def create_as_style(klass, graphics, item):
return klass(item)
def to_css(self):
rgba = self.to_rgba()
alpha = rgba[3] if len(rgba) > 3 else 1.
return (r'rgb(%f%%, %f%%, %f%%)' % (
rgba[0] * 100, rgba[1] * 100, rgba[2] * 100), alpha)
def to_asy(self):
rgba = self.to_rgba()
alpha = rgba[3] if len(rgba) > 3 else 1.
return (r'rgb(%s, %s, %s)' % (
asy_number(rgba[0]), asy_number(rgba[1]), asy_number(rgba[2])),
alpha)
def to_js(self):
return self.to_rgba()
def to_expr(self):
return Expression(self.get_name(), *self.components)
def to_rgba(self):
return self.to_color_space("RGB")
def to_color_space(self, color_space):
components = convert_color(self.components, self.color_space, color_space)
if components is None:
raise ValueError('cannot convert from color space %s to %s.' % (self.color_space, color_space))
return components
class RGBColor(_Color):
"""
<dl>
<dt>'RGBColor[$r$, $g$, $b$]'
<dd>represents a color with the specified red, green and blue
components.
</dl>
>> Graphics[MapIndexed[{RGBColor @@ #1, Disk[2*#2 ~Join~ {0}]} &, IdentityMatrix[3]], ImageSize->Small]
= -Graphics-
>> RGBColor[0, 1, 0]
= RGBColor[0, 1, 0]
>> RGBColor[0, 1, 0] // ToBoxes
= StyleBox[GraphicsBox[...], ...]
"""
color_space = 'RGB'
components_sizes = [3, 4]
default_components = [0, 0, 0, 1]
def to_rgba(self):
return self.components
class LABColor(_Color):
"""
<dl>
<dt>'LABColor[$l$, $a$, $b$]'
<dd>represents a color with the specified lightness, red/green and yellow/blue
components in the CIE 1976 L*a*b* (CIELAB) color space.
</dl>
"""
color_space = 'LAB'
components_sizes = [3, 4]
default_components = [0, 0, 0, 1]
class LCHColor(_Color):
"""
<dl>
<dt>'LCHColor[$l$, $c$, $h$]'
<dd>represents a color with the specified lightness, chroma and hue
components in the CIELCh CIELab cube color space.
</dl>
"""
color_space = 'LCH'
components_sizes = [3, 4]
default_components = [0, 0, 0, 1]
class LUVColor(_Color):
"""
<dl>
    <dt>'LUVColor[$l$, $u$, $v$]'
<dd>represents a color with the specified components in the CIE 1976 L*u*v* (CIELUV) color space.
</dl>
"""
color_space = 'LUV'
components_sizes = [3, 4]
default_components = [0, 0, 0, 1]
class XYZColor(_Color):
"""
<dl>
<dt>'XYZColor[$x$, $y$, $z$]'
<dd>represents a color with the specified components in the CIE 1931 XYZ color space.
</dl>
"""
color_space = 'XYZ'
components_sizes = [3, 4]
default_components = [0, 0, 0, 1]
class CMYKColor(_Color):
"""
<dl>
<dt>'CMYKColor[$c$, $m$, $y$, $k$]'
<dd>represents a color with the specified cyan, magenta,
yellow and black components.
</dl>
>> Graphics[MapIndexed[{CMYKColor @@ #1, Disk[2*#2 ~Join~ {0}]} &, IdentityMatrix[4]], ImageSize->Small]
= -Graphics-
"""
color_space = 'CMYK'
components_sizes = [3, 4, 5]
default_components = [0, 0, 0, 0, 1]
class Hue(_Color):
"""
<dl>
<dt>'Hue[$h$, $s$, $l$, $a$]'
<dd>represents the color with hue $h$, saturation $s$,
lightness $l$ and opacity $a$.
<dt>'Hue[$h$, $s$, $l$]'
<dd>is equivalent to 'Hue[$h$, $s$, $l$, 1]'.
<dt>'Hue[$h$, $s$]'
<dd>is equivalent to 'Hue[$h$, $s$, 1, 1]'.
<dt>'Hue[$h$]'
<dd>is equivalent to 'Hue[$h$, 1, 1, 1]'.
</dl>
>> Graphics[Table[{EdgeForm[Gray], Hue[h, s], Disk[{12h, 8s}]}, {h, 0, 1, 1/6}, {s, 0, 1, 1/4}]]
= -Graphics-
>> Graphics[Table[{EdgeForm[{GrayLevel[0, 0.5]}], Hue[(-11+q+10r)/72, 1, 1, 0.6], Disk[(8-r) {Cos[2Pi q/12], Sin[2Pi q/12]}, (8-r)/3]}, {r, 6}, {q, 12}]]
= -Graphics-
"""
color_space = 'HSB'
components_sizes = [1, 2, 3, 4]
default_components = [0, 1, 1, 1]
def hsl_to_rgba(self):
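        # e.g. hsl components (0, 1, 0.5) with alpha 1 map to pure
        # red (1, 0, 0, 1)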
h, s, l = self.components[:3]
if l < 0.5:
q = l * (1 + s)
else:
q = l + s - l * s
p = 2 * l - q
rgb = (h + 1 / 3, h, h - 1 / 3)
        def wrap(value):
            # wrap hue offsets back into [0, 1]; the local helper used to be
            # named map, shadowing the builtin
            if value < 0:
                value += 1
            if value > 1:
                value -= 1
            return value
        def trans(t):
            if t < 1 / 6:
                return p + ((q - p) * 6 * t)
            elif t < 1 / 2:
                return q
            elif t < 2 / 3:
                return p + ((q - p) * 6 * (2 / 3 - t))
            else:
                return p
        result = tuple(trans(wrap(t)) for t in rgb) + (self.components[3],)
return result
class GrayLevel(_Color):
"""
<dl>
<dt>'GrayLevel[$g$]'
<dd>represents a shade of gray specified by $g$, ranging from
0 (black) to 1 (white).
<dt>'GrayLevel[$g$, $a$]'
<dd>represents a shade of gray specified by $g$ with opacity $a$.
</dl>
"""
color_space = 'Grayscale'
components_sizes = [1, 2]
default_components = [0, 1]
def expression_to_color(color):
try:
return _Color.create(color)
except ColorError:
return None
def color_to_expression(components, colorspace):
if colorspace == 'Grayscale':
converted_color_name = 'GrayLevel'
elif colorspace == 'HSB':
converted_color_name = 'Hue'
else:
converted_color_name = colorspace + 'Color'
return Expression(converted_color_name, *components)
class ColorDistance(Builtin):
"""
<dl>
<dt>'ColorDistance[$c1$, $c2$]'
<dd>returns a measure of color distance between the colors $c1$ and $c2$.
<dt>'ColorDistance[$list$, $c2$]'
<dd>returns a list of color distances between the colors in $list$ and $c2$.
</dl>
The option DistanceFunction specifies the method used to measure the color
distance. Available options are:
CIE76: euclidean distance in the LABColor space
CIE94: euclidean distance in the LCHColor space
CIE2000 or CIEDE2000: CIE94 distance with corrections
CMC: Colour Measurement Committee metric (1984)
DeltaL: difference in the L component of LCHColor
DeltaC: difference in the C component of LCHColor
DeltaH: difference in the H component of LCHColor
    It is also possible to specify a custom distance:
>> ColorDistance[Magenta, Green]
= 2.2507
>> ColorDistance[{Red, Blue}, {Green, Yellow}, DistanceFunction -> {"CMC", "Perceptibility"}]
= {1.0495, 1.27455}
#> ColorDistance[Blue, Red, DistanceFunction -> "CIE2000"]
= 0.557976
#> ColorDistance[Red, Black, DistanceFunction -> (Abs[#1[[1]] - #2[[1]]] &)]
= 0.542917
"""
options = {
'DistanceFunction': 'Automatic',
}
messages = {
'invdist': '`1` is not Automatic or a valid distance specification.',
        'invarg': '`1` and `2` should be two colors or a color and a list of colors or ' +
                  'two lists of colors of the same length.'
}
    # the docs say LABColor's colorspace corresponds to the CIE 1976 L^* a^* b^* color space
    # with {l,a,b}={L^*,a^*,b^*}/100. correction factors are applied accordingly.
_distances = {
"CIE76": lambda c1, c2: _euclidean_distance(c1.to_color_space('LAB')[:3], c2.to_color_space('LAB')[:3]),
"CIE94": lambda c1, c2: _euclidean_distance(c1.to_color_space('LCH')[:3], c2.to_color_space('LCH')[:3]),
"CIE2000": lambda c1, c2: _cie2000_distance(100*c1.to_color_space('LAB')[:3], 100*c2.to_color_space('LAB')[:3])/100,
"CIEDE2000": lambda c1, c2: _cie2000_distance(100*c1.to_color_space('LAB')[:3], 100*c2.to_color_space('LAB')[:3])/100,
"DeltaL": lambda c1, c2: _component_distance(c1.to_color_space('LCH'), c2.to_color_space('LCH'), 0),
"DeltaC": lambda c1, c2: _component_distance(c1.to_color_space('LCH'), c2.to_color_space('LCH'), 1),
"DeltaH": lambda c1, c2: _component_distance(c1.to_color_space('LCH'), c2.to_color_space('LCH'), 2),
"CMC": lambda c1, c2: _CMC_distance(100*c1.to_color_space('LAB')[:3], 100*c2.to_color_space('LAB')[:3], 1, 1)/100
}
def apply(self, c1, c2, evaluation, options):
'ColorDistance[c1_, c2_, OptionsPattern[ColorDistance]]'
        # if numpy is not installed, 100 * c1.to_color_space(...) returns a
        # plain Python list repeated 100 times instead of an elementwise
        # product, so require numpy up front
        try:
            import numpy  # noqa
        except ImportError:
            raise RuntimeError("NumPy needs to be installed for ColorDistance")
distance_function = options.get('System`DistanceFunction')
compute = None
if isinstance(distance_function, String):
compute = ColorDistance._distances.get(distance_function.get_string_value())
if not compute:
evaluation.message('ColorDistance', 'invdist', distance_function)
return
elif distance_function.has_form('List', 2):
if distance_function.leaves[0].get_string_value() == 'CMC':
if distance_function.leaves[1].get_string_value() == 'Acceptability':
compute = lambda c1, c2: _CMC_distance(100*c1.to_color_space('LAB')[:3],
100*c2.to_color_space('LAB')[:3], 2, 1)/100
elif distance_function.leaves[1].get_string_value() == 'Perceptibility':
compute = ColorDistance._distances.get("CMC")
elif distance_function.leaves[1].has_form('List', 2):
if (isinstance(distance_function.leaves[1].leaves[0], Integer)
and isinstance(distance_function.leaves[1].leaves[1], Integer)):
if (distance_function.leaves[1].leaves[0].get_int_value() > 0
and distance_function.leaves[1].leaves[1].get_int_value() > 0):
lightness = distance_function.leaves[1].leaves[0].get_int_value()
chroma = distance_function.leaves[1].leaves[1].get_int_value()
compute = lambda c1, c2: _CMC_distance(100*c1.to_color_space('LAB')[:3],
100*c2.to_color_space('LAB')[:3], lightness, chroma)/100
elif isinstance(distance_function, Symbol) and distance_function.get_name() == 'System`Automatic':
compute = ColorDistance._distances.get("CIE76")
else:
def compute(a, b):
return Expression('Apply',
distance_function,
Expression('List',
Expression('List', *[Real(val) for val in a.to_color_space('LAB')]),
Expression('List', *[Real(val) for val in b.to_color_space('LAB')])
)
)
        if compute is None:
evaluation.message('ColorDistance', 'invdist', distance_function)
return
def distance(a, b):
try:
py_a = _Color.create(a)
py_b = _Color.create(b)
except ColorError:
evaluation.message('ColorDistance', 'invarg', a, b)
raise
result = from_python(compute(py_a, py_b))
return result
try:
if c1.get_head_name() == 'System`List':
if c2.get_head_name() == 'System`List':
if len(c1.leaves) != len(c2.leaves):
evaluation.message('ColorDistance', 'invarg', c1, c2)
return
else:
return Expression('List', *[distance(a, b) for a, b in zip(c1.leaves, c2.leaves)])
else:
return Expression('List', *[distance(c, c2) for c in c1.leaves])
elif c2.get_head_name() == 'System`List':
return Expression('List', *[distance(c1, c) for c in c2.leaves])
else:
return distance(c1, c2)
except ColorError:
return
except NotImplementedError:
evaluation.message('ColorDistance', 'invdist', distance_function)
return
class _Size(_GraphicsElement):
def init(self, graphics, item=None, value=None):
super(_Size, self).init(graphics, item)
if item is not None:
self.value = item.leaves[0].round_to_float()
elif value is not None:
self.value = value
else:
raise BoxConstructError
if self.value < 0:
raise BoxConstructError
class _Thickness(_Size):
pass
class AbsoluteThickness(_Thickness):
"""
<dl>
<dt>'AbsoluteThickness[$p$]'
<dd>sets the line thickness for subsequent graphics primitives
to $p$ points.
</dl>
>> Graphics[Table[{AbsoluteThickness[t], Line[{{20 t, 10}, {20 t, 80}}], Text[ToString[t]<>"pt", {20 t, 0}]}, {t, 0, 10}]]
= -Graphics-
"""
def get_thickness(self):
return self.graphics.translate_absolute((self.value, 0))[0]
class Thickness(_Thickness):
"""
<dl>
<dt>'Thickness[$t$]'
<dd>sets the line thickness for subsequent graphics primitives
to $t$ times the size of the plot area.
</dl>
>> Graphics[{Thickness[0.2], Line[{{0, 0}, {0, 5}}]}, Axes->True, PlotRange->{{-5, 5}, {-5, 5}}]
= -Graphics-
"""
def get_thickness(self):
return self.graphics.translate_relative(self.value)
class Thin(Builtin):
"""
<dl>
<dt>'Thin'
<dd>sets the line width for subsequent graphics primitives to 0.5pt.
</dl>
"""
rules = {
'Thin': 'AbsoluteThickness[0.5]',
}
class Thick(Builtin):
"""
<dl>
<dt>'Thick'
<dd>sets the line width for subsequent graphics primitives to 2pt.
</dl>
"""
rules = {
'Thick': 'AbsoluteThickness[2]',
}
class PointSize(_Size):
"""
<dl>
<dt>'PointSize[$t$]'
<dd>sets the diameter of points to $t$, which is relative to the overall width.
</dl>
"""
def get_size(self):
return self.graphics.view_width * self.value
class FontColor(Builtin):
"""
<dl>
<dt>'FontColor'
<dd>is an option for Style to set the font color.
</dl>
"""
pass
class Offset(Builtin):
pass
class Rectangle(Builtin):
"""
<dl>
<dt>'Rectangle[{$xmin$, $ymin$}]'
<dd>represents a unit square with bottom-left corner at {$xmin$, $ymin$}.
    <dt>'Rectangle[{$xmin$, $ymin$}, {$xmax$, $ymax$}]'
        <dd>is a rectangle extending from {$xmin$, $ymin$} to {$xmax$, $ymax$}.
</dl>
>> Graphics[Rectangle[]]
= -Graphics-
>> Graphics[{Blue, Rectangle[{0.5, 0}], Orange, Rectangle[{0, 0.5}]}]
= -Graphics-
"""
rules = {
'Rectangle[]': 'Rectangle[{0, 0}]',
}
class Disk(Builtin):
"""
<dl>
<dt>'Disk[{$cx$, $cy$}, $r$]'
<dd>fills a circle with center '($cx$, $cy$)' and radius $r$.
<dt>'Disk[{$cx$, $cy$}, {$rx$, $ry$}]'
<dd>fills an ellipse.
<dt>'Disk[{$cx$, $cy$}]'
<dd>chooses radius 1.
<dt>'Disk[]'
<dd>chooses center '(0, 0)' and radius 1.
<dt>'Disk[{$x$, $y$}, ..., {$t1$, $t2$}]'
<dd>is a sector from angle $t1$ to $t2$.
</dl>
>> Graphics[{Blue, Disk[{0, 0}, {2, 1}]}]
= -Graphics-
The outer border can be drawn using 'EdgeForm':
>> Graphics[{EdgeForm[Black], Red, Disk[]}]
= -Graphics-
    Disk can also draw sectors of circles and ellipses:
>> Graphics[Disk[{0, 0}, 1, {Pi / 3, 2 Pi / 3}]]
= -Graphics-
>> Graphics[{Blue, Disk[{0, 0}, {1, 2}, {Pi / 3, 5 Pi / 3}]}]
= -Graphics-
"""
rules = {
'Disk[]': 'Disk[{0, 0}]',
}
class Circle(Builtin):
"""
<dl>
<dt>'Circle[{$cx$, $cy$}, $r$]'
<dd>draws a circle with center '($cx$, $cy$)' and radius $r$.
<dt>'Circle[{$cx$, $cy$}, {$rx$, $ry$}]'
<dd>draws an ellipse.
<dt>'Circle[{$cx$, $cy$}]'
<dd>chooses radius 1.
<dt>'Circle[]'
<dd>chooses center '(0, 0)' and radius 1.
</dl>
>> Graphics[{Red, Circle[{0, 0}, {2, 1}]}]
= -Graphics-
>> Graphics[{Circle[], Disk[{0, 0}, {1, 1}, {0, 2.1}]}]
= -Graphics-
"""
rules = {
'Circle[]': 'Circle[{0, 0}]',
}
class Inset(Builtin):
pass
class Text(Inset):
"""
<dl>
<dt>'Text["$text$", {$x$, $y$}]'
<dd>draws $text$ centered on position '{$x$, $y$}'.
</dl>
>> Graphics[{Text["First", {0, 0}], Text["Second", {1, 1}]}, Axes->True, PlotRange->{{-2, 2}, {-2, 2}}]
= -Graphics-
#> Graphics[{Text[x, {0,0}]}]
= -Graphics-
"""
class RectangleBox(_GraphicsElement):
def init(self, graphics, style, item):
super(RectangleBox, self).init(graphics, item, style)
if len(item.leaves) not in (1, 2):
raise BoxConstructError
self.edge_color, self.face_color = style.get_style(
_Color, face_element=True)
self.p1 = Coords(graphics, item.leaves[0])
if len(item.leaves) == 1:
self.p2 = self.p1.add(1, 1)
elif len(item.leaves) == 2:
self.p2 = Coords(graphics, item.leaves[1])
def extent(self):
l = self.style.get_line_width(face_element=True) / 2
result = []
for p in [self.p1, self.p2]:
x, y = p.pos()
result.extend([(x - l, y - l), (
x - l, y + l), (x + l, y - l), (x + l, y + l)])
return result
def to_svg(self):
l = self.style.get_line_width(face_element=True)
x1, y1 = self.p1.pos()
x2, y2 = self.p2.pos()
xmin = min(x1, x2)
ymin = min(y1, y2)
w = max(x1, x2) - xmin
h = max(y1, y2) - ymin
style = create_css(self.edge_color, self.face_color, l)
return '<rect x="%f" y="%f" width="%f" height="%f" style="%s" />' % (
xmin, ymin, w, h, style)
def to_asy(self):
l = self.style.get_line_width(face_element=True)
x1, y1 = self.p1.pos()
x2, y2 = self.p2.pos()
pens = create_pens(
self.edge_color, self.face_color, l, is_face_element=True)
x1, x2, y1, y2 = asy_number(x1), asy_number(
x2), asy_number(y1), asy_number(y2)
return 'filldraw((%s,%s)--(%s,%s)--(%s,%s)--(%s,%s)--cycle, %s);' % (
x1, y1, x2, y1, x2, y2, x1, y2, pens)
class _RoundBox(_GraphicsElement):
face_element = None
def init(self, graphics, style, item):
super(_RoundBox, self).init(graphics, item, style)
if len(item.leaves) not in (1, 2):
raise BoxConstructError
self.edge_color, self.face_color = style.get_style(
_Color, face_element=self.face_element)
self.c = Coords(graphics, item.leaves[0])
if len(item.leaves) == 1:
rx = ry = 1
elif len(item.leaves) == 2:
r = item.leaves[1]
if r.has_form('List', 2):
rx = r.leaves[0].round_to_float()
ry = r.leaves[1].round_to_float()
else:
rx = ry = r.round_to_float()
self.r = self.c.add(rx, ry)
def extent(self):
l = self.style.get_line_width(face_element=self.face_element) / 2
x, y = self.c.pos()
rx, ry = self.r.pos()
rx -= x
ry = y - ry
rx += l
ry += l
return [(x - rx, y - ry), (x - rx, y + ry),
(x + rx, y - ry), (x + rx, y + ry)]
def to_svg(self):
x, y = self.c.pos()
rx, ry = self.r.pos()
rx -= x
ry = y - ry
l = self.style.get_line_width(face_element=self.face_element)
style = create_css(self.edge_color, self.face_color, stroke_width=l)
return '<ellipse cx="%f" cy="%f" rx="%f" ry="%f" style="%s" />' % (
x, y, rx, ry, style)
def to_asy(self):
x, y = self.c.pos()
rx, ry = self.r.pos()
rx -= x
ry -= y
l = self.style.get_line_width(face_element=self.face_element)
pen = create_pens(edge_color=self.edge_color,
face_color=self.face_color, stroke_width=l,
is_face_element=self.face_element)
cmd = 'filldraw' if self.face_element else 'draw'
return '%s(ellipse((%s,%s),%s,%s), %s);' % (
cmd, asy_number(x), asy_number(y), asy_number(rx), asy_number(ry),
pen)
class _ArcBox(_RoundBox):
def init(self, graphics, style, item):
if len(item.leaves) == 3:
arc_expr = item.leaves[2]
if arc_expr.get_head_name() != 'System`List':
raise BoxConstructError
arc = arc_expr.leaves
pi2 = 2 * pi
start_angle = arc[0].round_to_float()
end_angle = arc[1].round_to_float()
if start_angle is None or end_angle is None:
raise BoxConstructError
elif end_angle >= start_angle + pi2: # full circle?
self.arc = None
else:
if end_angle <= start_angle:
self.arc = (end_angle, start_angle)
else:
self.arc = (start_angle, end_angle)
item = Expression(item.get_head_name(), *item.leaves[:2])
else:
self.arc = None
super(_ArcBox, self).init(graphics, style, item)
def _arc_params(self):
x, y = self.c.pos()
rx, ry = self.r.pos()
rx -= x
ry -= y
start_angle, end_angle = self.arc
if end_angle - start_angle <= pi:
large_arc = 0
else:
large_arc = 1
sx = x + rx * cos(start_angle)
sy = y + ry * sin(start_angle)
ex = x + rx * cos(end_angle)
ey = y + ry * sin(end_angle)
return x, y, abs(rx), abs(ry), sx, sy, ex, ey, large_arc
def to_svg(self):
if self.arc is None:
return super(_ArcBox, self).to_svg()
x, y, rx, ry, sx, sy, ex, ey, large_arc = self._arc_params()
def path(closed):
if closed:
yield 'M %f,%f' % (x, y)
yield 'L %f,%f' % (sx, sy)
else:
yield 'M %f,%f' % (sx, sy)
yield 'A %f,%f,0,%d,0,%f,%f' % (rx, ry, large_arc, ex, ey)
if closed:
yield 'Z'
l = self.style.get_line_width(face_element=self.face_element)
style = create_css(self.edge_color, self.face_color, stroke_width=l)
return '<path d="%s" style="%s" />' % (' '.join(path(self.face_element)), style)
def to_asy(self):
if self.arc is None:
return super(_ArcBox, self).to_asy()
x, y, rx, ry, sx, sy, ex, ey, large_arc = self._arc_params()
def path(closed):
if closed:
yield '(%s,%s)--(%s,%s)--' % tuple(
asy_number(t) for t in (x, y, sx, sy))
yield 'arc((%s,%s), (%s, %s), (%s, %s))' % tuple(
asy_number(t) for t in (x, y, sx, sy, ex, ey))
if closed:
yield '--cycle'
l = self.style.get_line_width(face_element=self.face_element)
pen = create_pens(edge_color=self.edge_color,
face_color=self.face_color, stroke_width=l,
is_face_element=self.face_element)
command = 'filldraw' if self.face_element else 'draw'
return '%s(%s, %s);' % (command, ''.join(path(self.face_element)), pen)
class DiskBox(_ArcBox):
face_element = True
class CircleBox(_ArcBox):
face_element = False
class _Polyline(_GraphicsElement):
def do_init(self, graphics, points):
if not points.has_form('List', None):
raise BoxConstructError
if (points.leaves and points.leaves[0].has_form('List', None) and
all(leaf.has_form('List', None)
for leaf in points.leaves[0].leaves)):
leaves = points.leaves
self.multi_parts = True
else:
leaves = [Expression('List', *points.leaves)]
self.multi_parts = False
lines = []
for leaf in leaves:
if leaf.has_form('List', None):
lines.append(leaf.leaves)
else:
raise BoxConstructError
self.lines = [[graphics.coords(
graphics, point) for point in line] for line in lines]
def extent(self):
l = self.style.get_line_width(face_element=False)
result = []
for line in self.lines:
for c in line:
x, y = c.pos()
result.extend([(x - l, y - l), (
x - l, y + l), (x + l, y - l), (x + l, y + l)])
return result
class Point(Builtin):
"""
<dl>
<dt>'Point[{$point_1$, $point_2$ ...}]'
<dd>represents the point primitive.
<dt>'Point[{{$p_11$, $p_12$, ...}, {$p_21$, $p_22$, ...}, ...}]'
<dd>represents a number of point primitives.
</dl>
>> Graphics[Point[{0,0}]]
= -Graphics-
>> Graphics[Point[Table[{Sin[t], Cos[t]}, {t, 0, 2. Pi, Pi / 15.}]]]
= -Graphics-
>> Graphics3D[Point[Table[{Sin[t], Cos[t], 0}, {t, 0, 2. Pi, Pi / 15.}]]]
= -Graphics3D-
"""
pass
class PointBox(_Polyline):
def init(self, graphics, style, item=None):
super(PointBox, self).init(graphics, item, style)
self.edge_color, self.face_color = style.get_style(
_Color, face_element=True)
if item is not None:
if len(item.leaves) != 1:
raise BoxConstructError
points = item.leaves[0]
if points.has_form('List', None) and len(points.leaves) != 0:
if all(not leaf.has_form('List', None)
for leaf in points.leaves):
points = Expression('List', points)
self.do_init(graphics, points)
else:
raise BoxConstructError
def to_svg(self):
point_size, _ = self.style.get_style(PointSize, face_element=False)
if point_size is None:
point_size = PointSize(self.graphics, value=0.005)
size = point_size.get_size()
style = create_css(edge_color=self.edge_color,
stroke_width=0, face_color=self.face_color)
svg = ''
for line in self.lines:
for coords in line:
svg += '<circle cx="%f" cy="%f" r="%f" style="%s" />' % (
coords.pos()[0], coords.pos()[1], size, style)
return svg
def to_asy(self):
pen = create_pens(face_color=self.face_color, is_face_element=False)
asy = ''
for line in self.lines:
for coords in line:
asy += 'dot(%s, %s);' % (coords.pos(), pen)
return asy
class Line(Builtin):
"""
<dl>
<dt>'Line[{$point_1$, $point_2$ ...}]'
<dd>represents the line primitive.
<dt>'Line[{{$p_11$, $p_12$, ...}, {$p_21$, $p_22$, ...}, ...}]'
<dd>represents a number of line primitives.
</dl>
>> Graphics[Line[{{0,1},{0,0},{1,0},{1,1}}]]
= -Graphics-
>> Graphics3D[Line[{{0,0,0},{0,1,1},{1,0,0}}]]
= -Graphics3D-
"""
pass
class LineBox(_Polyline):
def init(self, graphics, style, item=None, lines=None):
super(LineBox, self).init(graphics, item, style)
self.edge_color, _ = style.get_style(_Color, face_element=False)
if item is not None:
if len(item.leaves) != 1:
raise BoxConstructError
points = item.leaves[0]
self.do_init(graphics, points)
elif lines is not None:
self.lines = lines
else:
raise BoxConstructError
def to_svg(self):
l = self.style.get_line_width(face_element=False)
style = create_css(edge_color=self.edge_color, stroke_width=l)
svg = ''
for line in self.lines:
svg += '<polyline points="%s" style="%s" />' % (
' '.join(['%f,%f' % coords.pos() for coords in line]), style)
return svg
def to_asy(self):
l = self.style.get_line_width(face_element=False)
pen = create_pens(edge_color=self.edge_color, stroke_width=l)
asy = ''
for line in self.lines:
            path = '--'.join(['(%.5g,%.5g)' % coords.pos() for coords in line])
asy += 'draw(%s, %s);' % (path, pen)
return asy
def _svg_bezier(*segments):
# see https://www.w3.org/TR/SVG/paths.html#PathDataCubicBezierCommands
# see https://docs.webplatform.org/wiki/svg/tutorials/smarter_svg_shapes
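    # e.g. _svg_bezier((3, [(0, 0), (1, 1), (2, -1), (3, 0)])) yields
    # 'M0.000000,0.000000' followed by
    # 'C1.000000,1.000000 2.000000,-1.000000 3.000000,0.000000'; the caller
    # joins these pieces into an SVG path "d" attribute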
while segments and not segments[0][1]:
segments = segments[1:]
if not segments:
return
forms = 'LQC' # SVG commands for line, quadratic bezier, cubic bezier
def path(max_degree, p):
max_degree = min(max_degree, len(forms))
while p:
n = min(max_degree, len(p)) # 1, 2, or 3
if n < 1:
raise BoxConstructError
yield forms[n - 1] + ' '.join('%f,%f' % xy for xy in p[:n])
p = p[n:]
k, p = segments[0]
yield 'M%f,%f' % p[0]
for s in path(k, p[1:]):
yield s
for k, p in segments[1:]:
for s in path(k, p):
yield s
def _asy_bezier(*segments):
# see http://asymptote.sourceforge.net/doc/Bezier-curves.html#Bezier-curves
while segments and not segments[0][1]:
segments = segments[1:]
if not segments:
return
    def cubic(p0, p1, p2, p3):
        return '..controls(%.5g,%.5g) and (%.5g,%.5g)..(%.5g,%.5g)' % tuple(chain(p1, p2, p3))
    def quadratic(qp0, qp1, qp2):
# asymptote only supports cubic beziers, so we convert this quadratic
# bezier to a cubic bezier, see http://fontforge.github.io/bezier.html
# CP0 = QP0
# CP3 = QP2
# CP1 = QP0 + 2 / 3 * (QP1 - QP0)
# CP2 = QP2 + 2 / 3 * (QP1 - QP2)
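        # e.g. QP0=(0,0), QP1=(1,1), QP2=(2,0) gives CP1=(2/3, 2/3) and
        # CP2=(4/3, 2/3); both curves trace the same arc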
qp0x, qp0y = qp0
qp1x, qp1y = qp1
qp2x, qp2y = qp2
t = 2. / 3.
cp0 = qp0
cp1 = (qp0x + t * (qp1x - qp0x), qp0y + t * (qp1y - qp0y))
cp2 = (qp2x + t * (qp1x - qp2x), qp2y + t * (qp1y - qp2y))
cp3 = qp2
return cubic(cp0, cp1, cp2, cp3)
def linear(p0, p1):
return '--(%.5g,%.5g)' % p1
    forms = (linear, quadratic, cubic)
def path(max_degree, p):
max_degree = min(max_degree, len(forms))
while p:
n = min(max_degree, len(p) - 1) # 1, 2, or 3
if n < 1:
break
yield forms[n - 1](*p[:n + 1])
p = p[n:]
k, p = segments[0]
yield '(%.5g,%.5g)' % p[0]
connect = []
for k, p in segments:
for s in path(k, list(chain(connect, p))):
yield s
connect = p[-1:]
class BernsteinBasis(Builtin):
rules = {
'BernsteinBasis[d_, n_, x_]': 'Piecewise[{{Binomial[d, n] * x ^ n * (1 - x) ^ (d - n), 0 < x < 1}}, 0]',
}
class BezierFunction(Builtin):
rules = {
'BezierFunction[p_]': 'Function[x, Total[p * BernsteinBasis[Length[p] - 1, Range[0, Length[p] - 1], x]]]',
}
class BezierCurve(Builtin):
"""
<dl>
<dt>'BezierCurve[{$p1$, $p2$ ...}]'
<dd>represents a bezier curve with $p1$, $p2$ as control points.
</dl>
>> Graphics[BezierCurve[{{0, 0},{1, 1},{2, -1},{3, 0}}]]
= -Graphics-
>> Module[{p={{0, 0},{1, 1},{2, -1},{4, 0}}}, Graphics[{BezierCurve[p], Red, Point[Table[BezierFunction[p][x], {x, 0, 1, 0.1}]]}]]
= -Graphics-
"""
options = {
'SplineDegree': '3',
}
class BezierCurveBox(_Polyline):
def init(self, graphics, style, item, options):
super(BezierCurveBox, self).init(graphics, item, style)
if len(item.leaves) != 1 or item.leaves[0].get_head_name() != 'System`List':
raise BoxConstructError
self.edge_color, _ = style.get_style(_Color, face_element=False)
points = item.leaves[0]
self.do_init(graphics, points)
spline_degree = options.get('System`SplineDegree')
if not isinstance(spline_degree, Integer):
raise BoxConstructError
self.spline_degree = spline_degree.get_int_value()
def to_svg(self):
l = self.style.get_line_width(face_element=False)
style = create_css(edge_color=self.edge_color, stroke_width=l)
svg = ''
for line in self.lines:
s = ' '.join(_svg_bezier((self.spline_degree, [xy.pos() for xy in line])))
svg += '<path d="%s" style="%s"/>' % (s, style)
return svg
def to_asy(self):
l = self.style.get_line_width(face_element=False)
pen = create_pens(edge_color=self.edge_color, stroke_width=l)
asy = ''
for line in self.lines:
for path in _asy_bezier((self.spline_degree, [xy.pos() for xy in line])):
asy += 'draw(%s, %s);' % (path, pen)
return asy
class FilledCurve(Builtin):
"""
<dl>
<dt>'FilledCurve[{$segment1$, $segment2$ ...}]'
<dd>represents a filled curve.
</dl>
>> Graphics[FilledCurve[{Line[{{0, 0}, {1, 1}, {2, 0}}]}]]
= -Graphics-
>> Graphics[FilledCurve[{BezierCurve[{{0, 0}, {1, 1}, {2, 0}}], Line[{{3, 0}, {0, 2}}]}]]
= -Graphics-
"""
pass
class FilledCurveBox(_GraphicsElement):
def init(self, graphics, style, item=None):
super(FilledCurveBox, self).init(graphics, item, style)
self.edge_color, self.face_color = style.get_style(_Color, face_element=True)
if item is not None and item.leaves and item.leaves[0].has_form('List', None):
if len(item.leaves) != 1:
raise BoxConstructError
leaves = item.leaves[0].leaves
def parse_component(segments):
for segment in segments:
head = segment.get_head_name()
if head == 'System`Line':
k = 1
parts = segment.leaves
elif head == 'System`BezierCurve':
parts, options = _data_and_options(segment.leaves, {})
spline_degree = options.get('SplineDegree', Integer(3))
if not isinstance(spline_degree, Integer):
raise BoxConstructError
k = spline_degree.get_int_value()
elif head == 'System`BSplineCurve':
                        # FIXME: convert bspline to bezier here
                        raise NotImplementedError
else:
raise BoxConstructError
coords = []
for part in parts:
if part.get_head_name() != 'System`List':
raise BoxConstructError
coords.extend([graphics.coords(graphics, xy) for xy in part.leaves])
yield k, coords
if all(x.get_head_name() == 'System`List' for x in leaves):
self.components = [list(parse_component(x)) for x in leaves]
else:
self.components = [list(parse_component(leaves))]
else:
raise BoxConstructError
def to_svg(self):
l = self.style.get_line_width(face_element=False)
style = create_css(edge_color=self.edge_color, face_color=self.face_color, stroke_width=l)
def components():
for component in self.components:
transformed = [(k, [xy.pos() for xy in p]) for k, p in component]
yield ' '.join(_svg_bezier(*transformed)) + ' Z'
return '<path d="%s" style="%s" fill-rule="evenodd"/>' % (' '.join(components()), style)
def to_asy(self):
l = self.style.get_line_width(face_element=False)
pen = create_pens(edge_color=self.edge_color, stroke_width=l)
if not pen:
pen = 'currentpen'
def components():
for component in self.components:
transformed = [(k, [xy.pos() for xy in p]) for k, p in component]
yield 'fill(%s--cycle, %s);' % (''.join(_asy_bezier(*transformed)), pen)
return ''.join(components())
def extent(self):
l = self.style.get_line_width(face_element=False)
result = []
for component in self.components:
for _, points in component:
for p in points:
x, y = p.pos()
result.extend([(x - l, y - l), (x - l, y + l), (x + l, y - l), (x + l, y + l)])
return result
class Polygon(Builtin):
"""
<dl>
<dt>'Polygon[{$point_1$, $point_2$ ...}]'
<dd>represents the filled polygon primitive.
<dt>'Polygon[{{$p_11$, $p_12$, ...}, {$p_21$, $p_22$, ...}, ...}]'
<dd>represents a number of filled polygon primitives.
</dl>
>> Graphics[Polygon[{{1,0},{0,0},{0,1}}]]
= -Graphics-
>> Graphics3D[Polygon[{{0,0,0},{0,1,1},{1,0,0}}]]
= -Graphics3D-
"""
pass
class PolygonBox(_Polyline):
def init(self, graphics, style, item=None):
super(PolygonBox, self).init(graphics, item, style)
self.edge_color, self.face_color = style.get_style(
_Color, face_element=True)
if item is not None:
if len(item.leaves) not in (1, 2):
raise BoxConstructError
points = item.leaves[0]
self.do_init(graphics, points)
self.vertex_colors = None
for leaf in item.leaves[1:]:
if not leaf.has_form('Rule', 2):
raise BoxConstructError
name = leaf.leaves[0].get_name()
self.process_option(name, leaf.leaves[1])
else:
raise BoxConstructError
def process_option(self, name, value):
if name == 'System`VertexColors':
if not value.has_form('List', None):
raise BoxConstructError
black = RGBColor(components=[0, 0, 0, 1])
self.vertex_colors = [[black] * len(line) for line in self.lines]
colors = value.leaves
if not self.multi_parts:
colors = [Expression('List', *colors)]
for line_index, line in enumerate(self.lines):
if line_index >= len(colors):
break
line_colors = colors[line_index]
if not line_colors.has_form('List', None):
continue
for index, color in enumerate(line_colors.leaves):
if index >= len(self.vertex_colors[line_index]):
break
try:
self.vertex_colors[line_index][
index] = _Color.create(color)
except ColorError:
continue
else:
raise BoxConstructError
def to_svg(self):
l = self.style.get_line_width(face_element=True)
if self.vertex_colors is None:
face_color = self.face_color
else:
face_color = None
style = create_css(
edge_color=self.edge_color, face_color=face_color, stroke_width=l)
svg = ''
if self.vertex_colors is not None:
mesh = []
for index, line in enumerate(self.lines):
data = [[coords.pos(), color.to_js()] for coords, color in zip(
line, self.vertex_colors[index])]
mesh.append(data)
svg += '<meshgradient data="%s" />' % json.dumps(mesh)
for line in self.lines:
svg += '<polygon points="%s" style="%s" />' % (
' '.join('%f,%f' % coords.pos() for coords in line), style)
return svg
def to_asy(self):
l = self.style.get_line_width(face_element=True)
if self.vertex_colors is None:
face_color = self.face_color
else:
face_color = None
pens = create_pens(edge_color=self.edge_color, face_color=face_color,
stroke_width=l, is_face_element=True)
asy = ''
if self.vertex_colors is not None:
paths = []
colors = []
edges = []
for index, line in enumerate(self.lines):
paths.append('--'.join([
'(%.5g,%.5g)' % coords.pos() for coords in line]) + '--cycle')
# ignore opacity
colors.append(','.join([
color.to_asy()[0] for color in self.vertex_colors[index]]))
edges.append(','.join(['0'] + ['1'] * (
len(self.vertex_colors[index]) - 1)))
asy += 'gouraudshade(%s, new pen[] {%s}, new int[] {%s});' % (
'^^'.join(paths), ','.join(colors), ','.join(edges))
if pens and pens != 'nullpen':
for line in self.lines:
path = '--'.join(
['(%.5g,%.5g)' % coords.pos() for coords in line]) + '--cycle'
asy += 'filldraw(%s, %s);' % (path, pens)
return asy
class RegularPolygon(Builtin):
"""
<dl>
<dt>'RegularPolygon[$n$]'
<dd>gives the regular polygon with $n$ edges.
<dt>'RegularPolygon[$r$, $n$]'
<dd>gives the regular polygon with $n$ edges and radius $r$.
<dt>'RegularPolygon[{$r$, $phi$}, $n$]'
<dd>gives the regular polygon with radius $r$ with one vertex drawn at angle $phi$.
    <dt>'RegularPolygon[{$x$, $y$}, $r$, $n$]'
        <dd>gives the regular polygon centered at the position {$x$, $y$}.
</dl>
>> Graphics[RegularPolygon[5]]
= -Graphics-
>> Graphics[{Yellow, Rectangle[], Orange, RegularPolygon[{1, 1}, {0.25, 0}, 3]}]
= -Graphics-
"""
class RegularPolygonBox(PolygonBox):
def init(self, graphics, style, item):
if len(item.leaves) in (1, 2, 3) and isinstance(item.leaves[-1], Integer):
r = 1.
phi0 = None
if len(item.leaves) >= 2:
rspec = item.leaves[-2]
if rspec.get_head_name() == 'System`List':
if len(rspec.leaves) != 2:
raise BoxConstructError
r = rspec.leaves[0].round_to_float()
phi0 = rspec.leaves[1].round_to_float()
else:
r = rspec.round_to_float()
x = 0.
y = 0.
if len(item.leaves) == 3:
pos = item.leaves[0]
if not pos.has_form('List', 2):
raise BoxConstructError
x = pos.leaves[0].round_to_float()
y = pos.leaves[1].round_to_float()
n = item.leaves[-1].get_int_value()
if any(t is None for t in (x, y, r)) or n < 0:
raise BoxConstructError
if phi0 is None:
phi0 = -pi / 2.
                if n > 0:
phi0 += pi / n
pi2 = pi * 2.
def vertices():
for i in range(n):
phi = phi0 + pi2 * i / float(n)
yield Expression('List', Real(x + r * cos(phi)), Real(y + r * sin(phi)))
new_item = Expression('RegularPolygonBox', Expression('List', *list(vertices())))
else:
raise BoxConstructError
super(RegularPolygonBox, self).init(graphics, style, new_item)
class Arrow(Builtin):
"""
<dl>
<dt>'Arrow[{$p1$, $p2$}]'
<dd>represents a line from $p1$ to $p2$ that ends with an arrow at $p2$.
<dt>'Arrow[{$p1$, $p2$}, $s$]'
<dd>represents a line with arrow that keeps a distance of $s$ from $p1$
and $p2$.
<dt>'Arrow[{$point_1$, $point_2$}, {$s1$, $s2$}]'
<dd>represents a line with arrow that keeps a distance of $s1$ from $p1$
and a distance of $s2$ from $p2$.
</dl>
>> Graphics[Arrow[{{0,0}, {1,1}}]]
= -Graphics-
>> Graphics[{Circle[], Arrow[{{2, 1}, {0, 0}}, 1]}]
= -Graphics-
Keeping distances may happen across multiple segments:
>> Table[Graphics[{Circle[], Arrow[Table[{Cos[phi],Sin[phi]},{phi,0,2*Pi,Pi/2}],{d, d}]}],{d,0,2,0.5}]
= {-Graphics-, -Graphics-, -Graphics-, -Graphics-, -Graphics-}
"""
pass
class Arrowheads(_GraphicsElement):
"""
<dl>
<dt>'Arrowheads[$s$]'
<dd>specifies that Arrow[] draws one arrow of size $s$ (relative to width of image, defaults to 0.04).
<dt>'Arrowheads[{$spec1$, $spec2$, ..., $specn$}]'
<dd>specifies that Arrow[] draws n arrows as defined by $spec1$, $spec2$, ... $specn$.
<dt>'Arrowheads[{{$s$}}]'
<dd>specifies that one arrow of size $s$ should be drawn.
<dt>'Arrowheads[{{$s$, $pos$}}]'
        <dd>specifies that one arrow of size $s$ should be drawn at position $pos$ (for the arrow to
        be on the line, $pos$ has to be between 0, i.e. the start of the line, and 1, i.e. the end
        of the line).
<dt>'Arrowheads[{{$s$, $pos$, $g$}}]'
<dd>specifies that one arrow of size $s$ should be drawn at position $pos$ using Graphics $g$.
</dl>
Arrows on both ends can be achieved using negative sizes:
>> Graphics[{Circle[],Arrowheads[{-0.04, 0.04}], Arrow[{{0, 0}, {2, 2}}, {1,1}]}]
= -Graphics-
    You may also specify your own arrow shapes:
>> Graphics[{Circle[], Arrowheads[{{0.04, 1, Graphics[{Red, Disk[]}]}}], Arrow[{{0, 0}, {Cos[Pi/3],Sin[Pi/3]}}]}]
= -Graphics-
>> Graphics[{Arrowheads[Table[{0.04, i/10, Graphics[Disk[]]},{i,1,10}]], Arrow[{{0, 0}, {6, 5}, {1, -3}, {-2, 2}}]}]
= -Graphics-
"""
default_size = 0.04
symbolic_sizes = {
'System`Tiny': 3,
'System`Small': 5,
'System`Medium': 9,
'System`Large': 18,
}
def init(self, graphics, item=None):
super(Arrowheads, self).init(graphics, item)
if len(item.leaves) != 1:
raise BoxConstructError
self.spec = item.leaves[0]
def _arrow_size(self, s, extent):
if isinstance(s, Symbol):
size = self.symbolic_sizes.get(s.get_name(), 0)
return self.graphics.translate_absolute((size, 0))[0]
else:
return _to_float(s) * extent
def heads(self, extent, default_arrow, custom_arrow):
# see https://reference.wolfram.com/language/ref/Arrowheads.html
if self.spec.get_head_name() == 'System`List':
leaves = self.spec.leaves
if all(x.get_head_name() == 'System`List' for x in leaves):
for head in leaves:
spec = head.leaves
if len(spec) not in (2, 3):
raise BoxConstructError
size_spec = spec[0]
if isinstance(size_spec, Symbol) and size_spec.get_name() == 'System`Automatic':
s = self.default_size * extent
elif size_spec.is_numeric():
s = self._arrow_size(size_spec, extent)
else:
raise BoxConstructError
if len(spec) == 3 and custom_arrow:
graphics = spec[2]
if graphics.get_head_name() != 'System`Graphics':
raise BoxConstructError
arrow = custom_arrow(graphics)
else:
arrow = default_arrow
if not isinstance(spec[1], (Real, Rational, Integer)):
raise BoxConstructError
yield s, _to_float(spec[1]), arrow
else:
n = max(1., len(leaves) - 1.)
for i, head in enumerate(leaves):
yield self._arrow_size(head, extent), i / n, default_arrow
else:
yield self._arrow_size(self.spec, extent), 1, default_arrow
class ArrowBox(_Polyline):
def init(self, graphics, style, item=None):
super(ArrowBox, self).init(graphics, item, style)
if not item:
raise BoxConstructError
leaves = item.leaves
if len(leaves) == 2:
setback = self._setback_spec(leaves[1])
elif len(leaves) == 1:
setback = (0, 0)
else:
raise BoxConstructError
self.setback = setback
self.do_init(graphics, leaves[0])
self.graphics = graphics
self.edge_color, _ = style.get_style(_Color, face_element=False)
self.heads, _ = style.get_style(Arrowheads, face_element=False)
@staticmethod
def _setback_spec(expr):
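        # e.g. a setback of 1 yields (1., 1.) and a setback of {1, 2}
        # yields (1., 2.)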
if expr.get_head_name() == 'System`List':
leaves = expr.leaves
if len(leaves) != 2:
raise BoxConstructError
return tuple(max(_to_float(l), 0.) for l in leaves)
else:
s = max(_to_float(expr), 0.)
return s, s
@staticmethod
def _default_arrow(polygon):
        # the default arrow drawn by draw() below looks like this:
#
# H
# .:.
# . : .
# . : .
# . .B. .
# . . : . .
# S. E .S
# :
# :
# :
#
# the head H is where the arrow's point is. at base B, the arrow spreads out at right angles from the line
# it attaches to. the arrow size 's' given in the Arrowheads specification always specifies the length H-B.
#
# the spread out points S are defined via two constants: arrow_edge (which defines the factor to get from
# H-B to H-E) and arrow_spread (which defines the factor to get from H-B to E-S).
arrow_spread = 0.3
arrow_edge = 1.1
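        # e.g. draw(0, 0, 1, 0, t1=1, s=0.5) emits the head polygon
        # ((1, 0), (0.45, 0.15), (0.5, 0), (0.45, -0.15))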
def draw(px, py, vx, vy, t1, s):
hx = px + t1 * vx # compute H
hy = py + t1 * vy
t0 = t1 - s
bx = px + t0 * vx # compute B
by = py + t0 * vy
te = t1 - arrow_edge * s
ex = px + te * vx # compute E
ey = py + te * vy
ts = arrow_spread * s
sx = -vy * ts
sy = vx * ts
head_points = ((hx, hy),
(ex + sx, ey + sy),
(bx, by),
(ex - sx, ey - sy))
for shape in polygon(head_points):
yield shape
return draw
def _draw(self, polyline, default_arrow, custom_arrow, extent):
if self.heads:
heads = list(self.heads.heads(extent, default_arrow, custom_arrow))
heads = sorted(heads, key=lambda spec: spec[1]) # sort by pos
else:
heads = ((extent * Arrowheads.default_size, 1, default_arrow),)
def norm(p, q):
px, py = p
qx, qy = q
dx = qx - px
dy = qy - py
length = sqrt(dx * dx + dy * dy)
return dx, dy, length
def segments(points):
for i in range(len(points) - 1):
px, py = points[i]
dx, dy, dl = norm((px, py), points[i + 1])
yield dl, px, py, dx, dy
def setback(p, q, d):
dx, dy, length = norm(p, q)
if d >= length:
return None, length
else:
s = d / length
return (s * dx, s * dy), d
def shrink_one_end(line, s):
while s > 0.:
if len(line) < 2:
return []
xy, length = setback(line[0].p, line[1].p, s)
if xy is not None:
line[0] = line[0].add(*xy)
else:
line = line[1:]
s -= length
return line
def shrink(line, s1, s2):
return list(reversed(shrink_one_end(
list(reversed(shrink_one_end(line[:], s1))), s2)))
def render(points, heads): # heads has to be sorted by pos
seg = list(segments(points))
if not seg:
return
i = 0
t0 = 0.
n = len(seg)
dl, px, py, dx, dy = seg[i]
total = sum(segment[0] for segment in seg)
for s, t, draw in ((s, pos * total - t0, draw) for s, pos, draw in heads):
if s == 0.: # ignore zero-sized arrows
continue
if i < n: # not yet past last segment?
while t > dl: # position past current segment?
t -= dl
t0 += dl
i += 1
if i == n:
px += dx # move to last segment's end
py += dy
break
else:
dl, px, py, dx, dy = seg[i]
for shape in draw(px, py, dx / dl, dy / dl, t, s):
yield shape
for line in self.lines:
if len(line) < 2:
continue
# note that shrinking needs to happen in the Graphics[] coordinate space, whereas the
# subsequent position calculation needs to happen in pixel space.
transformed_points = [xy.pos() for xy in shrink(line, *self.setback)]
for s in polyline(transformed_points):
yield s
for s in render(transformed_points, heads):
yield s
def _custom_arrow(self, format, format_transform):
def make(graphics):
xmin, xmax, ymin, ymax, ox, oy, ex, ey, code = _extract_graphics(
graphics, format, self.graphics.evaluation)
boxw = xmax - xmin
boxh = ymax - ymin
def draw(px, py, vx, vy, t1, s):
t0 = t1
cx = px + t0 * vx
cy = py + t0 * vy
transform = format_transform()
transform.translate(cx, cy)
transform.scale(-s / boxw * ex, -s / boxh * ey)
transform.rotate(90 + degrees(atan2(vy, vx)))
transform.translate(-ox, -oy)
yield transform.apply(code)
return draw
return make
def to_svg(self):
width = self.style.get_line_width(face_element=False)
style = create_css(edge_color=self.edge_color, stroke_width=width)
arrow_style = create_css(face_color=self.edge_color, stroke_width=width)
def polyline(points):
yield '<polyline points="'
yield ' '.join('%f,%f' % xy for xy in points)
yield '" style="%s" />' % style
def polygon(points):
yield '<polygon points="'
yield ' '.join('%f,%f' % xy for xy in points)
yield '" style="%s" />' % arrow_style
extent = self.graphics.view_width or 0
default_arrow = self._default_arrow(polygon)
custom_arrow = self._custom_arrow('svg', _SVGTransform)
return ''.join(self._draw(polyline, default_arrow, custom_arrow, extent))
def to_asy(self):
width = self.style.get_line_width(face_element=False)
pen = create_pens(edge_color=self.edge_color, stroke_width=width)
arrow_pen = create_pens(face_color=self.edge_color, stroke_width=width)
def polyline(points):
yield 'draw('
            yield '--'.join(['(%.5g,%.5g)' % xy for xy in points])
            yield ', %s);' % pen
def polygon(points):
yield 'filldraw('
            yield '--'.join(['(%.5g,%.5g)' % xy for xy in points])
            yield '--cycle, %s);' % arrow_pen
extent = self.graphics.view_width or 0
default_arrow = self._default_arrow(polygon)
custom_arrow = self._custom_arrow('asy', _ASYTransform)
return ''.join(self._draw(polyline, default_arrow, custom_arrow, extent))
def extent(self):
width = self.style.get_line_width(face_element=False)
def polyline(points):
for p in points:
x, y = p
yield x - width, y - width
yield x - width, y + width
yield x + width, y - width
yield x + width, y + width
def polygon(points):
for p in points:
yield p
def default_arrow(px, py, vx, vy, t1, s):
yield px, py
return list(self._draw(polyline, default_arrow, None, 0))
class InsetBox(_GraphicsElement):
def init(self, graphics, style, item=None, content=None, pos=None,
opos=(0, 0)):
super(InsetBox, self).init(graphics, item, style)
self.color = self.style.get_option('System`FontColor')
if self.color is None:
self.color, _ = style.get_style(_Color, face_element=False)
if item is not None:
if len(item.leaves) not in (1, 2, 3):
raise BoxConstructError
content = item.leaves[0]
self.content = content.format(
graphics.evaluation, 'TraditionalForm')
if len(item.leaves) > 1:
self.pos = Coords(graphics, item.leaves[1])
else:
self.pos = Coords(graphics, pos=(0, 0))
if len(item.leaves) > 2:
self.opos = coords(item.leaves[2])
else:
self.opos = (0, 0)
else:
self.content = content
self.pos = pos
self.opos = opos
self.content_text = self.content.boxes_to_text(
evaluation=self.graphics.evaluation)
def extent(self):
p = self.pos.pos()
h = 25
        w = len(self.content_text) * \
            7  # rough approximation by number of characters
opos = self.opos
x = p[0] - w / 2.0 - opos[0] * w / 2.0
y = p[1] - h / 2.0 + opos[1] * h / 2.0
return [(x, y), (x + w, y + h)]
def to_svg(self):
x, y = self.pos.pos()
content = self.content.boxes_to_xml(
evaluation=self.graphics.evaluation)
style = create_css(font_color=self.color)
svg = (
'<foreignObject x="%f" y="%f" ox="%f" oy="%f" style="%s">'
'<math>%s</math></foreignObject>') % (
x, y, self.opos[0], self.opos[1], style, content)
return svg
def to_asy(self):
x, y = self.pos.pos()
content = self.content.boxes_to_tex(
evaluation=self.graphics.evaluation)
pen = create_pens(edge_color=self.color)
asy = 'label("$%s$", (%s,%s), (%s,%s), %s);' % (
content, x, y, -self.opos[0], -self.opos[1], pen)
return asy
def total_extent(extents):
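    # e.g. total_extent([[(0, 0), (2, 1)], [(-1, 3)]]) == (-1, 2, 0, 3)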
xmin = xmax = ymin = ymax = None
for extent in extents:
for x, y in extent:
if xmin is None or x < xmin:
xmin = x
if xmax is None or x > xmax:
xmax = x
if ymin is None or y < ymin:
ymin = y
if ymax is None or y > ymax:
ymax = y
return xmin, xmax, ymin, ymax
class EdgeForm(Builtin):
"""
>> Graphics[{EdgeForm[{Thick, Green}], Disk[]}]
= -Graphics-
>> Graphics[{Style[Disk[],EdgeForm[{Thick,Red}]], Circle[{1,1}]}]
= -Graphics-
"""
pass
class FaceForm(Builtin):
pass
def _style(graphics, item):
head = item.get_head_name()
if head in style_heads:
klass = get_class(head)
style = klass.create_as_style(klass, graphics, item)
elif head in ('System`EdgeForm', 'System`FaceForm'):
style = graphics.get_style_class()(graphics, edge=head == 'System`EdgeForm',
face=head == 'System`FaceForm')
if len(item.leaves) > 1:
raise BoxConstructError
if item.leaves:
if item.leaves[0].has_form('List', None):
for dir in item.leaves[0].leaves:
style.append(dir, allow_forms=False)
else:
style.append(item.leaves[0], allow_forms=False)
else:
raise BoxConstructError
return style
class Style(object):
def __init__(self, graphics, edge=False, face=False):
self.styles = []
self.options = {}
self.graphics = graphics
self.edge = edge
self.face = face
self.klass = graphics.get_style_class()
def append(self, item, allow_forms=True):
self.styles.append(_style(self.graphics, item))
def set_option(self, name, value):
self.options[name] = value
def extend(self, style, pre=True):
if pre:
self.styles = style.styles + self.styles
else:
self.styles.extend(style.styles)
def clone(self):
result = self.klass(self.graphics, edge=self.edge, face=self.face)
result.styles = self.styles[:]
result.options = self.options.copy()
return result
def get_default_face_color(self):
return RGBColor(components=(0, 0, 0, 1))
def get_default_edge_color(self):
return RGBColor(components=(0, 0, 0, 1))
def get_style(self, style_class, face_element=None, default_to_faces=True,
consider_forms=True):
if face_element is not None:
default_to_faces = consider_forms = face_element
edge_style = face_style = None
if style_class == _Color:
if default_to_faces:
face_style = self.get_default_face_color()
else:
edge_style = self.get_default_edge_color()
elif style_class == _Thickness:
if not default_to_faces:
edge_style = AbsoluteThickness(self.graphics, value=0.5)
for item in self.styles:
if isinstance(item, style_class):
if default_to_faces:
face_style = item
else:
edge_style = item
elif isinstance(item, Style):
if consider_forms:
if item.edge:
edge_style, _ = item.get_style(
style_class, default_to_faces=False,
consider_forms=False)
elif item.face:
_, face_style = item.get_style(
style_class, default_to_faces=True,
consider_forms=False)
return edge_style, face_style
def get_option(self, name):
return self.options.get(name, None)
def get_line_width(self, face_element=True):
if self.graphics.pixel_width is None:
return 0
edge_style, _ = self.get_style(
_Thickness, default_to_faces=face_element,
consider_forms=face_element)
if edge_style is None:
return 0
return edge_style.get_thickness()
def _flatten(leaves):
for leaf in leaves:
if leaf.get_head_name() == 'System`List':
flattened = leaf.flatten(Symbol('List'))
if flattened.get_head_name() == 'System`List':
for x in flattened.leaves:
yield x
else:
yield flattened
else:
yield leaf
class _GraphicsElements(object):
def __init__(self, content, evaluation):
self.evaluation = evaluation
self.elements = []
builtins = evaluation.definitions.builtin
def get_options(name):
builtin = builtins.get(name)
if builtin is None:
return None
return builtin.options
def stylebox_style(style, specs):
new_style = style.clone()
for spec in _flatten(specs):
head_name = spec.get_head_name()
if head_name in style_and_form_heads:
new_style.append(spec)
elif head_name == 'System`Rule' and len(spec.leaves) == 2:
option, expr = spec.leaves
if not isinstance(option, Symbol):
raise BoxConstructError
name = option.get_name()
create = style_options.get(name, None)
if create is None:
raise BoxConstructError
new_style.set_option(name, create(style.graphics, expr))
else:
raise BoxConstructError
return new_style
def convert(content, style):
if content.has_form('List', None):
items = content.leaves
else:
items = [content]
style = style.clone()
for item in items:
if item.get_name() == 'System`Null':
continue
head = item.get_head_name()
if head in style_and_form_heads:
style.append(item)
elif head == 'System`StyleBox':
if len(item.leaves) < 1:
raise BoxConstructError
for element in convert(item.leaves[0], stylebox_style(style, item.leaves[1:])):
yield element
elif head[-3:] == 'Box': # and head[:-3] in element_heads:
element_class = get_class(head)
if element_class is not None:
options = get_options(head[:-3])
if options:
data, options = _data_and_options(item.leaves, options)
new_item = Expression(head, *data)
element = get_class(head)(self, style, new_item, options)
else:
element = get_class(head)(self, style, item)
yield element
else:
raise BoxConstructError
elif head == 'System`List':
for element in convert(item, style):
yield element
else:
raise BoxConstructError
self.elements = list(convert(content, self.get_style_class()(self)))
def create_style(self, expr):
style = self.get_style_class()(self)
def convert(expr):
if expr.has_form(('List', 'Directive'), None):
for item in expr.leaves:
convert(item)
else:
style.append(expr)
convert(expr)
return style
def get_style_class(self):
return Style
class GraphicsElements(_GraphicsElements):
coords = Coords
def __init__(self, content, evaluation, neg_y=False):
super(GraphicsElements, self).__init__(content, evaluation)
self.neg_y = neg_y
self.xmin = self.ymin = self.pixel_width = None
self.pixel_height = self.extent_width = self.extent_height = None
self.view_width = None
def translate(self, coords):
if self.pixel_width is not None:
w = self.extent_width if self.extent_width > 0 else 1
h = self.extent_height if self.extent_height > 0 else 1
result = [(coords[0] - self.xmin) * self.pixel_width / w,
(coords[1] - self.ymin) * self.pixel_height / h]
if self.neg_y:
result[1] = self.pixel_height - result[1]
return tuple(result)
else:
return (coords[0], coords[1])
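    # Sketch of the mapping above: with xmin=0, extent_width=10 and
    # pixel_width=100 (neg_y False), translate((5, y)) scales x to 50.0,
    # i.e. a plain linear world-to-pixel transform.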
def translate_absolute(self, d):
if self.pixel_width is None:
return (0, 0)
else:
            scale = 96.0 / 72  # printer's points (1/72 inch) to pixels at 96 dpi
            return (d[0] * scale, (-1 if self.neg_y else 1) * d[1] * scale)
def translate_relative(self, x):
if self.pixel_width is None:
return 0
else:
return x * self.pixel_width
def extent(self, completely_visible_only=False):
if completely_visible_only:
ext = total_extent([element.extent() for element in self.elements
if element.is_completely_visible])
else:
ext = total_extent([element.extent() for element in self.elements])
xmin, xmax, ymin, ymax = ext
if xmin == xmax:
xmin = 0
xmax *= 2
if ymin == ymax:
ymin = 0
ymax *= 2
return xmin, xmax, ymin, ymax
def to_svg(self):
return '\n'.join(element.to_svg() for element in self.elements)
def to_asy(self):
return '\n'.join(element.to_asy() for element in self.elements)
def set_size(self, xmin, ymin, extent_width, extent_height, pixel_width,
pixel_height):
self.xmin, self.ymin = xmin, ymin
self.extent_width, self.extent_height = extent_width, extent_height
self.pixel_width, self.pixel_height = pixel_width, pixel_height
class GraphicsBox(BoxConstruct):
options = Graphics.options
attributes = ('HoldAll', 'ReadProtected')
def boxes_to_text(self, leaves, **options):
self._prepare_elements(leaves, options) # to test for Box errors
return '-Graphics-'
def _get_image_size(self, options, graphics_options, max_width):
inside_row = options.pop('inside_row', False)
inside_list = options.pop('inside_list', False)
image_size_multipliers = options.pop('image_size_multipliers', None)
aspect_ratio = graphics_options['System`AspectRatio']
if image_size_multipliers is None:
image_size_multipliers = (0.5, 0.25)
if aspect_ratio == Symbol('Automatic'):
aspect = None
else:
aspect = aspect_ratio.round_to_float()
image_size = graphics_options['System`ImageSize']
if isinstance(image_size, Integer):
base_width = image_size.get_int_value()
base_height = None # will be computed later in calc_dimensions
elif image_size.has_form('System`List', 2):
base_width, base_height = ([x.round_to_float() for x in image_size.leaves] + [0, 0])[:2]
if base_width is None or base_height is None:
raise BoxConstructError
aspect = base_height / base_width
else:
image_size = image_size.get_name()
base_width, base_height = {
'System`Automatic': (400, 350),
'System`Tiny': (100, 100),
'System`Small': (200, 200),
'System`Medium': (400, 350),
'System`Large': (600, 500),
}.get(image_size, (None, None))
if base_width is None:
raise BoxConstructError
if max_width is not None and base_width > max_width:
base_width = max_width
if inside_row:
multi = image_size_multipliers[1]
elif inside_list:
multi = image_size_multipliers[0]
else:
multi = 1
return base_width, base_height, multi, aspect
def _prepare_elements(self, leaves, options, neg_y=False, max_width=None):
if not leaves:
raise BoxConstructError
graphics_options = self.get_option_values(leaves[1:], **options)
background = graphics_options['System`Background']
if isinstance(background, Symbol) and background.get_name() == 'System`Automatic':
self.background_color = None
else:
            self.background_color = _Color.create(background)
base_width, base_height, size_multiplier, size_aspect = \
self._get_image_size(options, graphics_options, max_width)
plot_range = graphics_options['System`PlotRange'].to_python()
if plot_range == 'System`Automatic':
plot_range = ['System`Automatic', 'System`Automatic']
if not isinstance(plot_range, list) or len(plot_range) != 2:
raise BoxConstructError
elements = GraphicsElements(leaves[0], options['evaluation'], neg_y)
axes = [] # to be filled further down
def calc_dimensions(final_pass=True):
"""
calc_dimensions gets called twice: In the first run
(final_pass = False, called inside _prepare_elements), the extent
of all user-defined graphics is determined.
Axes are created accordingly.
In the second run (final_pass = True, called from outside),
the dimensions of these axes are taken into account as well.
This is also important to size absolutely sized objects correctly
(e.g. values using AbsoluteThickness).
"""
# always need to compute extent if size aspect is automatic
if 'System`Automatic' in plot_range or size_aspect is None:
xmin, xmax, ymin, ymax = elements.extent()
else:
xmin = xmax = ymin = ymax = None
if final_pass and any(x for x in axes) and plot_range != ['System`Automatic', 'System`Automatic']:
                # Take into account the dimensions of axes and axes labels
# (they should be displayed completely even when a specific
# PlotRange is given).
exmin, exmax, eymin, eymax = elements.extent(
completely_visible_only=True)
else:
exmin = exmax = eymin = eymax = None
def get_range(min, max):
if max < min:
min, max = max, min
elif min == max:
if min < 0:
min, max = 2 * min, 0
elif min > 0:
min, max = 0, 2 * min
else:
min, max = -1, 1
return min, max
try:
if plot_range[0] == 'System`Automatic':
if xmin is None and xmax is None:
xmin = 0
xmax = 1
elif xmin == xmax:
xmin -= 1
xmax += 1
elif (isinstance(plot_range[0], list) and
len(plot_range[0]) == 2):
xmin, xmax = list(map(float, plot_range[0]))
xmin, xmax = get_range(xmin, xmax)
xmin = elements.translate((xmin, 0))[0]
xmax = elements.translate((xmax, 0))[0]
if exmin is not None and exmin < xmin:
xmin = exmin
if exmax is not None and exmax > xmax:
xmax = exmax
else:
raise BoxConstructError
if plot_range[1] == 'System`Automatic':
if ymin is None and ymax is None:
ymin = 0
ymax = 1
elif ymin == ymax:
ymin -= 1
ymax += 1
elif (isinstance(plot_range[1], list) and
len(plot_range[1]) == 2):
ymin, ymax = list(map(float, plot_range[1]))
ymin, ymax = get_range(ymin, ymax)
ymin = elements.translate((0, ymin))[1]
ymax = elements.translate((0, ymax))[1]
if ymin > ymax:
ymin, ymax = ymax, ymin
if eymin is not None and eymin < ymin:
ymin = eymin
if eymax is not None and eymax > ymax:
ymax = eymax
else:
raise BoxConstructError
except (ValueError, TypeError):
raise BoxConstructError
w = 0 if (xmin is None or xmax is None) else xmax - xmin
h = 0 if (ymin is None or ymax is None) else ymax - ymin
if size_aspect is None:
aspect = h / w
else:
aspect = size_aspect
height = base_height
if height is None:
height = base_width * aspect
width = height / aspect
if width > base_width:
width = base_width
height = width * aspect
width *= size_multiplier
height *= size_multiplier
return xmin, xmax, ymin, ymax, w, h, width, height
xmin, xmax, ymin, ymax, w, h, width, height = calc_dimensions(
final_pass=False)
elements.set_size(xmin, ymin, w, h, width, height)
xmin -= w * 0.02
xmax += w * 0.02
ymin -= h * 0.02
ymax += h * 0.02
axes.extend(self.create_axes(elements, graphics_options, xmin, xmax, ymin, ymax))
return elements, calc_dimensions
def boxes_to_tex(self, leaves, **options):
elements, calc_dimensions = self._prepare_elements(
leaves, options, max_width=450)
xmin, xmax, ymin, ymax, w, h, width, height = calc_dimensions()
elements.view_width = w
asy_completely_visible = '\n'.join(
element.to_asy() for element in elements.elements
if element.is_completely_visible)
asy_regular = '\n'.join(
element.to_asy() for element in elements.elements
if not element.is_completely_visible)
asy_box = 'box((%s,%s), (%s,%s))' % (asy_number(xmin), asy_number(ymin), asy_number(xmax), asy_number(ymax))
if self.background_color is not None:
color, opacity = self.background_color.to_asy()
asy_background = 'filldraw(%s, %s);' % (asy_box, color)
else:
asy_background = ''
tex = r"""
\begin{asy}
size(%scm, %scm);
%s
%s
clip(%s);
%s
\end{asy}
""" % (
asy_number(width / 60), asy_number(height / 60),
asy_background,
asy_regular,
asy_box,
asy_completely_visible)
return tex
def boxes_to_xml(self, leaves, **options):
elements, calc_dimensions = self._prepare_elements(
leaves, options, neg_y=True)
xmin, xmax, ymin, ymax, w, h, width, height = calc_dimensions()
elements.view_width = w
svg = elements.to_svg()
if self.background_color is not None:
svg = '<rect x="%f" y="%f" width="%f" height="%f" style="fill:%s"/>%s' % (
xmin, ymin, w, h, self.background_color.to_css()[0], svg)
xmin -= 1
ymin -= 1
w += 2
h += 2
svg_xml = '''
<svg xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
version="1.1"
viewBox="%s">
%s
</svg>
''' % (' '.join('%f' % t for t in (xmin, ymin, w, h)), svg)
return '<mglyph width="%dpx" height="%dpx" src="data:image/svg+xml;base64,%s"/>' % (
int(width),
int(height),
base64.b64encode(svg_xml.encode('utf8')).decode('utf8'))
def axis_ticks(self, xmin, xmax):
def round_to_zero(value):
if value == 0:
return 0
elif value < 0:
return ceil(value)
else:
return floor(value)
def round_step(value):
if not value:
return 1, 1
sub_steps = 5
try:
shift = 10.0 ** floor(log10(value))
except ValueError:
return 1, 1
value = value / shift
if value < 1.5:
value = 1
elif value < 3:
value = 2
sub_steps = 4
elif value < 8:
value = 5
else:
value = 10
return value * shift, sub_steps
step_x, sub_x = round_step((xmax - xmin) / 5.0)
step_x_small = step_x / sub_x
steps_x = int(floor((xmax - xmin) / step_x))
steps_x_small = int(floor((xmax - xmin) / step_x_small))
start_k_x = int(ceil(xmin / step_x))
start_k_x_small = int(ceil(xmin / step_x_small))
if xmin <= 0 <= xmax:
origin_k_x = 0
else:
origin_k_x = start_k_x
origin_x = origin_k_x * step_x
ticks = []
ticks_small = []
for k in range(start_k_x, start_k_x + steps_x + 1):
if k != origin_k_x:
x = k * step_x
if x > xmax:
break
ticks.append(x)
for k in range(start_k_x_small, start_k_x_small + steps_x_small + 1):
if k % sub_x != 0:
x = k * step_x_small
if x > xmax:
break
ticks_small.append(x)
return ticks, ticks_small, origin_x
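    # Worked example for the tick logic above: axis_ticks(0, 10) rounds the
    # raw step (10 - 0) / 5 = 2.0 to step_x=2 with sub_x=4, so the major
    # ticks are [2, 4, 6, 8, 10] (0 is skipped as the origin) and minor
    # ticks fall every 0.5.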
def create_axes(self, elements, graphics_options, xmin, xmax, ymin, ymax):
axes = graphics_options.get('System`Axes')
if axes.is_true():
axes = (True, True)
elif axes.has_form('List', 2):
axes = (axes.leaves[0].is_true(), axes.leaves[1].is_true())
else:
axes = (False, False)
ticks_style = graphics_options.get('System`TicksStyle')
axes_style = graphics_options.get('System`AxesStyle')
label_style = graphics_options.get('System`LabelStyle')
if ticks_style.has_form('List', 2):
ticks_style = ticks_style.leaves
else:
ticks_style = [ticks_style] * 2
if axes_style.has_form('List', 2):
axes_style = axes_style.leaves
else:
axes_style = [axes_style] * 2
ticks_style = [elements.create_style(s) for s in ticks_style]
axes_style = [elements.create_style(s) for s in axes_style]
label_style = elements.create_style(label_style)
ticks_style[0].extend(axes_style[0])
ticks_style[1].extend(axes_style[1])
def add_element(element):
element.is_completely_visible = True
elements.elements.append(element)
ticks_x, ticks_x_small, origin_x = self.axis_ticks(xmin, xmax)
ticks_y, ticks_y_small, origin_y = self.axis_ticks(ymin, ymax)
axes_extra = 6
tick_small_size = 3
tick_large_size = 5
tick_label_d = 2
ticks_x_int = all(floor(x) == x for x in ticks_x)
ticks_y_int = all(floor(x) == x for x in ticks_y)
for index, (
min, max, p_self0, p_other0, p_origin,
ticks, ticks_small, ticks_int) in enumerate([
(xmin, xmax, lambda y: (0, y), lambda x: (x, 0),
lambda x: (x, origin_y), ticks_x, ticks_x_small, ticks_x_int),
(ymin, ymax, lambda x: (x, 0), lambda y: (0, y),
lambda y: (origin_x, y), ticks_y, ticks_y_small, ticks_y_int)]):
if axes[index]:
add_element(LineBox(
elements, axes_style[index],
lines=[[Coords(elements, pos=p_origin(min),
d=p_other0(-axes_extra)),
Coords(elements, pos=p_origin(max),
d=p_other0(axes_extra))]]))
ticks_lines = []
tick_label_style = ticks_style[index].clone()
tick_label_style.extend(label_style)
for x in ticks:
ticks_lines.append([Coords(elements, pos=p_origin(x)),
Coords(elements, pos=p_origin(x),
d=p_self0(tick_large_size))])
if ticks_int:
content = String(str(int(x)))
elif x == floor(x):
content = String('%.1f' % x) # e.g. 1.0 (instead of 1.)
else:
content = String('%g' % x) # fix e.g. 0.6000000000000001
add_element(InsetBox(
elements, tick_label_style,
content=content,
pos=Coords(elements, pos=p_origin(x),
d=p_self0(-tick_label_d)), opos=p_self0(1)))
for x in ticks_small:
pos = p_origin(x)
ticks_lines.append([Coords(elements, pos=pos),
Coords(elements, pos=pos,
d=p_self0(tick_small_size))])
                add_element(LineBox(elements, axes_style[index],
                                    lines=ticks_lines))
        return axes
class Directive(Builtin):
attributes = ('ReadProtected',)
class Blend(Builtin):
"""
<dl>
<dt>'Blend[{$c1$, $c2$}]'
<dd>represents the color between $c1$ and $c2$.
<dt>'Blend[{$c1$, $c2$}, $x$]'
<dd>represents the color formed by blending $c1$ and $c2$ with
factors 1 - $x$ and $x$ respectively.
<dt>'Blend[{$c1$, $c2$, ..., $cn$}, $x$]'
<dd>blends between the colors $c1$ to $cn$ according to the
factor $x$.
</dl>
>> Blend[{Red, Blue}]
= RGBColor[0.5, 0., 0.5]
>> Blend[{Red, Blue}, 0.3]
= RGBColor[0.7, 0., 0.3]
>> Blend[{Red, Blue, Green}, 0.75]
= RGBColor[0., 0.5, 0.5]
>> Graphics[Table[{Blend[{Red, Green, Blue}, x], Rectangle[{10 x, 0}]}, {x, 0, 1, 1/10}]]
= -Graphics-
>> Graphics[Table[{Blend[{RGBColor[1, 0.5, 0, 0.5], RGBColor[0, 0, 1, 0.5]}, x], Disk[{5x, 0}]}, {x, 0, 1, 1/10}]]
= -Graphics-
#> Blend[{Red, Green, Blue}, {1, 0.5}]
: {1, 0.5} should be a real number or a list of non-negative numbers, which has the same length as {RGBColor[1, 0, 0], RGBColor[0, 1, 0], RGBColor[0, 0, 1]}.
= Blend[{RGBColor[1, 0, 0], RGBColor[0, 1, 0], RGBColor[0, 0, 1]}, {1, 0.5}]
"""
messages = {
'arg': ("`1` is not a valid list of color or gray-level directives, "
"or pairs of a real number and a directive."),
'argl': ("`1` should be a real number or a list of non-negative "
"numbers, which has the same length as `2`."),
}
rules = {
'Blend[colors_]': 'Blend[colors, ConstantArray[1, Length[colors]]]',
}
    def do_blend(self, colors, values):
        # Weighted average of the colors' components; mixed color classes
        # are first converted to RGBColor so the components are comparable.
        color_type = None
        homogeneous = True
        for color in colors:
            if color_type is None:
                color_type = color.__class__
            elif color.__class__ != color_type:
                homogeneous = False
                break
        if not homogeneous:
            colors = [RGBColor(components=color.to_rgba()) for color in colors]
            color_type = RGBColor
        total = sum(values)
        result = None
        for color, value in zip(colors, values):
            frac = value / total
            part = [component * frac for component in color.components]
            if result is None:
                result = part
            else:
                result = [r + p for r, p in zip(result, part)]
        return color_type(components=result)
def apply(self, colors, u, evaluation):
'Blend[{colors___}, u_]'
colors_orig = colors
try:
colors = [_Color.create(color) for color in colors.get_sequence()]
if not colors:
raise ColorError
except ColorError:
evaluation.message('Blend', 'arg', Expression('List', colors_orig))
return
if u.has_form('List', None):
values = [value.round_to_float(evaluation) for value in u.leaves]
if None in values:
values = None
if len(u.leaves) != len(colors):
values = None
use_list = True
else:
values = u.round_to_float(evaluation)
if values is None:
pass
elif values > 1:
values = 1.0
elif values < 0:
values = 0.0
use_list = False
if values is None:
return evaluation.message('Blend', 'argl', u, Expression(
'List', colors_orig))
if use_list:
return self.do_blend(colors, values).to_expr()
else:
x = values
pos = int(floor(x * (len(colors) - 1)))
x = (x - pos * 1.0 / (len(colors) - 1)) * (len(colors) - 1)
if pos == len(colors) - 1:
return colors[-1].to_expr()
else:
return self.do_blend(
colors[pos:(pos + 2)], [1 - x, x]).to_expr()
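# Worked example of the interpolation above: Blend[{Red, Blue, Green}, 0.75]
# lands between the second and third colors (pos = floor(0.75 * 2) = 1,
# local x = 0.5), so do_blend mixes Blue and Green equally, giving
# RGBColor[0., 0.5, 0.5] as in the doctest.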
class Lighter(Builtin):
"""
<dl>
<dt>'Lighter[$c$, $f$]'
<dd>is equivalent to 'Blend[{$c$, White}, $f$]'.
<dt>'Lighter[$c$]'
<dd>is equivalent to 'Lighter[$c$, 1/3]'.
</dl>
>> Lighter[Orange, 1/4]
= RGBColor[1., 0.625, 0.25]
>> Graphics[{Lighter[Orange, 1/4], Disk[]}]
= -Graphics-
>> Graphics[Table[{Lighter[Orange, x], Disk[{12x, 0}]}, {x, 0, 1, 1/6}]]
= -Graphics-
"""
rules = {
'Lighter[c_, f_]': 'Blend[{c, White}, f]',
'Lighter[c_]': 'Lighter[c, 1/3]',
}
class Darker(Builtin):
"""
<dl>
<dt>'Darker[$c$, $f$]'
<dd>is equivalent to 'Blend[{$c$, Black}, $f$]'.
<dt>'Darker[$c$]'
<dd>is equivalent to 'Darker[$c$, 1/3]'.
</dl>
>> Graphics[Table[{Darker[Yellow, x], Disk[{12x, 0}]}, {x, 0, 1, 1/6}]]
= -Graphics-
"""
rules = {
'Darker[c_, f_]': 'Blend[{c, Black}, f]',
'Darker[c_]': 'Darker[c, 1/3]',
}
class _ColorObject(Builtin):
text_name = None
def __init__(self, *args, **kwargs):
super(_ColorObject, self).__init__(*args, **kwargs)
if self.text_name is None:
text_name = strip_context(self.get_name()).lower()
else:
text_name = self.text_name
doc = """
<dl>
<dt>'%(name)s'
<dd>represents the color %(text_name)s in graphics.
</dl>
>> Graphics[{EdgeForm[Black], %(name)s, Disk[]}, ImageSize->Small]
= -Graphics-
>> %(name)s // ToBoxes
= StyleBox[GraphicsBox[...], ...]
""" % {'name': strip_context(self.get_name()), 'text_name': text_name}
if self.__doc__ is None:
self.__doc__ = doc
else:
self.__doc__ = doc + self.__doc__
class Black(_ColorObject):
"""
>> Black
= GrayLevel[0]
"""
rules = {
'Black': 'GrayLevel[0]',
}
class White(_ColorObject):
"""
>> White
= GrayLevel[1]
"""
rules = {
'White': 'GrayLevel[1]',
}
class Gray(_ColorObject):
"""
>> Gray
= GrayLevel[0.5]
"""
rules = {
'Gray': 'GrayLevel[0.5]',
}
class Red(_ColorObject):
"""
>> Red
= RGBColor[1, 0, 0]
"""
rules = {
'Red': 'RGBColor[1, 0, 0]',
}
class Green(_ColorObject):
"""
>> Green
= RGBColor[0, 1, 0]
"""
rules = {
'Green': 'RGBColor[0, 1, 0]',
}
class Blue(_ColorObject):
"""
>> Blue
= RGBColor[0, 0, 1]
"""
rules = {
'Blue': 'RGBColor[0, 0, 1]',
}
class Cyan(_ColorObject):
"""
>> Cyan
= RGBColor[0, 1, 1]
"""
rules = {
'Cyan': 'RGBColor[0, 1, 1]',
}
class Magenta(_ColorObject):
"""
>> Magenta
= RGBColor[1, 0, 1]
"""
rules = {
'Magenta': 'RGBColor[1, 0, 1]',
}
class Yellow(_ColorObject):
"""
>> Yellow
= RGBColor[1, 1, 0]
"""
rules = {
'Yellow': 'RGBColor[1, 1, 0]',
}
class Purple(_ColorObject):
rules = {
'Purple': 'RGBColor[0.5, 0, 0.5]',
}
class LightRed(_ColorObject):
text_name = 'light red'
rules = {
'LightRed': 'Lighter[Red, 0.85]',
}
class Orange(_ColorObject):
rules = {
'Orange': 'RGBColor[1, 0.5, 0]',
}
class Automatic(Builtin):
'''
<dl>
<dt>'Automatic'
<dd>is used to specify an automatically computed option value.
</dl>
'Automatic' is the default for 'PlotRange', 'ImageSize', and other
graphical options:
>> Cases[Options[Plot], HoldPattern[_ :> Automatic]]
= {Background :> Automatic, Exclusions :> Automatic, ImageSize :> Automatic, MaxRecursion :> Automatic, PlotRange :> Automatic, PlotRangePadding :> Automatic}
'''
class Tiny(Builtin):
'''
<dl>
<dt>'ImageSize' -> 'Tiny'
<dd>produces a tiny image.
</dl>
'''
class Small(Builtin):
'''
<dl>
<dt>'ImageSize' -> 'Small'
<dd>produces a small image.
</dl>
'''
class Medium(Builtin):
'''
<dl>
<dt>'ImageSize' -> 'Medium'
<dd>produces a medium-sized image.
</dl>
'''
class Large(Builtin):
'''
<dl>
<dt>'ImageSize' -> 'Large'
<dd>produces a large image.
</dl>
'''
element_heads = frozenset(system_symbols(
'Rectangle', 'Disk', 'Line', 'Arrow', 'FilledCurve', 'BezierCurve',
'Point', 'Circle', 'Polygon', 'RegularPolygon',
'Inset', 'Text', 'Sphere', 'Style'))
styles = system_symbols_dict({
'RGBColor': RGBColor,
'XYZColor': XYZColor,
'LABColor': LABColor,
'LCHColor': LCHColor,
'LUVColor': LUVColor,
'CMYKColor': CMYKColor,
'Hue': Hue,
'GrayLevel': GrayLevel,
'Thickness': Thickness,
'AbsoluteThickness': AbsoluteThickness,
'Thick': Thick,
'Thin': Thin,
'PointSize': PointSize,
'Arrowheads': Arrowheads,
})
style_options = system_symbols_dict({
'FontColor': _style,
})
style_heads = frozenset(styles.keys())
style_and_form_heads = frozenset(style_heads.union(set(['System`EdgeForm', 'System`FaceForm'])))
GLOBALS = system_symbols_dict({
'Rectangle': Rectangle,
'Disk': Disk,
'Circle': Circle,
'Polygon': Polygon,
'RegularPolygon': RegularPolygon,
'Inset': Inset,
'Text': Text,
'RectangleBox': RectangleBox,
'DiskBox': DiskBox,
'LineBox': LineBox,
'BezierCurveBox': BezierCurveBox,
'FilledCurveBox': FilledCurveBox,
'ArrowBox': ArrowBox,
'CircleBox': CircleBox,
'PolygonBox': PolygonBox,
'RegularPolygonBox': RegularPolygonBox,
'PointBox': PointBox,
'InsetBox': InsetBox,
})
GLOBALS.update(styles)
GRAPHICS_SYMBOLS = frozenset(
['System`List', 'System`Rule', 'System`VertexColors'] +
list(element_heads) +
[element + 'Box' for element in element_heads] +
list(style_heads))
```
#### File: mathics/core/definitions.py
```python
import pickle
import os
import base64
import re
import bisect
from collections import defaultdict
from mathics.core.expression import Expression, Symbol, String, fully_qualified_symbol_name, strip_context
from mathics.core.characters import letters, letterlikes
names_wildcards = "@*"
base_names_pattern = r'((?![0-9])([0-9${0}{1}{2}])+)'.format(letters, letterlikes, names_wildcards)
full_names_pattern = r'(`?{0}(`{0})*)'.format(base_names_pattern)
def get_file_time(file):
try:
return os.stat(file).st_mtime
except OSError:
return 0
def valuesname(name):
" 'NValues' -> 'n' "
assert name.startswith('System`'), name
if name == 'System`Messages':
return 'messages'
else:
return name[7:-6].lower()
class Definitions(object):
def __init__(self, add_builtin=False, builtin_filename=None):
super(Definitions, self).__init__()
self.builtin = {}
self.user = {}
self.definitions_cache = {}
self.lookup_cache = {}
self.proxy = defaultdict(set)
self.now = 0 # increments whenever something is updated
if add_builtin:
from mathics.builtin import modules, contribute
from mathics.core.evaluation import Evaluation
from mathics.settings import ROOT_DIR
loaded = False
if builtin_filename is not None:
builtin_dates = [get_file_time(module.__file__)
for module in modules]
builtin_time = max(builtin_dates)
if get_file_time(builtin_filename) > builtin_time:
builtin_file = open(builtin_filename, 'rb')
self.builtin = pickle.load(builtin_file)
loaded = True
if not loaded:
contribute(self)
if builtin_filename is not None:
builtin_file = open(builtin_filename, 'wb')
pickle.dump(self.builtin, builtin_file, -1)
for root, dirs, files in os.walk(os.path.join(ROOT_DIR, 'autoload')):
for path in [os.path.join(root, f) for f in files if f.endswith('.m')]:
Expression('Get', String(path)).evaluate(Evaluation(self))
# Move any user definitions created by autoloaded files to
# builtins, and clear out the user definitions list. This
# means that any autoloaded definitions become shared
# between users and no longer disappear after a Quit[].
#
# Autoloads that accidentally define a name in Global`
# could cause confusion, so check for this.
#
for name in self.user:
if name.startswith('Global`'):
raise ValueError("autoload defined %s." % name)
self.builtin.update(self.user)
self.user = {}
self.clear_cache()
    def clear_cache(self, name=None):
        # The definitions cache (self.definitions_cache) caches both
        # incomplete and complete names -> Definition(), e.g. "xy" -> d and
        # "MyContext`xy" -> d. It must be cleared whenever a Definition()
        # changes (which can happen when a Definition combined from a
        # builtin and a user definition has its user part updated), or when
        # the lookup rules change and we could end up at a completely
        # different Definition.
        # The lookup cache (self.lookup_cache) caches what lookup_name()
        # does. It only needs updating when a change might alter the result
        # lookup_name() calculates; a change inside a Definition() does not
        # affect it.
        # self.proxy keeps track of all the names we cache. To clear the
        # caches for a single name, e.g. 'MySymbol', we must be able to find
        # every related entry: 'MySymbol', 'A`MySymbol', 'C`A`MySymbol', and
        # so on. proxy identifies symbols by their stripped name and thus
        # may return symbols from other contexts that are not actually
        # affected; still, this is a safe (conservative) solution.
if name is None:
self.definitions_cache = {}
self.lookup_cache = {}
self.proxy = defaultdict(set)
else:
definitions_cache = self.definitions_cache
lookup_cache = self.lookup_cache
tail = strip_context(name)
for k in self.proxy.pop(tail, []):
definitions_cache.pop(k, None)
lookup_cache.pop(k, None)
def clear_definitions_cache(self, name):
definitions_cache = self.definitions_cache
tail = strip_context(name)
for k in self.proxy.pop(tail, []):
definitions_cache.pop(k, None)
def last_changed(self, expr):
# timestamp for the most recently changed part of a given expression.
if isinstance(expr, Symbol):
symb = self.get_definition(expr.get_name(), only_if_exists=True)
if symb is None:
# symbol doesn't exist so it was never changed
return 0
try:
return symb.changed
except AttributeError:
# must be system symbol
symb.changed = 0
return 0
result = 0
head = expr.get_head()
head_changed = self.last_changed(head)
result = max(result, head_changed)
for leaf in expr.get_leaves():
leaf_changed = self.last_changed(leaf)
result = max(result, leaf_changed)
return result
def get_current_context(self):
# It's crucial to specify System` in this get_ownvalue() call,
# otherwise we'll end up back in this function and trigger
# infinite recursion.
context_rule = self.get_ownvalue('System`$Context')
context = context_rule.replace.get_string_value()
assert context is not None, "$Context somehow set to an invalid value"
return context
def get_context_path(self):
context_path_rule = self.get_ownvalue('System`$ContextPath')
context_path = context_path_rule.replace
assert context_path.has_form('System`List', None)
context_path = [c.get_string_value() for c in context_path.leaves]
assert not any([c is None for c in context_path])
return context_path
def set_current_context(self, context):
assert isinstance(context, str)
self.set_ownvalue('System`$Context', String(context))
self.clear_cache()
def set_context_path(self, context_path):
assert isinstance(context_path, list)
assert all([isinstance(c, str) for c in context_path])
self.set_ownvalue('System`$ContextPath',
Expression('System`List',
*[String(c) for c in context_path]))
self.clear_cache()
def get_builtin_names(self):
return set(self.builtin)
def get_user_names(self):
return set(self.user)
def get_names(self):
return self.get_builtin_names() | self.get_user_names()
def get_accessible_contexts(self):
"Return the contexts reachable though $Context or $ContextPath."
accessible_ctxts = set(self.get_context_path())
accessible_ctxts.add(self.get_current_context())
return accessible_ctxts
def get_matching_names(self, pattern):
"""
Return a list of the symbol names matching a string pattern.
A pattern containing a context mark (of the form
"ctx_pattern`short_pattern") matches symbols whose context and
short name individually match the two patterns. A pattern
without a context mark matches symbols accessible through
$Context and $ContextPath whose short names match the pattern.
'*' matches any sequence of symbol characters or an empty
string. '@' matches a non-empty sequence of symbol characters
which aren't uppercase letters. In the context pattern, both
'*' and '@' match context marks.
"""
if re.match(full_names_pattern, pattern) is None:
# The pattern contained characters which weren't allowed
# in symbols and aren't valid wildcards. Hence, the
# pattern can't match any symbols.
return []
# If we get here, there aren't any regexp metacharacters in
# the pattern.
if '`' in pattern:
ctx_pattern, short_pattern = pattern.rsplit('`', 1)
ctx_pattern = ((ctx_pattern + '`')
.replace('@', '[^A-Z`]+')
.replace('*', '.*')
.replace('$', r'\$'))
else:
short_pattern = pattern
# start with a group matching the accessible contexts
ctx_pattern = "(?:%s)" % "|".join(
re.escape(c) for c in self.get_accessible_contexts())
short_pattern = (short_pattern
.replace('@', '[^A-Z]+')
.replace('*', '[^`]*')
.replace('$', r'\$'))
regex = re.compile('^' + ctx_pattern + short_pattern + '$')
return [name for name in self.get_names() if regex.match(name)]
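    # Example (a sketch; assumes 'System`' is on $ContextPath): the pattern
    # 'Plot*' matches accessible names such as 'System`Plot' and
    # 'System`PlotRange', since '*' expands to any run of symbol characters.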
def lookup_name(self, name):
"""
Determine the full name (including context) for a symbol name.
- If the name begins with a context mark, it's in the context
given by $Context.
- Otherwise, if it contains a context mark, it's already fully
specified.
- Otherwise, it doesn't contain a context mark: try $Context,
then each element of $ContextPath, taking the first existing
symbol.
- Otherwise, it's a new symbol in $Context.
"""
cached = self.lookup_cache.get(name, None)
if cached is not None:
return cached
assert isinstance(name, str)
# Bail out if the name we're being asked to look up is already
# fully qualified.
if fully_qualified_symbol_name(name):
return name
current_context = self.get_current_context()
if '`' in name:
if name.startswith('`'):
return current_context + name.lstrip('`')
return name
with_context = current_context + name
if not self.have_definition(with_context):
for ctx in self.get_context_path():
n = ctx + name
if self.have_definition(n):
return n
return with_context
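    # Resolution sketch (assuming $Context is 'Global`' and 'System`' is on
    # $ContextPath): lookup_name('Sin') finds the existing 'System`Sin';
    # lookup_name('`x') yields 'Global`x'; an unknown plain name 'x' becomes
    # the new symbol 'Global`x'.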
def shorten_name(self, name_with_ctx):
if '`' not in name_with_ctx:
return name_with_ctx
def in_ctx(name, ctx):
return name.startswith(ctx) and '`' not in name[len(ctx):]
if in_ctx(name_with_ctx, self.get_current_context()):
return name_with_ctx[len(self.get_current_context()):]
for ctx in self.get_context_path():
if in_ctx(name_with_ctx, ctx):
return name_with_ctx[len(ctx):]
return name_with_ctx
def have_definition(self, name):
return self.get_definition(name, only_if_exists=True) is not None
def get_definition(self, name, only_if_exists=False):
definition = self.definitions_cache.get(name, None)
if definition is not None:
return definition
original_name = name
name = self.lookup_name(name)
user = self.user.get(name, None)
builtin = self.builtin.get(name, None)
if user is None and builtin is None:
definition = None
elif builtin is None:
definition = user
elif user is None:
definition = builtin
else:
if user:
attributes = user.attributes
elif builtin:
attributes = builtin.attributes
else:
attributes = set()
if not user:
user = Definition(name=name)
if not builtin:
builtin = Definition(name=name)
options = builtin.options.copy()
options.update(user.options)
formatvalues = builtin.formatvalues.copy()
for form, rules in user.formatvalues.items():
if form in formatvalues:
formatvalues[form].extend(rules)
else:
formatvalues[form] = rules
definition = Definition(name=name,
ownvalues=user.ownvalues + builtin.ownvalues,
downvalues=user.downvalues + builtin.downvalues,
subvalues=user.subvalues + builtin.subvalues,
upvalues=user.upvalues + builtin.upvalues,
formatvalues=formatvalues,
messages=user.messages + builtin.messages,
attributes=attributes,
options=options,
nvalues=user.nvalues + builtin.nvalues,
defaultvalues=user.defaultvalues +
builtin.defaultvalues,
)
if definition is not None:
self.proxy[strip_context(original_name)].add(original_name)
self.definitions_cache[original_name] = definition
self.lookup_cache[original_name] = name
elif not only_if_exists:
definition = Definition(name=name)
return definition
def get_attributes(self, name):
return self.get_definition(name).attributes
def get_ownvalues(self, name):
return self.get_definition(name).ownvalues
def get_downvalues(self, name):
return self.get_definition(name).downvalues
def get_subvalues(self, name):
return self.get_definition(name).subvalues
def get_upvalues(self, name):
return self.get_definition(name).upvalues
def get_formats(self, name, format=''):
formats = self.get_definition(name).formatvalues
result = formats.get(format, []) + formats.get('', [])
result.sort()
return result
def get_nvalues(self, name):
return self.get_definition(name).nvalues
def get_defaultvalues(self, name):
return self.get_definition(name).defaultvalues
def get_value(self, name, pos, pattern, evaluation):
assert isinstance(name, str)
assert '`' in name
rules = self.get_definition(name).get_values_list(valuesname(pos))
for rule in rules:
result = rule.apply(pattern, evaluation)
if result is not None:
return result
def get_user_definition(self, name, create=True):
assert not isinstance(name, Symbol)
existing = self.user.get(name)
if existing:
return existing
else:
if not create:
return None
builtin = self.builtin.get(name)
if builtin:
attributes = builtin.attributes
else:
attributes = set()
self.user[name] = Definition(name=name, attributes=attributes)
self.clear_cache(name)
return self.user[name]
def mark_changed(self, definition):
self.now += 1
definition.changed = self.now
def reset_user_definition(self, name):
assert not isinstance(name, Symbol)
fullname = self.lookup_name(name)
del self.user[fullname]
self.clear_cache(fullname)
# TODO fix changed
def add_user_definition(self, name, definition):
assert not isinstance(name, Symbol)
self.mark_changed(definition)
fullname = self.lookup_name(name)
self.user[fullname] = definition
self.clear_cache(fullname)
def set_attribute(self, name, attribute):
definition = self.get_user_definition(self.lookup_name(name))
definition.attributes.add(attribute)
self.mark_changed(definition)
self.clear_definitions_cache(name)
def set_attributes(self, name, attributes):
definition = self.get_user_definition(self.lookup_name(name))
definition.attributes = set(attributes)
self.mark_changed(definition)
self.clear_definitions_cache(name)
def clear_attribute(self, name, attribute):
definition = self.get_user_definition(self.lookup_name(name))
if attribute in definition.attributes:
definition.attributes.remove(attribute)
self.mark_changed(definition)
self.clear_definitions_cache(name)
def add_rule(self, name, rule, position=None):
definition = self.get_user_definition(self.lookup_name(name))
if position is None:
result = definition.add_rule(rule)
else:
result = definition.add_rule_at(rule, position)
self.mark_changed(definition)
self.clear_definitions_cache(name)
return result
def add_format(self, name, rule, form=''):
definition = self.get_user_definition(self.lookup_name(name))
if isinstance(form, tuple) or isinstance(form, list):
forms = form
else:
forms = [form]
for form in forms:
if form not in definition.formatvalues:
definition.formatvalues[form] = []
insert_rule(definition.formatvalues[form], rule)
self.mark_changed(definition)
self.clear_definitions_cache(name)
def add_nvalue(self, name, rule):
definition = self.get_user_definition(self.lookup_name(name))
definition.add_rule_at(rule, 'n')
self.mark_changed(definition)
self.clear_definitions_cache(name)
def add_default(self, name, rule):
definition = self.get_user_definition(self.lookup_name(name))
definition.add_rule_at(rule, 'default')
self.mark_changed(definition)
self.clear_definitions_cache(name)
def add_message(self, name, rule):
definition = self.get_user_definition(self.lookup_name(name))
definition.add_rule_at(rule, 'messages')
self.mark_changed(definition)
self.clear_definitions_cache(name)
def set_values(self, name, values, rules):
pos = valuesname(values)
definition = self.get_user_definition(self.lookup_name(name))
definition.set_values_list(pos, rules)
self.mark_changed(definition)
self.clear_definitions_cache(name)
def get_options(self, name):
return self.get_definition(self.lookup_name(name)).options
def reset_user_definitions(self):
self.user = {}
self.clear_cache()
# TODO changed
def get_user_definitions(self):
return base64.encodebytes(pickle.dumps(self.user, protocol=2)).decode('ascii')
def set_user_definitions(self, definitions):
if definitions:
self.user = pickle.loads(base64.decodebytes(definitions.encode('ascii')))
else:
self.user = {}
self.clear_cache()
def get_ownvalue(self, name):
ownvalues = self.get_definition(self.lookup_name(name)).ownvalues
if ownvalues:
return ownvalues[0]
return None
def set_ownvalue(self, name, value):
from .expression import Symbol
from .rules import Rule
name = self.lookup_name(name)
self.add_rule(name, Rule(Symbol(name), value))
self.clear_cache(name)
def set_options(self, name, options):
definition = self.get_user_definition(self.lookup_name(name))
definition.options = options
self.mark_changed(definition)
self.clear_definitions_cache(name)
def unset(self, name, expr):
definition = self.get_user_definition(self.lookup_name(name))
result = definition.remove_rule(expr)
self.mark_changed(definition)
self.clear_definitions_cache(name)
return result
def get_config_value(self, name, default=None):
'Infinity -> None, otherwise returns integer.'
value = self.get_definition(name).ownvalues
if value:
try:
value = value[0].replace
except AttributeError:
return None
if value.get_name() == 'System`Infinity' or value.has_form('DirectedInfinity', 1):
return None
return int(value.get_int_value())
else:
return default
def set_config_value(self, name, new_value):
from mathics.core.expression import Integer
self.set_ownvalue(name, Integer(new_value))
def set_line_no(self, line_no):
self.set_config_value('$Line', line_no)
def get_line_no(self):
return self.get_config_value('$Line', 0)
def get_history_length(self):
history_length = self.get_config_value('$HistoryLength', 100)
if history_length is None or history_length > 100:
history_length = 100
return history_length
def get_tag_position(pattern, name):
if pattern.get_name() == name:
return 'own'
elif pattern.is_atom():
return None
else:
head_name = pattern.get_head_name()
if head_name == name:
return 'down'
elif head_name == 'System`Condition' and len(pattern.leaves) > 0:
return get_tag_position(pattern.leaves[0], name)
elif pattern.get_lookup_name() == name:
return 'sub'
else:
for leaf in pattern.leaves:
if leaf.get_lookup_name() == name:
return 'up'
return None
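# Illustration of the classification above for the (hypothetical) user
# symbol 'Global`f': the pattern f gives 'own', f[x_] gives 'down',
# f[x_][y_] gives 'sub', and g[f[x_]] gives 'up' (f occurs inside a leaf).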
def insert_rule(values, rule):
for index, existing in enumerate(values):
if existing.pattern.same(rule.pattern):
del values[index]
break
# use insort_left to guarantee that if equal rules exist, newer rules will
# get higher precedence by being inserted before them. see DownValues[].
bisect.insort_left(values, rule)
class Definition(object):
def __init__(self, name, rules=None, ownvalues=None, downvalues=None,
subvalues=None, upvalues=None, formatvalues=None,
messages=None, attributes=(), options=None, nvalues=None,
defaultvalues=None, builtin=None):
super(Definition, self).__init__()
self.name = name
if rules is None:
rules = []
if ownvalues is None:
ownvalues = []
if downvalues is None:
downvalues = []
if subvalues is None:
subvalues = []
if upvalues is None:
upvalues = []
if formatvalues is None:
formatvalues = {}
if options is None:
options = {}
if nvalues is None:
nvalues = []
if defaultvalues is None:
defaultvalues = []
if messages is None:
messages = []
self.ownvalues = ownvalues
self.downvalues = downvalues
self.subvalues = subvalues
self.upvalues = upvalues
for rule in rules:
self.add_rule(rule)
        self.formatvalues = dict((form, rules)
                                 for form, rules in formatvalues.items())
self.messages = messages
self.attributes = set(attributes)
for a in self.attributes:
assert '`' in a, "%s attribute %s has no context" % (name, a)
self.options = options
self.nvalues = nvalues
self.defaultvalues = defaultvalues
self.builtin = builtin
def get_values_list(self, pos):
assert pos.isalpha()
if pos == 'messages':
return self.messages
else:
return getattr(self, '%svalues' % pos)
def set_values_list(self, pos, rules):
assert pos.isalpha()
if pos == 'messages':
self.messages = rules
else:
setattr(self, '%svalues' % pos, rules)
def add_rule_at(self, rule, position):
values = self.get_values_list(position)
insert_rule(values, rule)
return True
def add_rule(self, rule):
pos = get_tag_position(rule.pattern, self.name)
if pos:
return self.add_rule_at(rule, pos)
return False
def remove_rule(self, lhs):
position = get_tag_position(lhs, self.name)
if position:
values = self.get_values_list(position)
for index, existing in enumerate(values):
if existing.pattern.expr.same(lhs):
del values[index]
return True
return False
def __repr__(self):
s = '<Definition: name: {}, downvalues: {}, formats: {}, attributes: {}>'.format(
self.name, self.downvalues, self.formatvalues, self.attributes)
        return s.encode('unicode_escape').decode('ascii')  # __repr__ must return str
```
#### File: core/parser/prescanner.py
```python
from mathics.core.characters import named_characters
from mathics.core.parser.errors import ScanError, IncompleteSyntaxError
class Prescanner(object):
r'''
Converts:
character codes to characters:
\.7A -> z
\:004a -> J
\041 -> !
unicode longnames to characters:
\[Theta] -> \u03B8
escape sequences:
\n -> literal \n
Also reports trailing \ characters as incomplete.
    The Prescanner works by partitioning the code into stubs and then
    joining the converted stubs back together.
'''
def __init__(self, feeder):
self.feeder = feeder # returns more code when asked
self.code = feeder.feed() # input code
self.pos = 0 # current position within code
def feed(self):
return self.feeder.feed()
def incomplete(self):
line = self.feed()
if not line:
self.feeder.message('Syntax', 'sntxi', self.code[self.pos:].rstrip())
raise IncompleteSyntaxError()
self.code += line
def scan(self):
# main loop
self.stubs = [] # stubs of code to be joined
self.start = self.pos # start of current stub
while self.pos < len(self.code):
if self.code[self.pos] == '\\':
if self.pos + 1 == len(self.code):
self.incomplete()
c = self.code[self.pos + 1]
if c == '.':
self.try_parse_base(2, 4, 16)
elif c == ':':
self.try_parse_base(2, 6, 16)
elif c == '[':
self.try_parse_longname(2)
elif c in '01234567':
self.try_parse_base(1, 4, 8)
elif c == '\n':
if self.pos + 2 == len(self.code):
self.incomplete()
self.stubs.append(self.code[self.start:self.pos])
self.newstub(self.pos + 2)
else:
self.pos += 1
else:
self.pos += 1
self.stubs.append(self.code[self.start:]) # final stub
# reduce
return ''.join(self.stubs)
def newstub(self, pos):
self.pos = pos
self.start = pos
def try_parse_base(self, start_shift, end_shift, base):
start, end = self.pos + start_shift, self.pos + end_shift
result = None
if end <= len(self.code):
text = self.code[start:end]
try:
result = int(text, base)
except ValueError:
pass # result remains None
if result is None:
l = end - start
if l == 2:
self.feeder.message('Syntax', 'sntoct2')
elif l == 3:
self.feeder.message('Syntax', 'sntoct1')
elif l == 4:
self.feeder.message('Syntax', 'snthex')
else:
raise ValueError()
self.feeder.message('Syntax', 'sntxb', self.code[self.pos:].rstrip('\n'))
raise ScanError()
self.stubs.append(self.code[self.start:self.pos])
self.stubs.append(chr(result))
self.newstub(end)
def try_parse_longname(self, start_shift):
i = self.pos + start_shift
while True:
if i == len(self.code):
self.incomplete()
if self.code[i] == ']':
break
i += 1
longname = self.code[self.pos + start_shift:i]
if longname.isalpha():
char = named_characters.get(longname)
if char is None:
self.feeder.message('Syntax', 'sntufn', longname)
pass # stay in same stub
else:
self.stubs.append(self.code[self.start:self.pos])
self.stubs.append(char)
self.newstub(i + 1)
self.pos = i + 1 # stay in same stub but skip ahead
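# Prescanning sketch (given some feeder supplying the raw code): the text
# r'\[Theta]' becomes the single character U+03B8, r'\.7A' becomes 'z'
# (hex 7A = 122), and r'\041' becomes '!' (octal 41 = 33).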
``` |
{
"source": "JoseCarlosPa/CECEQ-GO",
"score": 2
} |
#### File: WEB/instalaciones/views.py
```python
# Default imports
from django.shortcuts import render, redirect
# HTTP helpers
from django.http import HttpResponse
from django.contrib.auth import logout as do_logout
# Helper functions from usuarios.util
from usuarios.util import *
# Create your views here.
def instalaciones(request):
if request.user.is_authenticated:
return render(request, 'instalaciones/index.html')
return redirect('login')
```
#### File: WEB/reportes/views.py
```python
from io import BytesIO
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import A4
from django.http import HttpResponse
from django.shortcuts import render, redirect
from reportes.util import *
import datetime
import time
# Report URLs
def cursos(request):
if request.user.is_authenticated:
eventos = get_cursos()
args = {'eventos': eventos}
return render(request, 'reportes/cursos.html', args)
return redirect('login')
def salones(request):
if request.user.is_authenticated:
return render(request, 'reportes/salones.html')
return redirect('login')
def usuarios(request):
if request.user.is_authenticated:
return render(request, 'reportes/usuarios.html')
return redirect('login')
def visitas(request):
if request.user.is_authenticated:
users_anon = get_anonimus_users_today()
user_year = get_anonimus_users_year()
user_month = get_anonimus_users_mes()
u1 = get_anonimus_users_1()
u2 = get_anonimus_users_2()
u3 = get_anonimus_users_3()
u4 = get_anonimus_users_4()
u5 = get_anonimus_users_5()
u6 = get_anonimus_users_6()
u7 = get_anonimus_users_7()
u8 = get_anonimus_users_8()
u9 = get_anonimus_users_9()
u10 = get_anonimus_users_10()
u11 = get_anonimus_users_11()
u12 = get_anonimus_users_12()
args = {'user_anon': users_anon, 'user_year': user_year, 'u1': u1, 'u2': u2, 'u3': u3, 'u4': u4, 'u5': u5,
'u6': u6, 'u7': u7, 'u8': u8, 'u9': u9, 'u10': u10, 'u11': u11, 'u12': u12, 'user_month': user_month,
}
return render(request, 'reportes/visitas.html', args)
return redirect('login')
def show_users(request):
if request.user.is_authenticated:
hoy = time.strftime("%I:%M:%S")
users_anon = get_anonimus_users()
args = {'title': 'CECEQ Reportes', 'users_anon': users_anon, 'hoy': hoy}
return render(request, 'reportes/usuarios.html', args)
return redirect('login')
def reporte_usuarios_hoy(request):
if request.user.is_authenticated:
hoy = time.strftime("%d/%m/%y")
hora = time.strftime("%I:%M:%S")
users_anon = get_anonimus_users_today()
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename=Hoy-reporte-usuarios.pdf'
buffer = BytesIO()
c = canvas.Canvas(buffer, pagesize=A4)
c.setLineWidth(.3)
c.drawString(30, 750, 'CECEQ -GO')
c.drawString(30, 735, 'Reporte de Usuarios de hoy:')
c.drawString(480, 750, hoy)
c.drawString(480, 735, hora)
c.line(460, 747, 560, 747)
c.drawString(30, 700, 'El numero de usuarios hoy ha sido de:' + str(users_anon))
c.save()
pdf = buffer.getvalue()
buffer.close()
response.write(pdf)
return response
return redirect('login')
def reporte_usuarios_semana(request):
if request.user.is_authenticated:
hoy = time.strftime("%d/%m/%y")
hora = time.strftime("%I:%M:%S")
users_anon = get_anonimus_users()
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename=Semana-reporte-usuarios.pdf'
buffer = BytesIO()
c = canvas.Canvas(buffer, pagesize=A4)
c.setLineWidth(.3)
c.drawString(30, 750, 'CECEQ -GO')
c.drawString(30, 735, 'Reporte de Usuarios de esta semana:')
c.drawString(480, 750, hoy)
c.drawString(480, 735, hora)
c.line(460, 747, 560, 747)
        c.drawString(30, 700, 'El numero de usuarios esta semana ha sido de:' + str(users_anon))
c.save()
pdf = buffer.getvalue()
buffer.close()
response.write(pdf)
return response
return redirect('login')
def reporte_usuarios_mes(request):
if request.user.is_authenticated:
hoy = time.strftime("%d/%m/%y")
hora = time.strftime("%I:%M:%S")
        users_anon = get_anonimus_users_mes()
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename=Mes-reporte-usuarios.pdf'
buffer = BytesIO()
c = canvas.Canvas(buffer, pagesize=A4)
c.setLineWidth(.3)
c.drawString(30, 750, 'CECEQ -GO')
c.drawString(30, 735, 'Reporte de Usuarios de este mes:')
c.drawString(480, 750, hoy)
c.drawString(480, 735, hora)
c.line(460, 747, 560, 747)
        c.drawString(30, 700, 'El numero de usuarios este mes ha sido de:' + str(users_anon))
c.save()
pdf = buffer.getvalue()
buffer.close()
response.write(pdf)
return response
return redirect('login')
def reporte_usuarios_año(request):
if request.user.is_authenticated:
hoy = time.strftime("%d/%m/%y")
hora = time.strftime("%I:%M:%S")
users_anon = get_anonimus_users_year()
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename=Año-reporte-usuarios.pdf'
buffer = BytesIO()
c = canvas.Canvas(buffer, pagesize=A4)
c.setLineWidth(.3)
c.drawString(30, 750, 'CECEQ -GO')
c.drawString(30, 735, 'Reporte de Usuarios de este año:')
c.drawString(480, 750, hoy)
c.drawString(480, 735, hora)
c.line(460, 747, 560, 747)
c.drawString(30, 700, 'El numero de usuarios este año ha sido de:' + str(users_anon))
c.save()
pdf = buffer.getvalue()
buffer.close()
response.write(pdf)
return response
return redirect('login')
``` |
{
"source": "JoseCarlosPa/IA-Image-recognition",
"score": 3
} |
#### File: IA-Image-recognition/src/libs.py
```python
import tensorflow as tf
import matplotlib.pyplot as plt
import numpy as np
numberNames = [
'Zero', 'One', 'Two', 'Three', 'Four', 'Five', 'Six',
'Seven', 'Eight', 'Nine'
]
# Normalize pixel values from the 0-255 range down to 0-1 (black/white scale)
def imageWitheBlak(images, labels):
images = tf.cast(images, tf.float32)
images /= 255
return images, labels
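# Usage sketch (assumes a tf.data.Dataset of (image, label) pairs called
# `dataset`): dataset.map(imageWitheBlak) applies the normalization lazily.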
# Documentation https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.bar.html
def plotGrph(i, predictions, finalLabel):
predictions, finalLabel = predictions[i], finalLabel[i]
plt.grid(False)
plt.xticks([])
plt.yticks([])
    thisplot = plt.bar(range(10), predictions, color="#888888")  # one bar per class probability
    plt.ylim([0, 1])
    finalLabels = np.argmax(predictions)
    thisplot[finalLabels].set_color('red')  # predicted class (shown red when wrong)
    thisplot[finalLabel].set_color('green')  # true class (overwrites red when the prediction is correct)
def plotImg(i, predictions, finalLabels, images):
predictions, finalLabel, img = predictions[i], finalLabels[i], images[i]
plt.grid(False)
plt.xticks([])
plt.yticks([])
plt.imshow(img[..., 0], cmap=plt.cm.binary)
finalLabels = np.argmax(predictions)
if finalLabels == finalLabel:
color = 'green'
else:
color = 'red'
plt.xlabel("Prediccion: {}".format(numberNames[finalLabels]), color=color)
``` |
{
"source": "JoseCarlosSkar/telegram-bot-voice",
"score": 3
} |
#### File: JoseCarlosSkar/telegram-bot-voice/manager_audio.py
```python
import os
import subprocess
import speech_recognition as sr
from gtts import gTTS
r = sr.Recognizer()
async def stt ( file ):
with sr.AudioFile ( file ) as source:
r.adjust_for_ambient_noise ( source )
audio = r.record ( source )
try:
recognized_text = r.recognize_google ( audio, language = "pt" )
return recognized_text
except sr.UnknownValueError:
return sr.UnknownValueError # "Google Speech Recognition could not understand audio"
except sr.RequestError as e:
return sr.RequestError # ("Could not request results from Google Speech Recognition service; {0}".format(e))
except:
return False
async def tts ( text, chat_id, file_id ):
if "vtts" not in os.listdir():
os.mkdir ( "vtts" )
file = "vtts/" + str ( chat_id ) + "." + str ( file_id ) + ".mp3"
tts = gTTS ( text, lang = "pt" )
tts.save ( file )
return file
async def saveVoiceClient ( bot, chat_id, file_id ):
if "vstt" not in os.listdir():
os.mkdir ( "vstt" )
file = "vstt/" + str ( chat_id ) + "." + str ( file_id ) + ".ogg"
newfile = "vstt/" + str ( chat_id ) + "." + str ( file_id ) + ".wav"
await bot.download_file ( file_id, file )
try:
check_ffmpeg = subprocess.run ( [ "ffmpeg", "--help" ], stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL )
except FileNotFoundError:
print("[*] Error!!! ffmpeg não instalado.")
return False
try:
process = subprocess.run ( [ "ffmpeg", "-i", file, newfile ], stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL )
if process.returncode != 0: return False
except FileNotFoundError:
return False
except:
print ( "[*] Error!!! in saveVoiceClient." )
return False
os.remove ( file )
return newfile
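# End-to-end sketch of the voice pipeline (hypothetical `bot`, `chat_id` and
# `file_id` taken from a Telegram update handler):
#   wav = await saveVoiceClient(bot, chat_id, file_id)  # .ogg -> .wav
#   text = await stt(wav) if wav else None              # .wav -> text
#   reply = await tts(text, chat_id, file_id)           # text -> .mp3 path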
``` |
{
"source": "jose-carmona/keras-transformer",
"score": 2
} |
#### File: keras-transformer/example/utils.py
```python
import math
import warnings
import h5py
from keras import Model
def load_optimizer_weights(model: Model, model_save_path: str):
"""
Loads optimizer's weights for the model from an HDF5 file.
"""
with h5py.File(model_save_path, mode='r') as f:
if 'optimizer_weights' in f:
# Build train function (to get weight updates).
# noinspection PyProtectedMember
model._make_train_function()
optimizer_weights_group = f['optimizer_weights']
optimizer_weight_names = [
n.decode('utf8') for n in
optimizer_weights_group.attrs['weight_names']]
optimizer_weight_values = [
optimizer_weights_group[n]
for n in optimizer_weight_names]
try:
model.optimizer.set_weights(optimizer_weight_values)
except ValueError:
warnings.warn('Error in loading the saved optimizer '
'state. As a result, your model is '
'starting with a freshly initialized '
'optimizer.')
def contain_tf_gpu_mem_usage():
"""
By default TensorFlow may try to reserve all available GPU memory
making it impossible to train multiple networks at once.
This function will disable such behaviour in TensorFlow.
"""
from keras import backend
if backend.backend() != 'tensorflow':
return
try:
# noinspection PyPackageRequirements
import tensorflow as tf
except ImportError:
pass
else:
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.allow_growth = True # dynamically grow the memory
sess = tf.Session(config=config)
set_session(sess)
class CosineLRSchedule:
"""
Cosine annealing with warm restarts, described in paper
"SGDR: stochastic gradient descent with warm restarts"
https://arxiv.org/abs/1608.03983
Changes the learning rate, oscillating it between `lr_high` and `lr_low`.
It takes `period` epochs for the learning rate to drop to its very minimum,
after which it quickly returns back to `lr_high` (resets) and everything
starts over again.
With every reset:
* the period grows, multiplied by factor `period_mult`
* the maximum learning rate drops proportionally to `high_lr_mult`
This class is supposed to be used with
`keras.callbacks.LearningRateScheduler`.
"""
def __init__(self, lr_high: float, lr_low: float, initial_period: int = 50,
period_mult: float = 2, high_lr_mult: float = 0.97):
self._lr_high = lr_high
self._lr_low = lr_low
self._initial_period = initial_period
self._period_mult = period_mult
self._high_lr_mult = high_lr_mult
def __call__(self, epoch, lr):
return self.get_lr_for_epoch(epoch)
def get_lr_for_epoch(self, epoch):
assert epoch >= 0
t_cur = 0
lr_max = self._lr_high
period = self._initial_period
result = lr_max
for i in range(epoch + 1):
if i == epoch: # last iteration
result = (self._lr_low +
0.5 * (lr_max - self._lr_low) *
(1 + math.cos(math.pi * t_cur / period)))
else:
if t_cur == period:
period *= self._period_mult
lr_max *= self._high_lr_mult
t_cur = 0
else:
t_cur += 1
return result
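# Usage sketch (assumes a compiled Keras `model` and training arrays `x`, `y`):
#   from keras.callbacks import LearningRateScheduler
#   schedule = CosineLRSchedule(lr_high=1e-4, lr_low=1e-6, initial_period=10)
#   model.fit(x, y, epochs=60, callbacks=[LearningRateScheduler(schedule)])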
```
#### File: keras-transformer/keras_transformer/position.py
```python
import numpy as np
# noinspection PyPep8Naming
from keras import backend as K
from keras.engine import Layer
from keras.utils import get_custom_objects
def positional_signal(hidden_size: int, length: int,
min_timescale: float = 1.0, max_timescale: float = 1e4):
"""
Helper function, constructing basic positional encoding.
The code is partially based on implementation from Tensor2Tensor library
https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/layers/common_attention.py
"""
if hidden_size % 2 != 0:
        raise ValueError(
            f"The hidden dimension of the model must be divisible by 2. "
            f"Currently it is {hidden_size}")
position = K.arange(0, length, dtype=K.floatx())
num_timescales = hidden_size // 2
log_timescale_increment = K.constant(
(np.log(float(max_timescale) / float(min_timescale)) /
(num_timescales - 1)),
dtype=K.floatx())
inv_timescales = (
min_timescale *
K.exp(K.arange(num_timescales, dtype=K.floatx()) *
-log_timescale_increment))
scaled_time = K.expand_dims(position, 1) * K.expand_dims(inv_timescales, 0)
signal = K.concatenate([K.sin(scaled_time), K.cos(scaled_time)], axis=1)
return K.expand_dims(signal, axis=0)
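# A minimal shape check, left as comments so importing the module has no side
# effects (assumption: K.eval can materialize the backend tensor):
#   signal = positional_signal(hidden_size=4, length=3)
#   K.eval(signal).shape  # -> (1, 3, 4); sines in channels 0-1, cosines in 2-3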
class AddPositionalEncoding(Layer):
"""
Injects positional encoding signal described in section 3.5 of the original
paper "Attention is all you need". Also a base class for more complex
coordinate encoding described in "Universal Transformers".
"""
def __init__(self, min_timescale: float = 1.0,
max_timescale: float = 1.0e4, **kwargs):
self.min_timescale = min_timescale
self.max_timescale = max_timescale
self.signal = None
super().__init__(**kwargs)
def get_config(self):
config = super().get_config()
config['min_timescale'] = self.min_timescale
config['max_timescale'] = self.max_timescale
return config
def build(self, input_shape):
_, length, hidden_size = input_shape
self.signal = positional_signal(
hidden_size, length, self.min_timescale, self.max_timescale)
return super().build(input_shape)
def call(self, inputs, **kwargs):
return inputs + self.signal
class AddCoordinateEncoding(AddPositionalEncoding):
"""
Implements coordinate encoding described in section 2.1
of "Universal Transformers" (https://arxiv.org/abs/1807.03819).
In other words, injects two signals at once: current position in
the sequence, and current step (vertically) in the transformer model.
"""
def build(self, input_shape):
super().build(input_shape)
_, length, hidden_size = input_shape
def call(self, inputs, step=None, **kwargs):
if step is None:
raise ValueError("Please, provide current Transformer's step"
"using 'step' keyword argument.")
pos_encoded_added = super().call(inputs, **kwargs)
step_signal = K.expand_dims(self.signal[:, step, :], axis=1)
return pos_encoded_added + step_signal
class TransformerCoordinateEmbedding(Layer):
"""
Represents trainable positional embeddings for the Transformer model:
1. word position embeddings - one for each position in the sequence.
2. depth embeddings - one for each block of the model
Calling the layer with the Transformer's input will return a new input
with those embeddings added.
"""
def __init__(self, max_transformer_depth: int, **kwargs):
self.max_depth = max_transformer_depth
super().__init__(**kwargs)
def get_config(self):
config = super().get_config()
config['max_transformer_depth'] = self.max_depth
return config
# noinspection PyAttributeOutsideInit
def build(self, input_shape):
sequence_length, d_model = input_shape[-2:]
self.word_position_embeddings = self.add_weight(
shape=(sequence_length, d_model),
initializer='uniform',
name='word_position_embeddings',
trainable=True)
self.depth_embeddings = self.add_weight(
shape=(self.max_depth, d_model),
initializer='uniform',
name='depth_position_embeddings',
trainable=True)
super().build(input_shape)
def call(self, inputs, **kwargs):
depth = kwargs.get('step')
if depth is None:
raise ValueError("Please, provide current Transformer's step"
"using 'step' keyword argument.")
        result = inputs + self.word_position_embeddings
        return result + self.depth_embeddings[depth]
get_custom_objects().update({
'TransformerCoordinateEmbedding': TransformerCoordinateEmbedding,
'AddCoordinateEncoding': AddCoordinateEncoding,
    'AddPositionalEncoding': AddPositionalEncoding,
})
``` |
{
"source": "josecastano1/Evidencia_Control_Git",
"score": 2
} |
#### File: josecastano1/Evidencia_Control_Git/main.py
```python
from flask import Flask, send_from_directory, render_template
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GObject # noqa: E402
GObject.threads_init()
Gst.init(None)
app = Flask(__name__, template_folder='template')
DEBUG = False
if DEBUG:
src = "videotestsrc"
else:
src = 'v4l2src device="/dev/video0"'
launch_string = "%s ! videoconvert ! clockoverlay ! " \
"x264enc tune=zerolatency ! mpegtsmux ! " \
"hlssink location=video/usb_camera.%%05d.ts playlist-location=video/usb_camera.m3u8 max-files=10 target-duration=1" % src
pipeline = Gst.parse_launch(launch_string)
pipeline.set_state(Gst.State.PLAYING)
@app.after_request
def add_header(response):
response.headers['X-UA-Compatible'] = 'IE=Edge,chrome=1'
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
response.headers['Pragma'] = 'no-cache'
response.headers['Expires'] = '0'
return response
@app.route('/')
def index():
return render_template('index.html')
@app.route('/video/<string:file_name>')
def stream(file_name):
video_dir = './video'
return send_from_directory(directory=video_dir, filename=file_name)
if __name__ == '__main__':
app.run(host="0.0.0.0", port=5001)
``` |
{
"source": "Josecc/save-excel-images",
"score": 3
} |
#### File: Josecc/save-excel-images/ImageDownloadThread.py
```python
from threading import Thread
from Image import Image
class ImageDownloadThread(Thread):
def __init__(self, imageURLs):
super().__init__()
self.imageURLs = imageURLs
def run(self):
for imageURL in self.imageURLs:
Image(imageURL).download()
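# A minimal usage sketch (assumption: `urls` is a list of image URLs and the
# local Image class downloads one URL per call):
#   thread = ImageDownloadThread(urls)
#   thread.start()
#   thread.join()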
``` |
{
"source": "josecelano/data-version-control",
"score": 3
} |
#### File: app/console/main.py
```python
import argparse
from shared.resize_image_handler import resize_image_handler
def init_argparse():
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', required=True,
help="source image path")
parser.add_argument('-o', '--output', required=True,
help="resized image path")
    parser.add_argument('-r', '--rows', required=True,
                        type=int, help="resized image height (rows)")
    parser.add_argument('-c', '--cols', required=True,
                        type=int, help="resized image width (columns)")
    parser.add_argument('-d', '--dim', required=True, type=int,
                        help="resized image depth (number of channels)")
return parser
def main():
parser = init_argparse()
args = parser.parse_args()
resize_image_handler(args.input, args.output,
(args.rows, args.cols, args.dim))
if __name__ == "__main__":
main()
```
#### File: data-version-control/src/check_accuracy.py
```python
import json
from pathlib import Path
def main(repo_path):
accuracy_path = repo_path / "metrics/accuracy.json"
with open(accuracy_path) as json_file:
data = json.load(json_file)
accuracy = data['accuracy']
if abs(accuracy - .81) > 0.01:
exit(1)
if __name__ == "__main__":
repo_path = Path(__file__).parent.parent
main(repo_path)
``` |
{
"source": "JoseChavez98/Competitive-Programing",
"score": 4
} |
#### File: JoseChavez98/Competitive-Programing/coinChange.py
```python
def coinChange(array,amount):
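    # table[j] counts the ways to form amount j with the coins seen so far.
    # Looping over coins in the outer loop yields combinations (order of
    # coins does not matter), not permutations.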
table = [0 for k in range(amount+1)]
table[0] = 1
for i in range(0,len(array)):
for j in range(array[i],amount+1):
table[j] += table[j-array[i]]
return table[amount]
# array = [1,5]
array = [1,2,3]
# amount = 6
amount=4
print(coinChange(array,amount))
#https://www.geeksforgeeks.org/coin-change-dp-7/
```
#### File: Competitive-Programing/leetcode-problems/two_sum.py
```python
def two_sum(nums,target):
dic={}
complement = 0
listOfIndexes = []
    for i in range(len(nums)):
complement = target - nums[i]
if(complement in dic):
listOfIndexes.append(i)
listOfIndexes.append(dic[complement])
return listOfIndexes
else:
dic[nums[i]]=i
return -1
listOfNumbers = [2,7,11,15]
print(sorted(two_sum(listOfNumbers,9)))
```
#### File: JoseChavez98/Competitive-Programing/maxElementWindow.py
```python
from collections import deque
def find_max_sliding_window(arr, w):
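    # myQ is a monotonic deque of indices whose values decrease from front to
    # back, so myQ[0] always holds the index of the current window's maximum.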
myQ =deque()
for i in range(w):
while(myQ and arr[i]>=arr[myQ[-1]]):
myQ.pop()
myQ.append(i)
for i in range(w, len(arr)):
print(str(arr[myQ[0]]) + " ", end = "")
while myQ and myQ[0] <= i-w:
myQ.popleft()
while myQ and arr[i] >= arr[myQ[-1]] :
myQ.pop()
myQ.append(i)
print(arr[myQ[0]])
array = [10, 6, 9, -3, 23, -1, 34, 56, 67, -1, -4, -8, -2, 9, 10, 34, 67]
find_max_sliding_window(array,3)
```
#### File: JoseChavez98/Competitive-Programing/minWindowSort.py
```python
def minMax(array):
mini = 1000000
maxi = -100000
for i in array:
maxi = max(maxi, i)
mini = min(mini, i)
return (mini, maxi)
def minWSort(array):
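    # Returns the length of the smallest subarray that, once sorted, makes
    # the whole array sorted (0 if the array is already sorted).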
values = minMax(array)
mini = values[0]
maxi = values[1]
flag = [False, False]
p1 = 0
p2 = len(array)-1
init = [-1, False]
end = [-1, False]
if(array[p1] > mini):
flag[0] = True
init[0] = p1
init[1] = True
if(array[p2] < maxi):
flag[1] = True
end[0] = p2
end[1] = True
while(init[1] == False or end[1] == False):
if(array[p1] < mini and flag[0] == False):
init[0] = p1-1
init[1] = True
elif(flag[0] == False):
mini = array[p1]
p1 += 1
if(array[p2] > maxi and flag[1] == False):
end[0] = p2+1
end[1] = True
elif(flag[1] == False):
maxi = array[p2]
p2 -= 1
# flag[1]=True
if(p1 == p2):
return 0
return end[0] - init[0] + 1
print(minWSort([1, 2, 5, 3, 7, 10, 9, 12]))
print(minWSort([1, 3, 2, 0, -1, 7, 10]))
print(minWSort([1, 2, 3]))
print(minWSort([3, 2, 1]))
``` |
{
"source": "Josecholi/Houdini-asyncio",
"score": 3
} |
#### File: houdini/data/ninja.py
```python
from houdini.data import db, BaseCrumbsCollection
class Card(db.Model):
__tablename__ = 'card'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50), nullable=False)
set_id = db.Column(db.SmallInteger, nullable=False, server_default=db.text("1"))
power_id = db.Column(db.SmallInteger, nullable=False, server_default=db.text("0"))
element = db.Column(db.CHAR(1), nullable=False, server_default=db.text("'s'::bpchar"))
color = db.Column(db.CHAR(1), nullable=False, server_default=db.text("'b'::bpchar"))
value = db.Column(db.SmallInteger, nullable=False, server_default=db.text("2"))
description = db.Column(db.String(255), nullable=False, server_default=db.text("''::character varying"))
class PenguinCard(db.Model):
__tablename__ = 'penguin_card'
penguin_id = db.Column(db.ForeignKey('penguin.id', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True,
nullable=False, index=True)
card_id = db.Column(db.ForeignKey('card.id', ondelete='CASCADE', onupdate='CASCADE'), primary_key=True,
nullable=False)
quantity = db.Column(db.SmallInteger, nullable=False, server_default=db.text("1"))
class CardCrumbsCollection(BaseCrumbsCollection):
def __init__(self, inventory_id=None):
super().__init__(model=Card,
key='id',
inventory_key='penguin_id',
inventory_value='card_id',
inventory_model=PenguinCard,
inventory_id=inventory_id)
```
#### File: handlers/login/world.py
```python
import config
from houdini import handlers
from houdini.handlers import XMLPacket, login
from houdini.converters import WorldCredentials, Credentials
from houdini.data.penguin import Penguin
from houdini.data.moderator import Ban
from houdini.crypto import Crypto
from houdini.constants import ClientType
from datetime import datetime
handle_version_check = login.handle_version_check
handle_random_key = login.handle_random_key
@handlers.handler(XMLPacket('login'), client=ClientType.Vanilla)
@handlers.allow_once
@handlers.depends_on_packet(XMLPacket('verChk'), XMLPacket('rndK'))
async def handle_login(p, credentials: WorldCredentials):
if len(p.server.penguins_by_id) >= p.server.server_config['Capacity']:
return await p.send_error_and_disconnect(103)
tr = p.server.redis.multi_exec()
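    # Atomically fetch the one-time login key and confirmation hash issued at
    # authentication, then delete them so each key can only be redeemed once.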
tr.get('{}.lkey'.format(credentials.username))
tr.get('{}.ckey'.format(credentials.username))
tr.delete('{}.lkey'.format(credentials.username), '{}.ckey'.format(credentials.username))
login_key, confirmation_hash, _ = await tr.execute()
if login_key is None or confirmation_hash is None:
return await p.close()
login_key = login_key.decode()
login_hash = Crypto.encrypt_password(login_key + config.client['AuthStaticKey']) + login_key
if credentials.client_key != login_hash:
return await p.close()
if login_key != credentials.login_key or confirmation_hash.decode() != credentials.confirmation_hash:
return await p.close()
data = await Penguin.get(credentials.id)
if data is None:
return await p.send_error_and_disconnect(100)
if not data.active:
return await p.close()
if data.permaban:
return await p.close()
active_ban = await Ban.query.where((Ban.penguin_id == data.id) & (Ban.expires >= datetime.now())).gino.scalar()
if active_ban is not None:
return await p.close()
if data.id in p.server.penguins_by_id:
await p.server.penguins_by_id[data.id].close()
p.logger.info('{} logged in successfully'.format(data.username))
p.data = data
p.login_key = login_key
await p.send_xt('l')
@handlers.handler(XMLPacket('login'), client=ClientType.Legacy)
@handlers.allow_once
@handlers.depends_on_packet(XMLPacket('verChk'), XMLPacket('rndK'))
async def handle_legacy_login(p, credentials: Credentials):
if len(p.server.penguins_by_id) >= p.server.server_config['Capacity']:
return await p.send_error_and_disconnect(103)
tr = p.server.redis.multi_exec()
tr.get('{}.lkey'.format(credentials.username))
tr.delete('{}.lkey'.format(credentials.username), '{}.ckey'.format(credentials.username))
    login_key, _ = await tr.execute()
    if login_key is None:
        return await p.close()
    login_key = login_key.decode()
    login_hash = Crypto.encrypt_password(login_key + config.client['AuthStaticKey']) + login_key
    if login_hash != credentials.password:
        return await p.close()
data = await Penguin.query.where(Penguin.username == credentials.username).gino.first()
if data is None:
return await p.send_error_and_disconnect(100)
if not data.active:
return await p.close()
if data.permaban:
return await p.close()
active_ban = await Ban.query.where((Ban.penguin_id == data.id) & (Ban.expires >= datetime.now())).gino.scalar()
if active_ban is not None:
return await p.close()
if data.id in p.server.penguins_by_id:
await p.server.penguins_by_id[data.id].close()
p.logger.info('{} logged in successfully'.format(data.username))
p.data = data
p.login_key = login_key
await p.send_xt('l')
```
#### File: handlers/play/experience.py
```python
from houdini import handlers
from houdini.handlers import XTPacket
from houdini.handlers.play.navigation import handle_join_server, handle_join_room
from houdini.data import db
from houdini.data.quest import Quest, QuestAwardItem, QuestAwardFurniture, QuestAwardPuffleItem, QuestTask
from houdini.data.quest import PenguinQuestTask
import ujson
from aiocache import cached
def get_status_key(_, p):
return 'quest.status.{}'.format(p.data.id)
def get_settings_key(_, p):
return 'quest.settings.{}'.format(p.room.id)
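# aiocache passes the decorated function plus its arguments to key_builder,
# so quest status is cached per penguin and quest settings per room; handlers
# below invalidate these entries explicitly via p.server.cache.delete.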
@cached(alias='default', key_builder=get_status_key)
async def get_player_quest_status(p):
query = Quest.load(tasks=QuestTask,
items=QuestAwardItem,
furniture=QuestAwardFurniture,
pet=QuestAwardPuffleItem,
complete=PenguinQuestTask.on((PenguinQuestTask.penguin_id == p.data.id) &
(QuestTask.id == PenguinQuestTask.task_id))).gino
def has_award(quest):
for award in quest.items:
if award.item_id not in p.data.inventory:
return False
for award in quest.furniture:
if award.furniture_id not in p.data.furniture:
return False
for award in quest.pet:
if award.puffle_item_id not in p.data.puffle_items:
return False
return True
def encode_quest(quest):
tasks_complete = [task.id in quest.complete for task in quest.tasks]
        quest_status = ('prize claimed' if has_award(quest)
                        else 'complete' if all(tasks_complete)
                        else 'available' if quest.in_progress
                        else 'not available')
return {
'id': quest.id,
'status': quest_status,
'tasks': tasks_complete
}
async with db.transaction():
player_quest_status = {
'quests': [encode_quest(quest) async for quest in query.iterate()]
}
return ujson.dumps(player_quest_status)
AwardTypes = {
QuestAwardItem: ('item_id', 'penguinItem'),
QuestAwardFurniture: ('furniture_id', 'furnitureItem'),
QuestAwardPuffleItem: ('puffle_item_id', 'puffleItem')
}
@cached(alias='default', key_builder=get_settings_key)
async def get_quest_settings(p):
query = Quest.load(items=QuestAwardItem,
furniture=QuestAwardFurniture,
pet=QuestAwardPuffleItem,
tasks=QuestTask).gino
def encode_award(award):
award_id, award_type = AwardTypes[type(award)]
return {
'id': getattr(award, award_id),
'type': award_type,
'n': award.quantity if hasattr(award, 'quantity') else 1
}
def encode_task(task):
return {
'type': 'room' if task.room_id is not None else '',
'description': task.description,
'data': task.room_id if task.room_id is not None else task.data
}
async with db.transaction():
quest_settings = {
'ver': 1, 'spawnRoomId': p.room.id,
'quests': [
{
'id': quest.id,
'name': quest.name,
'awards': [encode_award(award) for award in quest.awards],
'tasks': [encode_task(task) for task in quest.tasks]
} async for quest in query.iterate()
]}
return ujson.dumps(quest_settings)
async def init_all_quests(p):
query = Quest.load(tasks=QuestTask,
complete=PenguinQuestTask.on((PenguinQuestTask.penguin_id == p.data.id) &
(QuestTask.id == PenguinQuestTask.task_id))).gino
async with db.transaction():
async for quest in query.iterate():
for task in quest.tasks:
if task.id not in quest.in_progress.union(quest.complete):
await PenguinQuestTask.create(task_id=task.id, penguin_id=p.data.id)
await load_active_quests(p)
async def load_active_quests(p):
p.active_quests = await Quest.load(tasks=QuestTask,
items=QuestAwardItem,
furniture=QuestAwardFurniture,
pet=QuestAwardPuffleItem,
complete=PenguinQuestTask.on((PenguinQuestTask.penguin_id == p.data.id) &
(PenguinQuestTask.task_id == QuestTask.id) &
(PenguinQuestTask.complete == False))).gino.all()
@handlers.handler(XTPacket('j', 'js'), after=handle_join_server)
@handlers.allow_once
async def handle_quest_join_server(p):
await p.server.cache.delete('quest.status.{}'.format(p.data.id))
await load_active_quests(p)
await p.send_xt('nxquestsettings', await get_quest_settings(p))
await p.send_xt('nxquestdata', await get_player_quest_status(p))
@handlers.handler(XTPacket('j', 'jr'), after=handle_join_room)
async def handle_quest_join_room(p):
if p.active_quests is not None:
for quest in p.active_quests:
for task in quest.tasks:
if task.id in quest.in_progress and task.room_id == p.room.id:
await p.server.cache.delete('quest.status.{}'.format(p.data.id))
await PenguinQuestTask.update.values(complete=True)\
.where((PenguinQuestTask.task_id == task.id) &
(PenguinQuestTask.penguin_id == p.data.id)).gino.status()
p.active_quests.remove(quest)
return await p.send_xt('nxquestdata', await get_player_quest_status(p))
@handlers.handler(XTPacket('nx', 'nxquestaward'))
async def handle_quest_award(p, quest_id: int):
await p.server.cache.delete('quest.status.{}'.format(p.data.id))
quest = await Quest.load(items=QuestAwardItem,
furniture=QuestAwardFurniture,
pet=QuestAwardPuffleItem).where(Quest.id == quest_id).gino.first()
for award in quest.items:
await p.add_inventory(p.server.items[award.item_id])
for award in quest.furniture:
await p.add_furniture(p.server.furniture[award.furniture_id])
for award in quest.pet:
await p.add_puffle_item(p.server.puffle_items[award.puffle_item_id])
await p.send_xt('nxquestdata', await get_player_quest_status(p))
@handlers.handler(XTPacket('nx', 'nxquestactivate'))
@handlers.allow_once
async def handle_quest_activate(p):
await p.server.cache.delete('quest.status.{}'.format(p.data.id))
await init_all_quests(p)
await p.send_xt('nxquestdata', await get_player_quest_status(p))
@handlers.handler(XTPacket('nx', 'gas'))
@handlers.allow_once
async def handle_get_action_status(p):
await p.send_xt('gas', int(p.data.special_dance), int(p.data.special_wave),
int(p.data.special_snowball))
@handlers.handler(XTPacket('nx', 'mcs'))
async def handle_map_category_setting(p, map_category: int):
if 0 <= map_category <= 4:
await p.data.update(map_category=map_category).apply()
@handlers.handler(XTPacket('nx', 'pcos'))
@handlers.allow_once
@handlers.player_data_attribute(opened_playercard=False)
async def handle_playercard_opened_setting(p):
await p.data.update(opened_playercard=True).apply()
@handlers.handler(XTPacket('nx', 'swave'))
@handlers.allow_once
@handlers.player_data_attribute(special_wave=False)
async def handle_special_wave(p):
await p.data.update(special_wave=True).apply()
@handlers.handler(XTPacket('nx', 'sdance'))
@handlers.allow_once
@handlers.player_data_attribute(special_dance=False)
async def handle_special_dance(p):
await p.data.update(special_dance=True).apply()
@handlers.handler(XTPacket('nx', 'ssnowball'))
@handlers.allow_once
@handlers.player_data_attribute(special_snowball=False)
async def handle_special_snowball(p):
await p.data.update(special_snowball=True).apply()
```
#### File: handlers/play/mail.py
```python
from houdini import handlers
from houdini.handlers import XTPacket
from houdini.data import db
from houdini.data.penguin import Penguin
from houdini.data.buddy import IgnoreList
from houdini.data.mail import PenguinPostcard
import time
import random
import datetime
@handlers.handler(XTPacket('l', 'mst'))
@handlers.allow_once
async def handle_start_mail_engine(p):
postcards = []
if not p.data.agent_status and random.random() < 0.4:
epf_invited = await PenguinPostcard.query.where(
(PenguinPostcard.penguin_id == p.data.id) & ((PenguinPostcard.postcard_id == 112)
| (PenguinPostcard.postcard_id == 47))).gino.scalar()
if not epf_invited:
postcards.append({
'penguin_id': p.data.id,
'postcard_id': 112
})
last_paycheck = p.data.last_paycheck.date()
today = datetime.date.today()
first_day_of_month = today.replace(day=1)
last_paycheck = last_paycheck.replace(day=1)
player_data = p.data
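    # Advance month by month: adding 32 days always lands in the next month,
    # and snapping back to day 1 normalizes to that month's first day.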
while last_paycheck < first_day_of_month:
last_paycheck = last_paycheck + datetime.timedelta(days=32)
last_paycheck = last_paycheck.replace(day=1)
send_date = last_paycheck + datetime.timedelta(days=1)
if 428 in p.data.inventory:
postcards.append({
'penguin_id': p.data.id,
'postcard_id': 172,
'send_date': send_date
})
player_data.update(coins=p.data.coins + 250)
if p.data.agent_status:
postcards.append({
'penguin_id': p.data.id,
'postcard_id': 184,
'send_date': send_date
})
player_data.update(coins=p.data.coins + 350)
await player_data.update(last_paycheck=last_paycheck).apply()
if postcards:
await PenguinPostcard.insert().values(postcards).gino.status()
mail_count = await db.select([db.func.count(PenguinPostcard.id)]).where(
PenguinPostcard.penguin_id == p.data.id).gino.scalar()
unread_mail_count = await db.select([db.func.count(PenguinPostcard.id)]).where(
(PenguinPostcard.penguin_id == p.data.id)
& (PenguinPostcard.has_read == False)).gino.scalar()
await p.send_xt('mst', unread_mail_count, mail_count)
@handlers.handler(XTPacket('l', 'mg'))
@handlers.allow_once
async def handle_get_mail(p):
mail_query = PenguinPostcard.load(parent=Penguin.on(Penguin.id == PenguinPostcard.sender_id)).where(
PenguinPostcard.penguin_id == p.data.id).order_by(
PenguinPostcard.send_date.desc())
postcards = []
async with p.server.db.transaction():
async for postcard in mail_query.gino.iterate():
sender_name, sender_id = ('sys', 0) if postcard.sender_id is None else (
postcard.parent.nickname, postcard.sender_id)
sent_timestamp = int(time.mktime(postcard.send_date.timetuple()))
postcards.append(f'{sender_name}|{sender_id}|{postcard.postcard_id}|'
f'{postcard.details}|{sent_timestamp}|{postcard.id}|{int(postcard.has_read)}')
await p.send_xt('mg', *postcards)
@handlers.handler(XTPacket('l', 'ms'))
@handlers.cooldown(2)
async def handle_send_mail(p, recipient_id: int, postcard_id: int):
if p.data.coins < 10:
return await p.send_xt('ms', p.data.coins, 0)
if recipient_id in p.server.penguins_by_id:
recipient = p.server.penguins_by_id[recipient_id]
if p.data.id in recipient.data.ignore:
return await p.send_xt('ms', p.data.coins, 1)
if len(recipient.data.postcards) >= 100:
return await p.send_xt('ms', p.data.coins, 0)
postcard = await PenguinPostcard.create(penguin_id=recipient_id, sender_id=p.data.id,
postcard_id=postcard_id)
sent_timestamp = int(time.mktime(postcard.send_date.timetuple()))
await recipient.send_xt('mr', p.data.nickname, p.data.id, postcard_id, '', sent_timestamp, postcard.id)
else:
ignored = await IgnoreList.query.where((IgnoreList.penguin_id == recipient_id)
& (IgnoreList.ignore_id == p.data.id)).gino.scalar()
if ignored is not None:
return await p.send_xt('ms', p.data.coins, 1)
mail_count = await db.select([db.func.count(PenguinPostcard.id)]).where(
PenguinPostcard.penguin_id == recipient_id).gino.scalar()
if mail_count >= 100:
return await p.send_xt('ms', p.data.coins, 0)
await PenguinPostcard.create(penguin_id=recipient_id, sender_id=p.data.id, postcard_id=postcard_id)
await p.data.update(coins=p.data.coins - 10).apply()
return await p.send_xt('ms', p.data.coins, 1)
@handlers.handler(XTPacket('l', 'mc'))
async def handle_mail_checked(p):
await PenguinPostcard.update.values(has_read=True).where(
PenguinPostcard.penguin_id == p.data.id).gino.status()
@handlers.handler(XTPacket('l', 'md'))
async def handle_delete_mail(p, postcard_id: int):
await PenguinPostcard.delete.where((PenguinPostcard.penguin_id == p.data.id)
& (PenguinPostcard.id == postcard_id)).gino.status()
@handlers.handler(XTPacket('l', 'mdp'))
async def handle_delete_mail_from_user(p, sender_id: int):
sender_id = None if sender_id == 0 else sender_id
await PenguinPostcard.delete.where((PenguinPostcard.penguin_id == p.data.id)
& (PenguinPostcard.sender_id == sender_id)).gino.status()
mail_count = await db.select([db.func.count(PenguinPostcard.id)]).where(
PenguinPostcard.penguin_id == p.data.id).gino.scalar()
await p.send_xt('mdp', mail_count)
```
#### File: handlers/play/message.py
```python
from houdini import handlers
from houdini.handlers import XTPacket
from houdini.commands import invoke_command_string, has_command_prefix
@handlers.handler(XTPacket('m', 'sm'))
@handlers.cooldown(.5)
async def handle_send_message(p, penguin_id: int, message: str):
if penguin_id != p.data.id:
return await p.close()
if p.muted:
for penguin in p.room.penguins_by_id.values():
if penguin.data.moderator:
await penguin.send_xt("mm", message, penguin_id)
return
if has_command_prefix(message):
await invoke_command_string(p.server.commands, p, message)
else:
await p.room.send_xt('sm', p.data.id, message)
```
#### File: handlers/play/stampbook.py
```python
from houdini import handlers
from houdini.handlers import XTPacket
from houdini.handlers.play.navigation import handle_join_server
from houdini.data.stamp import Stamp, CoverStamp, CoverItem, StampCrumbsCollection
from houdini.data.penguin import Penguin
from aiocache import cached
def get_book_cover_key(_, p, player_id):
return 'book.{}'.format(player_id)
def get_player_stamps_key(_, p, player_id):
return 'stamps.{}'.format(player_id)
@cached(alias='default', key_builder=get_book_cover_key)
async def get_book_cover_string(p, player_id):
if player_id in p.server.penguins_by_id:
player = p.server.penguins_by_id[player_id]
cover_details = [player.data.book_color, player.data.book_highlight, player.data.book_pattern,
player.data.book_icon]
else:
cover_details = list(await Penguin.select('book_color', 'book_highlight', 'book_pattern', 'book_icon')
.where(Penguin.id == player_id).gino.first())
cover_stamps = CoverStamp.query.where(CoverStamp.penguin_id == player_id)
cover_items = CoverItem.query.where(CoverItem.penguin_id == player_id)
async with p.server.db.transaction():
async for stamp in cover_stamps.gino.iterate():
cover_details.append(f'0|{stamp.stamp_id}|{stamp.x}|{stamp.y}|{stamp.rotation}|{stamp.depth}')
async for item in cover_items.gino.iterate():
item_type = 2 if p.server.items[item.item_id].is_award() else 1
cover_details.append(f'{item_type}|{item.item_id}|{item.x}|{item.y}|{item.rotation}|{item.depth}')
return '%'.join(map(str, cover_details))
@cached(alias='default', key_builder=get_player_stamps_key)
async def get_player_stamps_string(p, player_id):
if player_id in p.server.penguins_by_id:
stamp_inventory = p.server.penguins_by_id[player_id].data.stamps
else:
stamp_inventory = await StampCrumbsCollection.get_collection(player_id)
return '|'.join(map(str, stamp_inventory.keys()))
@handlers.handler(XTPacket('j', 'js'), after=handle_join_server)
@handlers.allow_once
async def handle_get_stamps(p):
await p.send_xt('gps', p.data.id, await get_player_stamps_string(p, p.data.id))
@handlers.handler(XTPacket('st', 'gps'))
@handlers.cooldown(1)
async def handle_get_player_stamps(p, player_id: int):
await p.send_xt('gps', p.data.id, await get_player_stamps_string(p, player_id))
@handlers.handler(XTPacket('st', 'gmres'))
@handlers.cooldown(1)
async def handle_get_recent_stamps(p):
recent_stamps = []
for stamp in p.data.stamps.values():
if stamp.recent:
recent_stamps.append(stamp.stamp_id)
await stamp.update(recent=False).apply()
await p.send_xt('gmres', '|'.join(map(str, recent_stamps)))
@handlers.handler(XTPacket('st', 'sse'))
@handlers.allow_once
async def handle_stamp_add(p, stamp: Stamp):
await p.add_stamp(stamp)
@handlers.handler(XTPacket('st', 'gsbcd'))
@handlers.cooldown()
async def handle_get_book_cover(p, player_id: int):
await p.send_xt('gsbcd', await get_book_cover_string(p, player_id))
@handlers.handler(XTPacket('st', 'ssbcd'))
@handlers.cooldown()
async def handle_update_book_cover(p, color: int, highlight: int, pattern: int, icon: int, *cover):
if not(1 <= int(color) <= 6 and 1 <= int(highlight) <= 18 and 0 <= int(pattern) <= 6 and 1 <= int(icon) <= 6
and len(cover) <= 10):
return
await CoverItem.delete.where(CoverItem.penguin_id == p.data.id).gino.status()
await CoverStamp.delete.where(CoverStamp.penguin_id == p.data.id).gino.status()
stamp_tracker = set()
inventory_tracker = set()
cover_items = []
cover_stamps = []
for stamp in cover:
stamp_array = stamp.split('|', 5)
stamp_type, stamp_id, pos_x, pos_y, rotation, depth = map(int, stamp_array)
if not (0 <= stamp_type <= 2 and 0 <= pos_x <= 600 and 0 <= pos_y <= 600 and
0 <= rotation <= 360 and 0 <= depth <= 100):
return
if stamp_type == 0:
if stamp_id in stamp_tracker or stamp_id not in p.data.stamps:
return
stamp_tracker.add(stamp_id)
cover_stamps.append({'penguin_id': p.data.id, 'stamp_id': stamp_id, 'x': pos_x, 'y': pos_y,
'rotation': rotation, 'depth': depth})
elif stamp_type == 1 or stamp_type == 2:
cover_item = p.server.items[stamp_id]
if stamp_id in inventory_tracker or stamp_id not in p.data.inventory or \
(stamp_type == 1 and not cover_item.is_flag()) or \
(stamp_type == 2 and not cover_item.is_award()):
return
inventory_tracker.add(stamp_id)
cover_items.append({'penguin_id': p.data.id, 'item_id': stamp_id, 'x': pos_x, 'y': pos_y,
'rotation': rotation, 'depth': depth})
if cover_items:
await CoverItem.insert().values(cover_items).gino.status()
if cover_stamps:
await CoverStamp.insert().values(cover_stamps).gino.status()
await p.data.update(book_color=color,
book_highlight=highlight,
book_pattern=pattern,
book_icon=icon,
book_modified=1).apply()
stringified_cover = '%'.join(cover)
await p.server.cache.set('book.{}'.format(p.data.id), f'{color}%{highlight}%{pattern}%{icon}%{stringified_cover}')
```
#### File: Houdini-asyncio/houdini/__init__.py
```python
from collections import OrderedDict
from types import FunctionType
from abc import abstractmethod
import asyncio
import logging
import importlib
import pkgutil
def get_package_modules(package):
package_modules = []
for importer, module_name, is_package in pkgutil.iter_modules(package.__path__):
full_module_name = f'{package.__name__}.{module_name}'
subpackage_object = importlib.import_module(full_module_name, package=package.__path__)
if is_package:
sub_package_modules = get_package_modules(subpackage_object)
package_modules = package_modules + sub_package_modules
package_modules.append(subpackage_object)
return package_modules
class _AbstractManager(dict):
def __init__(self, server):
self.server = server
self.logger = logging.getLogger('houdini')
super().__init__()
@abstractmethod
async def setup(self, module):
"""Setup manager class"""
@abstractmethod
async def load(self, module):
"""Loads entries from module"""
class PenguinStringCompiler(OrderedDict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def __setitem__(self, key, compiler_method):
assert type(compiler_method) == FunctionType
super().__setitem__(key, compiler_method)
async def compile(self, p):
compiler_method_results = []
for compiler_method in self.values():
if asyncio.iscoroutinefunction(compiler_method):
compiler_method_result = await compiler_method(p)
else:
compiler_method_result = compiler_method(p)
compiler_method_results.append(str(compiler_method_result))
compiler_result = '|'.join(compiler_method_results)
return compiler_result
@classmethod
def attribute_by_name(cls, attribute_name):
async def attribute_method(p):
return getattr(p, attribute_name) or 0
return attribute_method
@classmethod
def data_attribute_by_name(cls, attribute_name):
async def attribute_method(p):
return getattr(p.data, attribute_name) or 0
return attribute_method
@classmethod
def setup_default_builder(cls, string_builder):
string_builder.update({
'ID': PenguinStringCompiler.data_attribute_by_name('id'),
'Nickname': PenguinStringCompiler.data_attribute_by_name('nickname'),
'Approval': PenguinStringCompiler.data_attribute_by_name('approval'),
'Color': PenguinStringCompiler.data_attribute_by_name('color'),
'Head': PenguinStringCompiler.data_attribute_by_name('head'),
'Face': PenguinStringCompiler.data_attribute_by_name('face'),
'Neck': PenguinStringCompiler.data_attribute_by_name('neck'),
'Body': PenguinStringCompiler.data_attribute_by_name('body'),
'Hand': PenguinStringCompiler.data_attribute_by_name('hand'),
'Feet': PenguinStringCompiler.data_attribute_by_name('feet'),
'Flag': PenguinStringCompiler.data_attribute_by_name('flag'),
'Photo': PenguinStringCompiler.data_attribute_by_name('photo'),
'X': PenguinStringCompiler.attribute_by_name('x'),
'Y': PenguinStringCompiler.attribute_by_name('y'),
'Frame': PenguinStringCompiler.attribute_by_name('frame'),
'Member': PenguinStringCompiler.attribute_by_name('member'),
'MemberDays': PenguinStringCompiler.attribute_by_name('membership_days'),
'Avatar': PenguinStringCompiler.attribute_by_name('avatar'),
'PenguinState': PenguinStringCompiler.attribute_by_name('penguin_state'),
'PartyState': PenguinStringCompiler.attribute_by_name('party_state'),
'PuffleState': PenguinStringCompiler.attribute_by_name('puffle_state')
})
@classmethod
def setup_anonymous_default_builder(cls, string_builder):
string_builder.update({
'ID': PenguinStringCompiler.attribute_by_name('id'),
'Nickname': PenguinStringCompiler.attribute_by_name('nickname'),
'Approval': PenguinStringCompiler.attribute_by_name('approval'),
'Color': PenguinStringCompiler.attribute_by_name('color'),
'Head': PenguinStringCompiler.attribute_by_name('head'),
'Face': PenguinStringCompiler.attribute_by_name('face'),
'Neck': PenguinStringCompiler.attribute_by_name('neck'),
'Body': PenguinStringCompiler.attribute_by_name('body'),
'Hand': PenguinStringCompiler.attribute_by_name('hand'),
'Feet': PenguinStringCompiler.attribute_by_name('feet'),
'Flag': PenguinStringCompiler.attribute_by_name('flag'),
'Photo': PenguinStringCompiler.attribute_by_name('photo')
})
```
#### File: plugins/example/__init__.py
```python
from houdini import handlers
from houdini.handlers import XTPacket
from houdini.plugins import IPlugin
from houdini import commands
from houdini import permissions
class Example(IPlugin):
author = "Ben"
description = "Example plugin for developers"
version = "1.0.0"
def __init__(self, server):
super().__init__(server)
async def ready(self):
self.server.logger.info('Example.ready()')
await self.server.permissions.register('houdini.ping')
async def message_cooling(self, p):
print("{}, Message was sent during cooldown".format(p))
@handlers.handler(XTPacket('m', 'sm'))
@handlers.cooldown(1, callback=message_cooling)
async def handle_send_message(self, p, penguin_id: int, message: str):
print('Do stuff with {}'.format(message))
@commands.command('ping')
@permissions.has('houdini.ping')
async def ping(self, p):
await p.send_xt('cprompt', 'Pong')
``` |
{
"source": "JoseChulani/PyFDTD",
"score": 3
} |
#### File: JoseChulani/PyFDTD/main2D.py
```python
import numpy as np
import math
import scipy.constants
import time
import matplotlib.pyplot as plt
import matplotlib.animation as animation
# ==== Preamble ===============================================================
c0 = scipy.constants.speed_of_light
mu0 = scipy.constants.mu_0
eps0 = scipy.constants.epsilon_0
imp0 = math.sqrt(mu0 / eps0)
# ==== Inputs / Pre-processing ================================================
# ---- Problem definition -----------------------------------------------------
L = 10.0
dx = 0.1
dy = 0.1
finalTime = L/c0*5
cfl = .99
gridEX = np.linspace(0, L, num=int(round(L/dx))+1, endpoint=True)
gridEY = np.linspace(0, L, num=int(round(L/dy))+1, endpoint=True)
gridHX = np.linspace(dx/2.0, L-dx/2.0, num=int(round(L/dx)), endpoint=True)
gridHY = np.linspace(dy/2.0, L-dy/2.0, num=int(round(L/dy)), endpoint=True)
# ---- Materials --------------------------------------------------------------
#PML
pmlxStart=3.0/4.0*L
pmlxBegin=1.0/4.0*L
pmlyStart=3.0/4.0*L
pmlyBegin=1.0/4.0*L
pmlSigmaE0X=0.1
pmlSigmaH0X=pmlSigmaE0X*mu0/eps0
pmlSigmaE0Y=0.1
pmlSigmaH0Y=pmlSigmaE0Y*mu0/eps0
# ---- Boundary conditions ----------------------------------------------------
# ---- Sources ----------------------------------------------------------------
# Initial field
spread = 1.0
center = (L/2.0, L/2.0)
initialH = np.zeros((gridHX.size, gridHY.size))
for i in range(gridHX.size):
for j in range(gridHY.size):
initialH[i,j] = math.exp(
- ((gridHX[i]-center[0])**2 + (gridHY[j]-center[1])**2) /
math.sqrt(2.0) / spread)
# To get rid of the explicit for loops, look up "double index slicing" online.
vector=[1,2,3,4,5]
# ---- Output requests --------------------------------------------------------
samplingPeriod = 0.0
# ==== Processing =============================================================
# ---- Solver initialization --------------------------------------------------
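# 2-D CFL stability limit: dt <= dx / (c0 * sqrt(2)) on a uniform square
# grid, scaled here by the cfl safety factor.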
dt = cfl * dx / c0 / math.sqrt(2)
numberOfTimeSteps = int( finalTime / dt )
if samplingPeriod == 0.0:
samplingPeriod = dt
nSamples = int( math.floor(finalTime/samplingPeriod) )
probeH = np.zeros((gridHX.size, gridHY.size, nSamples))
probeTime = np.zeros(nSamples)
exOld = np.zeros((gridEX.size, gridEY.size))
exNew = np.zeros((gridEX.size, gridEY.size))
eyOld = np.zeros((gridEX.size, gridEY.size))
eyNew = np.zeros((gridEX.size, gridEY.size))
hzOld = np.zeros((gridHX.size, gridHY.size))
hzNew = np.zeros((gridHX.size, gridHY.size))
pmlIndexX=np.searchsorted(gridEX,3.0/4.0*L)
pmlIndexX2=np.searchsorted(gridEX,L/4.0)
pmlIndexY=np.searchsorted(gridEY,3.0/4.0*L)
if 'initialH' in locals():
hzOld = initialH
# Determines recursion coefficients
cEx = dt / eps0 / dx
cEy = dt / eps0 / dy
cHx = dt / mu0 / dx
cHy = dt / mu0 / dy
# ---- Time integration -------------------------------------------------------
print('--- Processing starts ---')
tic = time.time()
t = 0.0
for n in range(numberOfTimeSteps):
# --- Updates E field ---
#for i in range(1, gridEX.size-1):
# for j in range(1, gridEY.size-1):
# exNew[i][j] = exOld[i][j] + cEy * (hzOld[i][j] - hzOld[i ][j-1])
# eyNew[i][j] = eyOld[i][j] - cEx * (hzOld[i][j] - hzOld[i-1][j ])
#for i in range(1, gridEY.size-1):
exNew[1:-1, 1:-1] = exOld[1:-1, 1:-1] + cEy * (hzOld[1:, 1:] - hzOld[1:, :-1])
eyNew[1:-1, 1:-1] = eyOld[1:-1, 1:-1] - cEx * (hzOld[1:, 1:] - hzOld[:-1, 1: ])
# E field boundary conditions
    # --- PML region begins (E field) ---
for i in range(pmlIndexX,gridEX.size-1):
pmlSigmaEX=pmlSigmaE0X*pow((gridEX[i]-pmlxStart)/(L-pmlxStart),3)
eyNew[i, 1: -1] = (2.0*eps0-pmlSigmaEX*dt)/(2.0*eps0+dt*pmlSigmaEX)*eyOld[i, 1:-1]- \
2.0*dt/(2.0*eps0+dt*pmlSigmaEX)/dx*(hzOld[i, 1:] - hzOld[i-1, 1:])
exNew[i, 1: -1] = (2.0*eps0-dt*pmlSigmaEX)/(2.0*eps0+dt*pmlSigmaEX)*exOld[i, 1:-1]+ \
2.0*dt/(2.0*eps0+dt*pmlSigmaEX)/dy * (hzOld[i, 1:] - hzOld[i, :-1])
for i in range(pmlIndexY,gridEY.size-1):
        pmlSigmaEY=pmlSigmaE0Y*pow((gridEY[i]-pmlyStart)/(L-pmlyStart),3)
eyNew[1:-1, i] = (2.0*eps0-pmlSigmaEY*dt)/(2.0*eps0+dt*pmlSigmaEY)*eyOld[1:-1, i]- \
2.0*dt/(2.0*eps0+dt*pmlSigmaEY)/dx*(hzOld[1:, i] - hzOld[:-1, i])
exNew[1:-1, i] = (2.0*eps0-dt*pmlSigmaEY)/(2.0*eps0+dt*pmlSigmaEY)*exOld[1:-1, i]+ \
2.0*dt/(2.0*eps0+dt*pmlSigmaEY)/dy * (hzOld[1:, i] - hzOld[1:, i-1])
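    # NOTE: the reverse-side PML loops below hard-code indices (26, 101-i,
    # 100-j) that assume L = 10 and dx = dy = 0.1.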
for i in range(26,1,-1):
pmlSigmaEX=pmlSigmaE0X*pow((gridEX[101-i]-pmlxStart)/(L-pmlxStart),3)
eyNew[i, 1: -1] = (2.0*eps0-pmlSigmaEX*dt)/(2.0*eps0+dt*pmlSigmaEX)*eyOld[i, 1:-1]- \
2.0*dt/(2.0*eps0+dt*pmlSigmaEX)/dx * (hzOld[i, 1:] - hzOld[i-1, 1:])
exNew[i, 1: -1] = (2.0*eps0-dt*pmlSigmaEX)/(2.0*eps0+dt*pmlSigmaEX)*exOld[i, 1:-1]+ \
2.0*dt/(2.0*eps0+dt*pmlSigmaEX)/dy * (hzOld[i, 1:] - hzOld[i, :-1])
for i in range(26,1,-1):
        pmlSigmaEY=pmlSigmaE0Y*pow((gridEY[101-i]-pmlyStart)/(L-pmlyStart),3)
eyNew[1:-1, i] = (2.0*eps0-pmlSigmaEY*dt)/(2.0*eps0+dt*pmlSigmaEY)*eyOld[1:-1, i]- \
2.0*dt/(2.0*eps0+dt*pmlSigmaEY)/dx*(hzOld[1:, i] - hzOld[:-1, i])
exNew[1:-1, i] = (2.0*eps0-dt*pmlSigmaEY)/(2.0*eps0+dt*pmlSigmaEY)*exOld[1:-1, i]+ \
2.0*dt/(2.0*eps0+dt*pmlSigmaEY)/dy * (hzOld[1:, i] - hzOld[1:, i-1])
# PEC
    exNew[:, 0] = 0.0
    exNew[:, -1] = 0.0
    eyNew[0, :] = 0.0
    eyNew[-1, :] = 0.0
# --- Updates H field ---
#for i in range(gridHX.size):
# for j in range(gridHX.size):
# hzNew[i][j] = hzOld[i][j] - cHx * (eyNew[i+1][j ] - eyNew[i][j]) +\
# cHy * (exNew[i ][j+1] - exNew[i][j])
hzNew[:, :] = hzOld[:, :] - cHx * (eyNew[1:, :-1 ] - eyNew[:-1, :-1]) +\
cHy * (exNew[:-1 ,1:] - exNew[:-1, :-1])
    # --- PML region begins (H field) ---
for i in range(pmlIndexX-1,gridHX.size-1):
pmlSigmaHX = pmlSigmaH0X*pow((gridHX[i]-pmlxStart)/(L-pmlxStart),3)
hzNew[i, :] = (2.0*mu0-dt*pmlSigmaHX)/(2.0*mu0+dt*pmlSigmaHX)*hzOld[i, :] +\
2.0*dt/(2.0*mu0+dt*pmlSigmaHX)*(exNew[i, 1:]-exNew[i, :-1])/dy -\
2.0*dt/(2.0*mu0+dt*pmlSigmaHX)/dx*(eyNew[i+1 ,:-1] - eyNew[i ,:-1])
for i in range(pmlIndexY-1,gridHY.size-1):
        pmlSigmaHY = pmlSigmaH0Y*pow((gridHY[i]-pmlyStart)/(L-pmlyStart),3)
hzNew[:, i] = (2.0*mu0-dt*pmlSigmaHY)/(2.0*mu0+dt*pmlSigmaHY)*hzOld[:, i] +\
2.0*dt/(2.0*mu0+dt*pmlSigmaHY)*(exNew[:-1, i+1]-exNew[:-1, i])/dy -\
2.0*dt/(2.0*mu0+dt*pmlSigmaHY)/dx*(eyNew[1: ,i] - eyNew[:-1 ,i])
    for j in range(25,0,-1):
pmlSigmaHX = pmlSigmaH0X*pow((gridHX[100-j]-pmlxStart)/(L-pmlxStart),3)
hzNew[j, :] = (2.0*mu0-dt*pmlSigmaHX)/(2.0*mu0+dt*pmlSigmaHX)*hzOld[j, :] +\
2.0*dt/(2.0*mu0+dt*pmlSigmaHX)*(exNew[j, 1:]-exNew[j, :-1])/dy -\
2.0*dt/(2.0*mu0+dt*pmlSigmaHX)/dx*(eyNew[j+1 ,:-1] - eyNew[j ,:-1])
for i in range(25,0,-1):
        pmlSigmaHY = pmlSigmaH0Y*pow((gridHY[100-i]-pmlyStart)/(L-pmlyStart),3)
hzNew[:, i] = (2.0*mu0-dt*pmlSigmaHY)/(2.0*mu0+dt*pmlSigmaHY)*hzOld[:, i] +\
2.0*dt/(2.0*mu0+dt*pmlSigmaHY)*(exNew[:-1, i+1]-exNew[:-1, i])/dy -\
2.0*dt/(2.0*mu0+dt*pmlSigmaHY)/dx*(eyNew[1: ,i] - eyNew[:-1 ,i])
# --- Updates output requests ---
probeH[:,:,n] = hzNew[:,:]
probeTime[n] = t
# --- Updates fields and time
exOld[:] = exNew[:]
eyOld[:] = eyNew[:]
hzOld[:] = hzNew[:]
t += dt
print ("Time step: %d of %d" % (n, numberOfTimeSteps-1))
tictoc = time.time() - tic
print('--- Processing finished ---')
print("CPU Time: %f [s]" % tictoc)
# ==== Post-processing ========================================================
# --- Creates animation ---
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
#ax = plt.axes(xlim=(gridE[0], gridE[-1]), ylim=(-1.1, 1.1))
ax.set_xlabel('X coordinate [m]')
ax.set_ylabel('Y coordinate [m]')
line = plt.imshow(probeH[:,:,0], animated=True, vmin=-0.5, vmax=0.5)
timeText = ax.text(0.02, 0.95, '', transform=ax.transAxes)
def init():
line.set_array(probeH[:,:,0])
timeText.set_text('')
return line, timeText
def animate(i):
line.set_array(probeH[:,:,i])
timeText.set_text('Time = %2.1f [ns]' % (probeTime[i]*1e9))
return line, timeText
anim = animation.FuncAnimation(fig, animate, init_func=init,
frames=nSamples, interval=50, blit=True)
plt.show()
print('=== Program finished ===')
``` |
{
"source": "joseck12/award",
"score": 2
} |
#### File: award/reward/views.py
```python
from django.shortcuts import render, redirect
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth.decorators import login_required
from rest_framework.views import APIView
from rest_framework.response import Response
# NOTE: assumed local module paths for the models and serializer used below.
from .models import Profile, Project
from .serializers import ProjectSerializer
# Create your views here.
@login_required(login_url='/accounts/login/')
def welcome(request):
id = request.user.id
profile = Profile.objects.get(user=id)
return render(request, 'index.html',{'profile':profile})
@login_required(login_url='/accounts/login/')
def myprojects(request):
id = request.user.id
profile = Profile.objects.get(user=id)
projects = Project.objects.all().order_by('-pub_date')
return render(request, 'myproject.html',{'projects':projects,'profile':profile})
def convert_dates(dates):
    # NOTE: function body missing in the original source; pass keeps the
    # module importable.
    pass
@login_required(login_url='/accounts/login/')
def password(request):
id = request.user.id
profile = Profile.objects.get(user=id)
return render(request, 'password.html',{'profile':profile})
class ProjectList(APIView):
def get(self, request, format=None):
all_projects = Project.objects.all()
serializers = ProjectSerializer(all_projects, many=True)
return Response(serializers.data)
``` |
{
"source": "joseck12/blog",
"score": 2
} |
#### File: app/main/views.py
```python
from flask import render_template,request,redirect,url_for,abort
from ..models import User,Blog,Comment,Subscriber
from .forms import UpdateProfile,BlogForm,CommentForm,SubscriberForm
from . import main
from .. import db,photos
from flask_login import login_required,current_user
from datetime import datetime
from ..requests import get_quotes
from ..email import mail_message
@main.route('/user/<uname>')
def profile(uname):
user = User.query.filter_by(username = uname).first()
if user is None:
abort(404)
return render_template("profile/profile.html", user = user)
@main.route('/quotes', methods=['GET','POST'])
def quotes():
quotes = get_quotes()
print (quotes)
return render_template('quotes.html',quotes = quotes)
@main.route('/user/<uname>/update',methods = ['GET','POST'])
@login_required
def update_profile(uname):
user = User.query.filter_by(username = uname).first()
if user is None:
abort(404)
form = UpdateProfile()
if form.validate_on_submit():
user.bio = form.bio.data
db.session.add(user)
db.session.commit()
return redirect(url_for('.profile',uname=user.username))
return render_template('profile/update.html',form =form)
@main.route('/user/<uname>/update/pic',methods= ['POST'])
@login_required
def update_pic(uname):
user = User.query.filter_by(username = uname).first()
if 'photo' in request.files:
filename = photos.save(request.files['photo'])
path = f'photos/{filename}'
user.profile_pic_path = path
db.session.commit()
return redirect(url_for('main.profile',uname=uname))
@main.route('/detail', methods=['GET','POST'])
@login_required
def detail():
blog_form=BlogForm()
if blog_form.validate_on_submit():
detail = Blog(category=blog_form.category.data,title = blog_form.title.data)
db.session.add(detail)
db.session.commit()
subscribers = Subscriber.query.all()
for email in subscribers:
mail_message("Hey Welcome To My Blog ","email/welcome_post",email.email,subscribers=subscribers)
return render_template('detail.html',blog_form=blog_form)
@main.route('/', methods=['GET','POST'])
def subscriber():
subscriber_form=SubscriberForm()
if subscriber_form.validate_on_submit():
subscriber= Subscriber(email=subscriber_form.email.data,title = subscriber_form.title.data)
db.session.add(subscriber)
db.session.commit()
mail_message("Hey Welcome To My Blog ","email/welcome_subscriber",subscriber.email,subscriber=subscriber)
subscriber = Blog.query.all()
detail = Blog.query.all()
return render_template('index.html',subscriber=subscriber,subscriber_form=subscriber_form,detail=detail)
@main.route('/comments/<int:id>', methods=['GET','POST'])
def comment(id):
comment_form=CommentForm()
if comment_form.validate_on_submit():
new_comment = Comment(description=comment_form.description.data,blog_id=id)
db.session.add(new_comment)
db.session.commit()
comments = Comment.query.filter_by(blog_id=id)
return render_template('comment.html',comment_form=comment_form,comments=comments)
@main.route('/delete/<int:id>', methods=['GET','POST'])
def delete(id):
try:
if current_user.is_authenticated:
blog = Blog.query.filter_by(id=id).all()
for blogs in blog:
db.session.delete(blogs)
db.session.commit()
return redirect(url_for('main.detail'))
return ''
except Exception as e:
return(str(e))
@main.route('/delete1/<int:id>', methods=['GET','POST'])
def delete1(id):
try:
if current_user.is_authenticated:
comment_form=CommentForm()
            comments = Comment.query.filter_by(comment_id=id).all()
            for comment in comments:
                db.session.delete(comment)
            db.session.commit()
return redirect(url_for('main.comment'))
return ''
except Exception as e:
return(str(e))
```
#### File: python3.6/site-packages/jinja2_markdown.py
```python
import markdown
from jinja2.nodes import CallBlock
from jinja2.ext import Extension
class MarkdownExtension(Extension):
tags = set(['markdown'])
def __init__(self, environment):
super(MarkdownExtension, self).__init__(environment)
environment.extend(
markdowner=markdown.Markdown(extensions=['extra'])
)
def parse(self, parser):
lineno = next(parser.stream).lineno
body = parser.parse_statements(
['name:endmarkdown'],
drop_needle=True
)
return CallBlock(
self.call_method('_markdown_support'),
[],
[],
body
).set_lineno(lineno)
def _markdown_support(self, caller):
block = caller()
block = self._strip_whitespace(block)
return self._render_markdown(block)
def _strip_whitespace(self, block):
lines = block.split('\n')
whitespace = ''
output = ''
if (len(lines) > 1):
for char in lines[1]:
if (char == ' ' or char == '\t'):
whitespace += char
else:
break
for line in lines:
output += line.replace(whitespace, '', 1) + '\r\n'
return output.strip()
def _render_markdown(self, block):
block = self.environment.markdowner.convert(block)
return block
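# A minimal usage sketch (assumption: a Jinja2 environment such as a Flask
# app's `app.jinja_env` is available):
#   app.jinja_env.add_extension('jinja2_markdown.MarkdownExtension')
# after which templates may wrap content in
#   {% markdown %} ... {% endmarkdown %}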
``` |
{
"source": "joseck12/myhood",
"score": 2
} |
#### File: myhood/hoodapp/models.py
```python
from django.db import models
from django.contrib.auth.models import User
from tinymce.models import HTMLField
from django.db import IntegrityError
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.core.validators import MinValueValidator,MaxValueValidator
import numpy as np  # needed by Post.average_rating below
@receiver(post_save,sender=User)
def create_profile(sender, instance,created,**kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(post_save,sender=User)
def save_profile(sender, instance,**kwargs):
instance.profile.save()
# Create your models here.
class hoodpro(models.Model):
hoodpro_name=models.CharField(max_length = 60)
hoodpro_location = models.CharField(max_length = 90)
occupants_count=models.CharField(max_length = 70)
user = models.ForeignKey(User,related_name='hoodpro',null=True)
class Meta:
ordering = ['-id']
def __str__(self):
return self.hoodpro_name
def create_hoodpro(self):
self.save()
def save_hoodpro(self):
self.save()
def delete_hoodpro(self):
self.delete()
def update_hoodpro(self):
self.save()
def update_occupants(self):
self.save()
@classmethod
def find_hoodpro(cls,hoodpro_id):
hoodpro = cls.objects.get(id=hoodpro_id)
return hoodpro
class Location(models.Model):
location = models.CharField(max_length= 30)
def __str__(self):
return self.location
class Profile(models.Model):
username = models.CharField(max_length = 30)
user = models.OneToOneField(User,on_delete=models.CASCADE,related_name='profile',null=True)
profile_image = models.ImageField(upload_to = 'images/')
email = models.EmailField(max_length=70,blank=True)
hoodpro = models.ForeignKey(hoodpro,null=True)
location = models.ForeignKey(Location,null=True)
def __str__(self):
return self.username
def save_profile(self):
self.save()
    def delete_profile(self):
        self.delete()
def update_description(self):
self.save()
def create_profile(self):
self.save()
@classmethod
def search_by_username(cls,search_term):
profiles = cls.objects.filter(username__icontains=search_term)
return profiles
@property
def photo_url(self):
if self.profile_image and hasattr(self.profile_image, 'url'):
return self.profile_image.url
class Post(models.Model):
image = models.ImageField(upload_to = 'images/',null=True)
hoodpro = models.ForeignKey(hoodpro,related_name='post',null=True)
post = models.CharField(max_length = 30)
post_description = models.CharField(max_length = 40,blank=True)
profile = models.ForeignKey(Profile,null=True)
user = models.ForeignKey(User,null=True)
posted_time = models.DateTimeField(auto_now_add=True,null=True)
link = models.URLField(max_length = 80,null=True)
class Meta:
ordering = ['-id']
def __str__(self):
return self.post
def save_image(self):
self.save()
    def delete_image(self):
        self.delete()
def update_caption(self):
self.save()
def average_rating(self):
all_ratings =list(map(lambda x: x.rating, self.review_set.all()))
return np.mean(all_ratings)
def __unicode__(self):
return self.image_name
def create_post(self):
self.save()
class Business(models.Model):
business_name = models.CharField(max_length = 130)
business_email = models.EmailField(max_length=70,blank=True)
profile = models.ForeignKey(Profile,null=True)
hoodpro = models.ForeignKey(hoodpro,null=True)
user = models.ForeignKey(User,null=True)
posted_time = models.DateTimeField(auto_now_add=True,null=True)
product = models.ImageField(upload_to = 'images/',null=True)
class Meta:
ordering = ['-id']
def __str__(self):
return self.business_name
def save_business(self):
self.save()
    def delete_business(self):
        self.delete()
def update_business(self):
self.save()
def create_business(self):
self.save()
@classmethod
def find_business(cls,business_id):
business = cls.objects.get(id=business_id)
return business
@classmethod
def search_by_business_name(cls,search_term):
businesses = cls.objects.filter(business_name__icontains=search_term)
return businesses
``` |
{
"source": "joseck12/password-locker",
"score": 4
} |
#### File: joseck12/password-locker/run.py
```python
from user import User
def create_account(account,username,password):
'''
Function to create a new account
'''
new_user = User(account,username,password)
return new_user
# save users
def save_user(user):
'''
Function to save users
'''
user.save_user()
# delete users
def del_user(user):
'''
Function to delete a users
'''
user.delete_user()
# finding a user
def find_user(account):
'''
Function that finds a user by account and returns the account
'''
return User.find_by_account(account)
# checkif acount exists
def check_existing_users(account):
'''
Function that check if a user exists with that account and return a Boolean
'''
return User.user_exist(account)
# display all users
def display_users():
'''
Function that returns all the saved users
'''
return User.display_users()
def main():
print("Hello Welcome to Password Locker. What is your name?")
user_name = input()
print(f"Hello {user_name}. what would you like to do?")
print('\n')
while True:
print("Use these short codes : ca - create a new account, da - display users, fa -find a user, ex -exit the Password Locker")
short_code = input('Enter : ').lower()
if short_code == 'ca':
print("Sign up for Password Locker")
your_name = input("Enter username: ")
password = input("Enter your password: ")
print('\033[94m')
            print(your_name + ", welcome to Password Locker!")
print('\033[0m')
print("______________ ")
print("Which account details would you like to store?")
account = input("Account name: ")
username = input("What's your username: ")
password = input("Enter your password: ")
save_user(User(account, username, password)) # create and save new account.
print ('\033[93m \n')
print(f"Successful new User {username} stored")
print ( '\033[0m \n')
elif short_code == 'da':
if display_users():
print("Here is a list of all your users")
print('\n')
for user in display_users():
print(f"{user.username}")
print('\n')
else:
print('\n')
print("You dont seem to have any users saved yet")
print('\n')
elif short_code == 'fa':
print("Enter the username you want to search for")
search_username = input()
if check_existing_users(search_username):
search_user = find_user(search_username)
print(f"{search_user.username}")
print('-' * 20)
# print(f"Phon.......{search_contact.phone_number}")
# print(f"Email address.......{search_contact.email}")
else:
print("That user does not exist")
elif short_code == "ex":
print("Bye .......")
break
else:
print("I really didn't get that. Please use the short codes")
if __name__ == '__main__':
main()
```
#### File: joseck12/password-locker/test_user.py
```python
import unittest # Importing the unittest module
import pyperclip
from user import User # Importing the user class
class TestUser(unittest.TestCase):
'''
Test class that defines test cases for the user class behaviours.
Args:
unittest.TestCase: TestCase class that helps in creating test cases
'''
# Items up here .......
def setUp(self):
'''
Set up method to run before each test cases.
'''
self.new_user = User("Tweeter", "Mercy", "0000") # create user object
def test_init(self):
'''
test_init test case to test if the object is initialized properly
'''
self.assertEqual(self.new_user.account,"Tweeter")
self.assertEqual(self.new_user.username,"Mercy")
        self.assertEqual(self.new_user.password, "0000")
def test_save_user(self):
'''
test_save_user test case to test if the user object is saved into
the user list
'''
self.new_user.save_user() # saving the new user
self.assertEqual(len(User.user_list),1)
# setup and class creation up here
def tearDown(self):
'''
tearDown method that does clean up after each test case has run.
'''
User.user_list = []
def test_save_multiple_user(self):
'''
        test_save_multiple_user to check if we can save multiple user
        objects to our user_list
'''
self.new_user.save_user()
test_user = User("Tweeter", "Mercy", "00000") # new user
test_user.save_user()
self.assertEqual(len(User.user_list),2)
def test_delete_user(self):
self.new_user.save_user()
test_user = User("Tweeter", "Mercy", "00000") # new user
test_user.save_user()
self.new_user.delete_user()# Deleting a user object
self.assertEqual(len(User.user_list),1)
def test_find_user_by_account(self):
'''
test to check if we can find a user by account and display information
'''
self.new_user.save_user()
test_user = User("Tweeter", "Mercy", "00000") # new user
test_user.save_user()
found_user = User.find_by_account("Tweeter")
self.assertEqual(found_user.username,test_user.username)
def test_user_exists(self):
self.new_user.save_user()
test_user = User("Tweeter", "Mercy", "00000")
test_user.save_user()
user_exists = User.user_exist("Tweeter")
self.assertTrue(user_exists)
def test_display_all_users(self):
'''
        Test that display_users returns the list of all saved users
'''
self.assertEqual(User.display_users(),User.user_list)
    def test_copy_username(self):
        '''
        Test to confirm that we are copying the username from a found user
        '''
        # Stub left unimplemented in the source; a complete test would save a
        # user, call the copy method, and assert pyperclip.paste() matches.
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "joseck12/picturegram",
"score": 2
} |
#### File: picturegram/pictureapp/views.py
```python
from django.http import Http404
from django.shortcuts import render,redirect
from . models import Image ,Profile, Like, Follow, Comment
import datetime as dt
from django.contrib.auth.decorators import login_required
from django.conf import settings
from . forms import ImageForm, CommentForm, ProfileUpdateForm,UpdateImageCaption
import os
from django.template.defaulttags import register
@login_required(login_url='/accounts/login/')
def user_page(request):
date = dt.date.today()
current_user = request.user
followed_people= []
images1 =[]
following = Follow.objects.filter(follower = current_user)
is_following = Follow.objects.filter(follower = current_user).count()
try:
if is_following != 0:
            for following_object in following:
                image_set = Profile.objects.filter(id=following_object.user.id)
for item in image_set:
followed_people.append(item)
for followed_profile in followed_people:
post = Image.objects.filter(user_key = followed_profile.user)
for item in post:
images1.append(item)
            images = list(reversed(images1))
return render(request, 'my-grams/user_page.html',{"date":date,"user_page_images":images})
    except Exception:
raise Http404
return render(request, 'my-grams/startup.html')
@login_required(login_url='/accounts/login/')
def search_result(request):
if 'name' in request.GET and request.GET["name"]:
search_name = request.GET.get("name")
found_users = Profile.find_profile(search_name)
message =f"{search_name}"
return render(request,'my-grams/search_result.html',{"message":message,"found_users":found_users})
else:
message = "Please enter a valid username"
return render(request,'my-grams/search_result.html',{"message":message})
@login_required(login_url='/accounts/login/')
def single_user(request,id):
try:
user = Profile.objects.get(id=id)
    except Profile.DoesNotExist:
raise Http404()
return render(request,'my-grams/single.html',{"user":user})
@login_required(login_url='/accounts/login/')
def image(request,image_id):
try:
image = Image.objects.get(id= image_id)
    except Image.DoesNotExist:
raise Http404()
return render(request, 'my-grams/image.html',{"image":image})
@login_required(login_url='/accounts/login/')
def post(request):
'''
View function that displays a forms that allows users to upload images
'''
current_user = request.user
if request.method == 'POST':
form = ImageForm(request.POST ,request.FILES)
if form.is_valid():
image = form.save(commit = False)
image.user_key = current_user
            # likes defaults to 0; no increment is needed at upload time
image.save()
return redirect( user_page)
else:
form = ImageForm()
return render(request, 'my-grams/post.html',{"form" : form})
@login_required(login_url='/accounts/login/')
def comment(request, image_id):
comments = Comment.objects.filter(image_id=image_id)
current_image = Image.objects.get(id=image_id)
current_user = request.user
if request.method == 'POST':
form = CommentForm(request.POST)
if form.is_valid():
comment = form.save(commit = False)
comment.user_id= current_user
comment.image_id = current_image
current_image.comments_number+=1
current_image.save_image()
comment.save()
return redirect(user_page)
else:
form = CommentForm()
return render(request,'my-grams/comment.html',{"form":form,"comments":comments})
@login_required(login_url='/accounts/login/')
def update_profile(request):
current_user = request.user
title = 'Update Profile'
try:
requested_profile = Profile.objects.get(user_id = current_user.id)
if request.method == 'POST':
form = ProfileUpdateForm(request.POST,request.FILES)
if form.is_valid():
requested_profile.profile_photo = form.cleaned_data['profile_photo']
requested_profile.bio = form.cleaned_data['bio']
requested_profile.username = form.cleaned_data['username']
requested_profile.save_profile()
return redirect( profile )
else:
form = ProfileUpdateForm()
    except Profile.DoesNotExist:
if request.method == 'POST':
form = ProfileUpdateForm(request.POST,request.FILES)
if form.is_valid():
new_profile = Profile(profile_photo= form.cleaned_data['profile_photo'],bio = form.cleaned_data['bio'],username = form.cleaned_data['username'],user = current_user)
new_profile.save_profile()
return redirect( profile )
else:
form = ProfileUpdateForm()
return render(request,'profile/update_profile.html',{"title":title,"current_user":current_user,"form":form})
@login_required(login_url='/accounts/login/')
def profile(request):
title = 'Profile'
current_user = request.user
try:
profile = Profile.objects.get(user_id = current_user)
following = Follow.objects.filter(follower = current_user)
followers = Follow.objects.filter(user = profile)
    except Profile.DoesNotExist:
profile = Profile.objects.get(username = 'default_user')
following = Follow.objects.filter(follower = current_user)
followers = Follow.objects.filter(user = profile)
return render(request, 'profile/profile.html',{"profile":profile,"current_user":current_user,"following":following,"followers":followers})
@login_required(login_url='/accounts/login/')
def more(request,image_id):
image = Image.objects.get(id = image_id)
current_user = request.user
update_image = Image.objects.get(id= image_id)
if request.method == 'POST':
form = UpdateImageCaption(request.POST)
if form.is_valid():
new_caption = form.cleaned_data['image_caption']
update_image.image_caption = new_caption
update_image.save_image()
return redirect( more ,image_id)
else:
form = UpdateImageCaption()
return render(request,'my-grams/more.html',{"image":image, "form":form})
@login_required(login_url='/accounts/login/')
def view_profiles(request):
all_profiles = Profile.objects.all()
return render(request,'profile/all.html',{"all_profiles":all_profiles})
@login_required(login_url='/accounts/login/')
def follow(request,profile_id):
current_user = request.user
requested_profile = Profile.objects.get(id = profile_id)
is_following = Follow.objects.filter(follower = current_user,user = requested_profile).count()
follow_object = Follow.objects.filter(follower = current_user,user = requested_profile)
if is_following == 0:
follower = Follow(follower = current_user,user = requested_profile)
follower.save()
return redirect(view_profiles)
else:
follow_object.delete()
return redirect(view_profiles)
@login_required(login_url='/accounts/login/')
def like(request,image_id):
requested_image = Image.objects.get(id = image_id)
current_user = request.user
if_voted = Like.objects.filter(image = requested_image,user = current_user).count()
unlike_parameter = Like.objects.filter(image = requested_image,user = current_user)
if if_voted==0:
requested_image.likes +=1
requested_image.save_image()
like = Like(user = current_user, image = requested_image )
like.save_like()
return redirect(user_page)
else:
requested_image.likes -=1
requested_image.save_image()
for single_unlike in unlike_parameter:
single_unlike.unlike()
return redirect(user_page)
@login_required(login_url='/accounts/login/')
def fist_time(request):
return render(request, 'my-grams/startup.html')
def test(request):
return render(request, 'my-grams/test.html')
``` |
{
"source": "joseclaris6500/UserJorunals",
"score": 4
} |
#### File: joseclaris6500/UserJorunals/app.py
```python
from database import create_table, add_entry, get_entry
menu = """Please select one of the following options:
(1) Add new entry for today
(2) View entries
(3) Exit
Your selection: """
welcome_mess = "Welcome to your daily journals"
def prompt_new_entry():
entry_content = input("What have you learned today?")
entry_date = input("Enter the date: ")
add_entry(entry_content, entry_date)
def view_entry(entries):
for entry in entries:
print(f"{entry['date']}\n{entry['content']}\n\n")
print(welcome_mess)
create_table()
while (user_input := input(menu)) != "3":
if user_input == "1":
prompt_new_entry()
elif user_input == "2":
view_entry(get_entry())
else:
print("Invalid option, please tyr again")
user_input = input(menu)
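# For reference, `database.py` (imported above but not shown here) could be a
# minimal sqlite3 wrapper along these lines -- a sketch under assumptions, not
# the repository's actual implementation:
# import sqlite3
# conn = sqlite3.connect('journal.db')
# def create_table():
#     conn.execute('CREATE TABLE IF NOT EXISTS entries (content TEXT, date TEXT)')
# def add_entry(content, date):
#     with conn:
#         conn.execute('INSERT INTO entries VALUES (?, ?)', (content, date))
# def get_entry():
#     return [{'content': c, 'date': d}
#             for c, d in conn.execute('SELECT content, date FROM entries')]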
``` |
{
"source": "josecolella/dcrum-site-converter",
"score": 2
} |
#### File: josecolella/dcrum-site-converter/siteConverter.py
```python
import pandas as pd
import ipcalc
import os
import typing
class SiteDefinitionException(Exception):
    def __init__(self, message, errors):
        super(SiteDefinitionException, self).__init__(message)
        self.errors = errors
class SiteConverter(object):
# Dictionary with the formal site definition and default values
DCRUM_SITE_COLUMNS = {
"Id": '',
"Name": '',
"Site Type": 'Manual',
"Region": '',
"Area": '',
"UDL": 'false',
"WAN": 'false',
"Link Speed In": '',
"Link Speed Out": '',
"Comment": '',
"Domains": ''
}
"""docstring for ClassName"""
def __init__(self, file):
file_name, extension = os.path.splitext(file)
        # os.path.splitext keeps the leading dot, so compare against '.csv'
        if extension == '.csv':
self.df = pd.read_csv(file)
else:
self.df = pd.read_excel(file)
self.original_columns = self.df.columns.values
def clean_to_site_definition(self, mappings: dict, translateNetworkMasks=False):
# Optimize this more
for column, value in SiteConverter.DCRUM_SITE_COLUMNS.items():
            if mappings.get(column) and mappings[column] != value:
try:
self.df[column] = self.df[mappings[column]]
except KeyError:
self.df[column] = mappings[column]
else:
self.df[column] = value
if translateNetworkMasks:
def convertNetworkMask(ipWithNetworkMask: str):
ipRanges = tuple(str(ip)
for ip in ipcalc.Network(ipWithNetworkMask)
)
return "{beginningIP}-{endingIP}".format(
beginningIP=ipRanges[0], endingIP=ipRanges[-1])
self.df['Domains'] = self.df[
'Domains'].apply(convertNetworkMask)
def transform_to_site_definition(self):
self.df = self.df[list(SiteConverter.DCRUM_SITE_COLUMNS.keys())]
def save_to_site_definition(self, file_name: str, sep: str=';'):
self.df.to_csv(file_name, sep=sep, index=False)
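# Usage sketch (hypothetical file names; `mappings` must provide a value for
# every key in SiteConverter.DCRUM_SITE_COLUMNS):
# converter = SiteConverter('sites.xlsx')
# converter.clean_to_site_definition(mappings, translateNetworkMasks=True)
# converter.transform_to_site_definition()
# converter.save_to_site_definition('dcrum_sites.csv')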
``` |
{
"source": "josecolella/Scurry-Challenge",
"score": 4
} |
#### File: josecolella/Scurry-Challenge/solution.py
```python
import itertools
import typing
def fizz_buzz(beginning_number: int = 1, ending_number: int = 100, key: int = None):
"""Returns a generator that yields an integer or a string after verifying
if the integer from `beginning_number` to `ending_number` is divisible by 3, 5
or 15
Args:
beginning_number: The integer that denotes the beginning of the sequence
ending_number: The integer that denotes the end of the sequence
key: An optional integer that when specified only returns the result for the key
Returns:
A generator that yields an integer or a string based on the following conditions:
- the string is `ThreeFive` if the integer is divisible by 3 and 5,
-`Three` if integer is only divisible by 3,
- `Five` if integer is only divisible by 5.
- the integer if none of the above conditions apply
"""
if beginning_number > ending_number:
raise ValueError(
"`beginning_number` should be less than `ending_number`")
    if beginning_number < 0:
        raise ValueError("`beginning_number` must be non-negative")
    if ending_number < 0:
        raise ValueError("`ending_number` must be non-negative")
result_list = None
# Create set with range from `beginning_number` to `ending_number`
# inclusive
all_numbers = set(range(beginning_number, ending_number + 1))
# Create two sets; 1. Multiple of 3. 2. Multiple of 5
multiple_three = set((number for number in all_numbers if number % 3 == 0))
multiple_five = set((number for number in all_numbers if number % 5 == 0))
    # Create a set with the remaining numbers that do not fulfill the
# conditions
remaining_numbers = all_numbers.difference(multiple_three, multiple_five)
    # Set that is the intersection of numbers that are multiples of 3 and
    # multiples of 5
multiple_both = multiple_three.intersection(multiple_five)
# Set that contains multiples of three that are not also multiples of 5
multiple_three_restrict = multiple_three.difference(multiple_both)
# Set that contains multiples of five that are not also multiples of 3
multiple_five_restrict = multiple_five.difference(multiple_both)
# Create transformations for printing
transform_remaining_numbers = ((number, '')
for number in remaining_numbers)
transform_multiple_both = ((number, 'ThreeFive')
for number in multiple_both)
transform_multiple_five = ((number, 'Five')
for number in multiple_five_restrict)
transform_multiple_three = ((number, 'Three')
for number in multiple_three_restrict)
# Create sorted list, that is sorted based on the integer
transformed_result_list = sorted(
itertools.chain(transform_remaining_numbers,
transform_multiple_both,
transform_multiple_five,
transform_multiple_three
), key=lambda x: x[0])
    # Helper that returns the integer when the tuple's string part is ''
    def clean_result_tuple(result_tuple):
        return result_tuple[0] if result_tuple[1] == '' else result_tuple[1]
# Create clean result generator
result_generator = (clean_result_tuple(trans_tuple)
for trans_tuple in transformed_result_list)
if key is not None:
result_generator = itertools.islice(result_generator, key - 1, key)
return result_generator
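# Quick usage check (not in the original file; a minimal demo of fizz_buzz):
if __name__ == '__main__':
    # Expected: 1 2 Three 4 Five Three 7 8 Three Five 11 Three 13 14 ThreeFive
    print(' '.join(str(item) for item in fizz_buzz(1, 15)))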
``` |
{
"source": "Josecolin99/Data-Fake",
"score": 3
} |
#### File: Data-Fake/fakedata/__main__.py
```python
import logging
from fakedata.fakedata import FakeData
logging.basicConfig(level=logging.DEBUG)
def main():
nombreRandom = FakeData.RandomName()
sex = FakeData.CheckSex(nombreRandom)
nombreOne = FakeData.RandomOneName()
sexOne = FakeData.CheckSex(nombreOne)
nombreTwo = FakeData.RandomTwoName()
sexTwo = FakeData.CheckSex(nombreTwo)
email = FakeData.RandomEmail(nombreRandom)
emailPersonalizado = FakeData.PersonalizedEmail(nombreRandom, 'outlook.com')
sexRandom = FakeData.RandomSex()
tel = FakeData.RandomTel()
    print(f'Nombre random: {nombreRandom}')
print(f'Email random: {email}')
print(f'Sexo : {sex}')
print(f'Email personalizado: {emailPersonalizado}')
print(f'Un solo nombre: {nombreOne}')
print(f'Sexo One: {sexOne}')
print(f'Dos nombre: {nombreTwo}')
print(f'Sexo Two: {sexTwo}')
print(f'Sexo Random: {sexRandom}')
print(f'Telefono: {tel}')
if __name__ == '__main__':
    # The original counter loop ran exactly once; a single call is equivalent
    logging.debug("Ejecutando pruebas FakeData".center(50, '-'))
    main()
    logging.debug("Finalizando pruebas FakeData".center(50, '-'))
``` |
{
"source": "josecols/fundahog",
"score": 2
} |
#### File: fundahog/blog/models.py
```python
from django.db import models
from django.contrib.auth.models import User
from django.template.defaultfilters import slugify
class Categoria(models.Model):
descripcion = models.CharField(max_length=100, unique=True,
verbose_name="descripción")
slug = models.SlugField(max_length=100, editable=False)
class Meta:
verbose_name = "categoría"
verbose_name_plural = "categorías"
def __unicode__(self):
return self.descripcion
    def save(self, *args, **kwargs):
        if not self.id:
            self.slug = slugify(self.descripcion)
        super(Categoria, self).save(*args, **kwargs)
class Entrada(models.Model):
titulo = models.CharField(max_length=100, unique=True,
verbose_name="título")
contenido = models.TextField()
categorias = models.ManyToManyField(Categoria)
importante = \
models.BooleanField(help_text='Una entrada marcada como importante aparecerá en todo el sitio.'
)
autor = models.ForeignKey(User, editable=False)
slug = models.SlugField(max_length=100, editable=False)
creado = models.DateTimeField(editable=False, auto_now_add=True)
def __unicode__(self):
return self.titulo
    def save(self, *args, **kwargs):
        if not self.id:
            self.slug = slugify(self.titulo)
        super(Entrada, self).save(*args, **kwargs)
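# Illustrative behaviour (not part of the original file): the overridden save
# sets the slug only on first save, e.g. Entrada(titulo='Hola Mundo', ...)
# gets slug 'hola-mundo' once and keeps it on later saves because self.id is set.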
``` |
{
"source": "josecols/steps",
"score": 3
} |
#### File: josecols/steps/calibrar.py
```python
import cv2, numpy, sys, pickle
from detector import Detector
AREA_MIN = 1000
AREA_MAX = 10000
AZUL_MIN = numpy.array([100, 150, 110], numpy.uint8)
AZUL_MAX = numpy.array([130, 255, 255], numpy.uint8)
BLANCO_MIN = cv2.mean((200, 200, 200))
BLANCO_MAX = cv2.mean((255, 255, 255))
class Calibrador():
def __init__(self, camara):
self.detector = Detector(camara, True, False, AREA_MIN, AREA_MAX, BLANCO_MIN, BLANCO_MAX, AZUL_MIN, AZUL_MAX)
def calibrar(self, tipo):
if (tipo == "franjas"):
self.franjas = open("franjas.obj", "wb")
self.detector.detectarFranjas()
elif (tipo == "zapato"):
self.yMin = open("limite.txt", "w")
self.detector.detectarZapatos()
else:
print("No se reconoce el parámetro: '%s'" % tipo)
sys.exit(0)
def guardar(self, tipo):
if (tipo == "franjas"):
pickle.dump(self.detector.getFranjas(), self.franjas)
self.franjas.close()
elif (tipo == "zapato"):
self.yMin.write("%d" % self.detector.getZapatos()[0].getYMin())
self.yMin.close()
if __name__ == "__main__":
if (len(sys.argv) > 2):
calibrador = Calibrador(int(sys.argv[2]))
else:
calibrador = Calibrador(0)
while True:
calibrador.calibrar(sys.argv[1])
if (cv2.waitKey(27) != -1):
calibrador.guardar(sys.argv[1])
break
``` |
{
"source": "JOSECONDORI5/API-marketplace",
"score": 2
} |
#### File: api/views/general_views.py
```python
from rest_framework import generics
from rest_framework import viewsets
from rest_framework.response import Response
from apps.base.api import GeneralListApiView
from apps.products.models import MeasureUnit, Indicator, CategoryProduct
from apps.products.api.serializers.general_serializers import MeasureUnitSerializer, IndicatorSerializer, CategoryProductSerializer
class MeasureUnitViewSet(viewsets.GenericViewSet):
"""
    Hello from measure unit
"""
model = MeasureUnit
serializer_class = MeasureUnitSerializer
def get_queryset(self):
return self.get_serializer().Meta.model.objects.filter(state=True)
def list(self, request):
"""
        Returns all available measure units
        params.
        name → name of the measure unit
"""
data = self.get_queryset()
data = self.get_serializer(data, many=True)
return Response(data.data)
def create(self, request):
return Response({})
class IndicatorViewSet(viewsets.ModelViewSet):
serializer_class = IndicatorSerializer
def get_queryset(self):
return self.get_serializer().Meta.model.objects.filter(state=True)
class CategoryProductViewSet(viewsets.ModelViewSet):
serializer_class = CategoryProductSerializer
def get_queryset(self):
return self.get_serializer().Meta.model.objects.filter(state=True)
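# Hypothetical router wiring (not part of this file) showing how these
# viewsets are typically exposed; the URL prefixes are assumptions:
# from rest_framework.routers import DefaultRouter
# router = DefaultRouter()
# router.register(r'measure-units', MeasureUnitViewSet, basename='measure-unit')
# router.register(r'indicators', IndicatorViewSet, basename='indicator')
# router.register(r'categories', CategoryProductViewSet, basename='category')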
```
#### File: api/views/product_viewsets.py
```python
from rest_framework import generics, status
from rest_framework import viewsets
from rest_framework.response import Response
from apps.base.api import GeneralListApiView
from apps.products.api.serializers.product_serializer import ProductSerializer
from apps.users.authentication_mixins import Authentication
class ProductViewSet(Authentication, viewsets.ModelViewSet):
serializer_class = ProductSerializer
def get_queryset(self, pk=None):
if pk is None:
return self.get_serializer().Meta.model.objects.filter(state=True)
return self.get_serializer().Meta.model.objects.filter(id=pk, state=True).first()
def list(self, request):
product_serializer = self.get_serializer(self.get_queryset(), many=True)
# print(self.user)
return Response(product_serializer.data, status=status.HTTP_200_OK)
def create(self, request):
# Send information to serializer
serializer = self.serializer_class(data=request.data)
if serializer.is_valid():
serializer.save()
return Response({'message': 'Producto creado correctamente!'}, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def update(self, request, pk=None):
if self.get_queryset(pk):
# Send information to serializer referencing the instance
product_serializer = self.serializer_class(self.get_queryset(pk), data=request.data)
if product_serializer.is_valid():
product_serializer.save()
return Response(product_serializer.data, status=status.HTTP_200_OK)
return Response(product_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, pk=None):
product = self.get_queryset().filter(id=pk).first()
if product:
product.state = False
product.save()
return Response({'message': 'Producto eliminado correctamente!'}, status=status.HTTP_200_OK)
return Response({'error': 'No existe un producto con estos datos!'}, status=status.HTTP_400_BAD_REQUEST)
# class ProductListAPIView(GeneralListApiView):
# serializer_class = ProductSerializer
# class ProductListCreateAPIView(generics.ListCreateAPIView):
# serializer_class = ProductSerializer
# queryset = ProductSerializer.Meta.model.objects.filter(state=True)
#
# # def get_queryset(self):
# # assert
# # return self.queryset
#
# def post(self, request):
# # Send information to serializer
# serializer = self.serializer_class(data=request.data)
# if serializer.is_valid():
# serializer.save()
# return Response({'message': 'Producto creado correctamente!'}, status=status.HTTP_201_CREATED)
# return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
#
# class ProductRetrieveUpdateDestroyAPIView(generics.RetrieveUpdateDestroyAPIView):
# serializer_class = ProductSerializer
#
# def get_queryset(self, pk=None):
# if pk is None:
# return self.get_serializer().Meta.model.objects.filter(state=True)
# else:
# return self.get_serializer().Meta.model.objects.filter(id=pk, state=True).first()
#
# def patch(self, request, pk=None):
# # product = self.get_queryset().filter(id=pk).first()
# # if product:
# if self.get_queryset(pk):
# # product_serializer = self.serializer_class(product)
# product_serializer = self.serializer_class(self.get_queryset(pk))
# return Response(product_serializer.data, status=status.HTTP_200_OK)
# return Response({'error': 'No existe un producto con estos datos!'}, status=status.HTTP_400_BAD_REQUEST)
#
# def put(self, request, pk=None):
# if self.get_queryset(pk):
# # Send information to serializer referencing the instance
# product_serializer = self.serializer_class(self.get_queryset(pk), data=request.data)
# if product_serializer.is_valid():
# product_serializer.save()
# return Response(product_serializer.data, status=status.HTTP_200_OK)
# return Response(product_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
#
# def delete(self, request, pk=None):
# product = self.get_queryset().filter(id=pk).first()
# if product:
# product.state = False
# product.save()
# return Response({'message': 'Producto eliminado correctamente!'}, status=status.HTTP_200_OK)
# return Response({'error': 'No existe un producto con estos datos!'}, status=status.HTTP_400_BAD_REQUEST)
# def get(self, request, pk=None):
# pass
# class ProductDestroyAPIView(generics.DestroyAPIView):
# serializer_class = ProductSerializer
#
# def get_queryset(self):
# return self.get_serializer().Meta.model.objects.filter(state=True)
#
# def delete(self, request, pk=None):
# product = self.get_queryset().filter(id=pk).first()
# if product:
# product.state = False
# product.save()
# return Response({'message': 'Producto eliminado correctamente!'}, status=status.HTTP_200_OK)
# return Response({'error': 'No existe un producto con estos datos!'}, status=status.HTTP_400_BAD_REQUEST)
# class ProductUpdateAPIView(generics.UpdateAPIView):
# serializer_class = ProductSerializer
#
# def get_queryset(self, pk):
# # return self.get_serializer().Meta.model.objects.filter(state=True)
# return self.get_serializer().Meta.model.objects.filter(state=True).filter(id=pk).first()
#
# def patch(self, request, pk=None):
# # product = self.get_queryset().filter(id=pk).first()
# # if product:
# if self.get_queryset(pk):
# # product_serializer = self.serializer_class(product)
# product_serializer = self.serializer_class(self.get_queryset(pk))
# return Response(product_serializer.data, status=status.HTTP_200_OK)
# return Response({'error': 'No existe un producto con estos datos!'}, status=status.HTTP_400_BAD_REQUEST)
#
# def put(self, request, pk=None):
# if self.get_queryset(pk):
# product_serializer = self.serializer_class(self.get_queryset(pk), data=request.data)
# if product_serializer.is_valid():
# product_serializer.save()
# return Response(product_serializer.data, status=status.HTTP_200_OK)
# return Response(product_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
```
#### File: apps/users/models.py
```python
from django.contrib.auth.models import PermissionsMixin, BaseUserManager, AbstractBaseUser
from django.db import models
from simple_history.models import HistoricalRecords
class UserManager(BaseUserManager):
def _create_user(self, username, email, name, lastname, password, is_staff, is_superuser, **extra_fields):
user = self.model(
username=username,
email=email,
name=name,
lastname=lastname,
is_staff=is_staff,
is_superuser=is_superuser,
**extra_fields
)
user.set_password(password)
user.save(using=self.db)
return user
    def create_user(self, username, email, name, lastname, password=None, **extra_fields):
        return self._create_user(username, email, name, lastname, password, True, False, **extra_fields)
    def create_superuser(self, username, email, name, lastname, password=None, **extra_fields):
        return self._create_user(username, email, name, lastname, password, True, True, **extra_fields)
class User(AbstractBaseUser, PermissionsMixin):
username = models.CharField(max_length=255, unique=True)
email = models.EmailField('Correo electrónico', max_length=255, unique=True)
name = models.CharField('Nombres', max_length=255, blank=True, null=True)
lastname = models.CharField('Apellidos', max_length=255, blank=True, null=True)
image = models.ImageField('Imagen de perfil', upload_to='perfil/', max_length=255, blank=True, null=True)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
historical = HistoricalRecords()
objects = UserManager()
class Meta:
verbose_name = 'Usuario'
verbose_name_plural = 'Usuarios'
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email', 'name', 'lastname']
def natural_key(self):
return self.username
def __str__(self):
return f'{self.name} {self.lastname}'
# def save(self, *args, **kwargs):
# print('Save')
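# Illustrative usage (assumes a configured Django project; values are made up):
# user = User.objects.create_user('jdoe', 'jdoe@example.com', 'John', 'Doe',
#                                 password='s3cret')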
``` |
{
"source": "josecope/E05a-Animation",
"score": 4
} |
#### File: josecope/E05a-Animation/main1.py
```python
"""
This simple animation example shows how to move an item with the mouse, and
handle mouse clicks.

If Python and Arcade are installed, this example can be run from the command line with:
python -m arcade.examples.move_mouse
"""
import arcade
SCREEN_WIDTH = 640
SCREEN_HEIGHT = 480
SCREEN_TITLE = "Move Mouse Example"
# these constants define the window that the animation will run in
class Ball:
def __init__(self, position_x, position_y, radius, color):
# Take the parameters of the init function above, and create instance variables out of them.
self.position_x = position_x
self.position_y = position_y
self.radius = radius
self.color = color
def draw(self):
""" Draw the balls with the instance variables we have. """
arcade.draw_circle_filled(self.position_x, self.position_y, self.radius, self.color)
class MyGame(arcade.Window):
def __init__(self, width, height, title):
# Call the parent class's init function
super().__init__(width, height, title)
# Make the mouse disappear when it is over the window.
# So we just see our object, not the pointer.
self.set_mouse_visible(False)
arcade.set_background_color(arcade.color.ASH_GREY)
# this command sets the background of the page/animation
# Create our ball
self.ball = Ball(50, 50, 15, arcade.color.AUBURN)
# this line sets the color and size of the ball to be animated
def on_draw(self):
""" Called whenever we need to draw the window. """
arcade.start_render()
self.ball.draw()
def on_mouse_motion(self, x, y, dx, dy):
""" Called to update our objects. Happens approximately 60 times per second.""" # 60 times per second = 60FPS
self.ball.position_x = x
self.ball.position_y = y
def on_mouse_press(self, x, y, button, modifiers):
"""
Called when the user presses a mouse button.
"""
print(f"You clicked button number: {button}")
if button == arcade.MOUSE_BUTTON_LEFT:
self.ball.color = arcade.color.BLACK
# when the ball is left-clicked, it will turn black
def on_mouse_release(self, x, y, button, modifiers):
"""
Called when a user releases a mouse button.
"""
if button == arcade.MOUSE_BUTTON_LEFT:
self.ball.color = arcade.color.AUBURN
# this returns the ball to its original color
def main():
window = MyGame(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
arcade.run()
if __name__ == "__main__":
main()
``` |
{
"source": "JoseCordobaEAN/EstructurasDeDatosUE4P",
"score": 4
} |
#### File: EstructurasDeDatosUE4P/Clase2/Vehiculo.py
```python
class Vehiculo:
color = ""
ruedas = 0
energia = 0
nombre = ""
def __init__(self, nombre, ruedas, energia, color):
self.nombre = nombre
self.ruedas = ruedas
self.energia = energia
self.color = color
def __repr__(self):
return f'Este es un vehiculo llamado {self.nombre}' \
f' de color {self.color} ' \
f'tiene {self.ruedas} ruedas ' \
f'y {self.energia} energia'
def __eq__(self, other):
return self.nombre == other.nombre \
and self.color == other.color \
and self.ruedas == other.ruedas \
and self.energia == other.energia
def mover(self, cantidad):
if cantidad <= self.energia:
print(f'El vehiculo {self.nombre} se mueve {cantidad}')
self.energia -= cantidad
else:
raise Exception(f'El vehiculo {self.nombre} no tiene energía para '
f'moverse {cantidad}')
def tanquear(self, cantidad):
if cantidad > 0:
self.energia += cantidad
else:
raise Exception(f'No puede tanquear con {cantidad}')
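# Example run (illustrative; not part of the original file):
if __name__ == '__main__':
    v = Vehiculo('moto', 2, 10, 'rojo')
    v.mover(4)       # prints the movement message; energia drops to 6
    v.tanquear(5)    # energia rises to 11
    print(v)         # formatted by __repr__ above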
```
#### File: EstructurasDeDatosUE4P/Clase6/pila.py
```python
class Nodo:
elemento = None
Siguiente = None
def __init__(self, elemento, siguiente):
self.elemento = elemento
self.Siguiente = siguiente
class Pila:
tamano = 0
top = None
def apilar(self, elemento):
"""
        Push an element onto the top of the stack
        :param elemento: any element
        :return: None
"""
nuevo_nodo = Nodo(elemento, self.top)
self.top = nuevo_nodo
self.tamano += 1
def desapilar(self):
"""
        Return the element at the top of the stack and remove it
        :return: the element at the top of the stack
"""
if self.tamano > 0:
elemento_auxiliar = self.top.elemento
self.top = self.top.Siguiente
self.tamano -= 1
return elemento_auxiliar
raise IndexError('La pila esta vacía')
def mirar(self):
"""
        Peek at the element on top of the stack without removing it
        :return: the element at the top of the stack
"""
return self.top.elemento
def es_vacia(self):
return self.tamano == 0
def invertir(self):
auxiliar = Pila()
nodo_auxiliar = self.top
for i in range(self.tamano):
auxiliar.apilar(nodo_auxiliar.elemento)
nodo_auxiliar = nodo_auxiliar.Siguiente
return auxiliar
def copiar(self):
return self.invertir().invertir()
    def __repr__(self):
        resultado = []
        # Work on a copy so printing the stack does not empty it
        auxiliar = self.copiar()
        while not auxiliar.es_vacia():
            resultado.append(auxiliar.desapilar())
        resultado.reverse()
        return str(resultado)
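# Usage sketch (not part of the original file): push, peek, pop.
if __name__ == '__main__':
    p = Pila()
    p.apilar(1)
    p.apilar(2)
    print(p.mirar())      # 2
    print(p.desapilar())  # 2
    print(p.es_vacia())   # False: 1 is still on the stack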
``` |
{
"source": "JoseCordobaEAN/estructuras_de_datos_UE",
"score": 4
} |
#### File: estructuras_de_datos_UE/sesion_5/test_producto.py
```python
from unittest import TestCase
from recursion2 import producto
class TestProducto(TestCase):
def test_producto(self):
        # Standard case: positive * positive
dado_n1 = 5
dado_n2 = 8
espero = 40
realmente = producto(dado_n1, dado_n2)
self.assertEqual(espero, realmente)
        # Edge case: multiplying by zero
dado_n1 = 8
dado_n2 = 0
espero = 0
realmente = producto(dado_n1, dado_n2)
self.assertEqual(espero, realmente)
        # Standard case: negative * positive
dado_n1 = -5
dado_n2 = 8
espero = -40
realmente = producto(dado_n1, dado_n2)
self.assertEqual(espero, realmente)
        # Standard case: negative * negative
dado_n1 = -5
dado_n2 = -8
espero = 40
realmente = producto(dado_n1, dado_n2)
self.assertEqual(espero, realmente)
        # Standard case: positive * negative
dado_n1 = 5
dado_n2 = -8
espero = -40
realmente = producto(dado_n1, dado_n2)
self.assertEqual(espero, realmente)
``` |
{
"source": "JoseCordobaEAN/refuerzo_programacion_2018_1",
"score": 3
} |
#### File: sesion_10/herencia2/Carro.py
```python
from sesion_10.herencia2.Vehiculo import Vehiculo
class Carro(Vehiculo):
    def __init__(self, cambios=4, pasajeros=4, ruedas=4):
        """
        Create a new car
        :param cambios: the car's number of gears
        :param pasajeros: the number of passengers
        :param ruedas: the number of wheels
        """
        Vehiculo.__init__(self, pasajeros)
self.ruedas = ruedas
self.cambios = cambios
```
#### File: refuerzo_programacion_2018_1/sesion_9/matrices.py
```python
matriz_A = [[1,2,3], [4,5,3], [7,8,3]]
print(matriz_A)
# We can print each row vector of the matrix
for fila in matriz_A:
print(fila)
# We can print every element of the matrix
for fila in matriz_A:
for elemento in fila:
print(elemento)
def vector_columna(matriz, columna):
"""
(list of list, int) -> list
Obtiene el vector columna para la posicion de la matriz
>>> vector_columna([[1,2],[1,2]],0)
[1, 1]
>>> vector_columna([[1,2],[1,2]],1)
[2, 2]
    :param matriz: the matrix that contains our vector
    :param columna: int, the column position of our vector
    :return: list with the elements of the selected column
"""
vector_resultante = []
for fila in matriz:
vector_resultante.append(fila[columna])
return vector_resultante
def producto_punto(vector_a, vector_b):
"""
(list, list) -> num
    Compute the dot product of two vectors
>>> producto_punto([1,2], [1,2])
5
>>> producto_punto([2,2,2], [2,2,2])
12
    :param vector_a: the first vector
    :param vector_b: the second vector
    :return: num, the resulting scalar
"""
resultado = 0
for i in range(0, len(vector_a)):
resultado += vector_a[i] * vector_b[i]
return resultado
def producto_matrices(matriz_A, matriz_B):
"""
(list of list, list of list) -> list of list
    Compute the matrix product
>>> producto_matrices([[1,2], [3, 4]],[[1,2], [3, 4]])
[[7, 10], [15, 22]]
:param matriz_A:
:param matriz_B:
    :return: list of list, the matrix A * B
"""
resultante = []
for fila in matriz_A:
fila_resultante = []
for j in range(0, len(fila)):
columna = vector_columna(matriz_B,j)
escalar = producto_punto(fila, columna)
fila_resultante.append(escalar)
resultante.append(fila_resultante)
return resultante
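# Quick check (not part of the original file; mirrors the doctests above):
if __name__ == '__main__':
    A = [[1, 2], [3, 4]]
    print(producto_matrices(A, A))  # [[7, 10], [15, 22]]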
``` |
{
"source": "josecorella/web_scrapping",
"score": 3
} |
#### File: josecorella/web_scrapping/usfca_programs.py
```python
import requests
import sys
import pandas as pd
from bs4 import BeautifulSoup
def parse_html(content: str) -> dict:
soup = BeautifulSoup(content, 'html.parser')
if not soup:
sys.exit("Could not make some soup. Sorry :(")
programs = soup.findAll("ul", {"class":"program-list"})
    if len(programs) == 0:
sys.exit("No data. Sorry :(")
comph_programs = list()
url_and_classes = dict()
for program in programs:
comph_programs += program.findAll('a')
for i in comph_programs:
url_and_classes[i.string] = f"https://catalog.usfca.edu/{i.get('href')}"
return url_and_classes
def perf_request(url: str) -> dict:
request = requests.get(url)
if request.status_code != 200:
sys.exit("Request not Valid. Sorry :(")
parsed_data = parse_html(request.text)
return parsed_data
def main():
url = "https://catalog.usfca.edu/content.php?catoid=22&navoid=3107"
program_dict = perf_request(url)
df = pd.DataFrame.from_dict(program_dict, orient='index')
df.to_csv('programs.csv')
if __name__ == '__main__':
main()
``` |
{
"source": "josecostamartins/pythonreges",
"score": 3
} |
#### File: lista_bimestre2/lista_objetos/exe1.py
```python
class Bola(object):
def __init__(self, cor, circunferencia, material):
self.cor = cor
self.circunferencia = circunferencia
self.material = material
def trocaCor(self, cor):
self.cor = cor
def mostraCor(self):
print self.cor
return self.cor
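# Example (Python 2, matching the print syntax above; values illustrative):
# bola = Bola('azul', 30, 'couro')
# bola.trocaCor('verde')
# bola.mostraCor()  # prints and returns 'verde'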
```
#### File: pythonreges/lista_primeiro_bimestre/remove_ultimo.py
```python
def remove_ultimo(minha_lista):
minha_lista.pop()
return minha_lista
a = [1, 2, 3, 4, 5, 6]
b = remove_ultimo(a[:])
c = remove_ultimo(b)
print a,",", b,",", c
```
#### File: pythonreges/prova1/exec2.py
```python
def verifica_numero(numero):
if numero > 0:
return "P"
else:
return "N"
valor = int(raw_input("Informe o valor: ") or 0)
print verifica_numero(valor)
``` |
{
"source": "josecriane/MES-srv",
"score": 2
} |
#### File: api/models/order.py
```python
from django.db import models
class OrderType(models.Model):
name = models.CharField(max_length=40, blank=False)
description = models.CharField(max_length=255, blank=True, default='')
def __unicode__(self):
return '%s' % (self.name)
class Order(models.Model):
launched = models.DateTimeField(auto_now_add=True)
params = models.CharField(max_length=255, blank=True, default='')
owner = models.ForeignKey('auth.User', related_name='orders')
ordertype = models.ForeignKey(OrderType, related_name='orders')
class Meta:
ordering = ('launched',)
```
#### File: api/views/device.py
```python
from rest_framework import viewsets
from rest_framework.decorators import detail_route
from rest_framework.response import Response
from api.models import Device
from api.serializers import DeviceSerializer
from api.permissions import IsOwnerOrIsTheSameDevice, IsOwner
from utils.hash import generate_sha256
class DeviceViewSet(viewsets.ModelViewSet):
"""
This viewset automatically provides `list`, `create`, `retrieve`,
`update` and `destroy` actions.
"""
queryset = Device.objects.all()
serializer_class = DeviceSerializer
permission_classes = (IsOwnerOrIsTheSameDevice,)
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
def list(self, request):
try:
owner_elements = Device.objects.filter(owner=request.user.id)
        except Exception:
owner_elements = []
page = self.paginate_queryset(owner_elements)
if page is not None:
serializer = self.get_serializer(page, many=True)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(owner_elements, many=True)
return Response(serializer.data)
@detail_route(methods=['PATCH'], permission_classes=[IsOwnerOrIsTheSameDevice])
def setup(self, request, pk=None):
try:
device = Device.objects.get(id=pk)
except Device.DoesNotExist:
return Response({"detail":"Authentication credentials were not provided."}, status=403)
self.check_object_permissions(request, device)
request.data["token"] = generate_sha256()
serializer = DeviceSerializer(device, data=request.data, partial=True)
if serializer.is_valid():
serializer.save()
return Response({'result':'ok'})
return Response({"detail":"Authentication credentials were not provided."}, status=403)
@detail_route(methods=['POST'], permission_classes=[IsOwner])
def token_post(self, request, pk=None):
print "fuck"
return Response({'result':'ok'})
@detail_route(methods=['GET'], permission_classes=[IsOwnerOrIsTheSameDevice])
def token_get(self, request, pk=None):
print request.data
try:
device = Device.objects.get(id=pk)
except Device.DoesNotExist:
return Response({"detail":"Authentication credentials were not provided."}, status=403)
self.check_object_permissions(request, device)
return Response({'token':device.token})
``` |
{
"source": "josecv/ebpf-usdt-exporter",
"score": 2
} |
#### File: ebpf-usdt-exporter/integration/usdt_counter_test.py
```python
import time
from concurrent.futures import ThreadPoolExecutor
import pytest
import requests
from prometheus_client.parser import text_string_to_metric_families
@pytest.fixture()
def apply_gunicorn(kubectl, apply_manifest, get_manifest_path,
wait_for_pod_ready):
manifest = get_manifest_path('gunicorn.yaml')
apply_manifest(manifest)
wait_for_pod_ready('test-pod', timeout='60s')
def test_pod_goes_ready(apply_gunicorn):
# If the apply_gunicorn fixture succeeds, the pod will have gone into
# ready. This test is really just here to make it easy to surface failures
# in apply_gunicorn
pass
def test_info_metrics_present(kubectl, apply_gunicorn, port_forward):
port_forward('test-pod', '8080')
time.sleep(0.5)
r = requests.get('http://localhost:8080/metrics')
assert r.status_code == 200
found = False
print(r.text)
for family in text_string_to_metric_families(r.text):
if family.name == 'userspace_exporter_enabled_programs':
found = True
assert len(family.samples) == 5
for sample in family.samples:
assert 'pid' in sample.labels
assert sample.labels['name'] == 'gc_total'
assert sample.value == 1.0
assert found, r.text
def test_counter_is_reported(kubectl, apply_gunicorn, port_forward):
port_forward('test-pod', '5000')
port_forward('test-pod', '8080')
time.sleep(0.5)
with ThreadPoolExecutor(max_workers=4) as executor:
list(
executor.map(lambda _: requests.get('http://localhost:5000/'),
range(1000)))
r = requests.get('http://localhost:8080/metrics')
assert r.status_code == 200
found = False
print(r.text)
for family in text_string_to_metric_families(r.text):
if family.name == 'userspace_exporter_gc':
found = True
pids = set()
for sample in family.samples:
assert 'pid' in sample.labels
pids.add(sample.labels['pid'])
assert 'gen' in sample.labels
assert sample.labels['gen'] in {'0', '1', '2'}
assert sample.value > 0
# We expect there to have been garbage collections in all
# four workers, but maybe not the parent process itself
assert len(pids) in (4, 5)
assert found
``` |
{
"source": "josecv/hashkov",
"score": 3
} |
#### File: josecv/hashkov/hashkov_tweet.py
```python
from argparse import ArgumentParser
import sys
from hashkov.twitter import Twitter
from hashkov.chain import MarkovChain
from hashkov import text_pipeline
import os
import pickle
import random
def get_argument_parser():
'''
Build and return an argument parser.
'''
parser = ArgumentParser(description='Tweet Markov chain generated tweets.'
' See README.md for more')
parser.add_argument('-a', '--app-key', dest='app_key',
help='The app key', required=True)
parser.add_argument('-c', '--app-secret', dest='app_secret',
help='The app secret', required=True)
parser.add_argument('-k', '--access-token', dest='access_token',
help='The access token')
parser.add_argument('-s', '--access-secret', dest='access_secret',
help='The access secret')
parser.add_argument('-l', '--lang', dest='lang',
help='The language to tweet in', default='en')
parser.add_argument('-p', '--pickle', dest='pickle', default=None,
help='Optionally, a file to save the chain '
'so that it does better next time')
parser.add_argument('-w', '--woeid', dest='woeid', default=4118, type=int,
help='For use with -d. The woeid that the trending'
' hashtag should be from')
parser.add_argument('-f', '--force', dest='force', action='store_true',
help='Force the hashtag to appear in the tweet '
'(by starting it off with it)')
parser.add_argument('-n', '--ngram', dest='ngram', default=2, type=int,
help='How many words to consider as a token.'
' Default 2')
hashtag_parser = parser.add_mutually_exclusive_group(required=True)
hashtag_parser.add_argument('-t', '--hashtag', dest='hashtag',
help='The hashtag to tweet to')
hashtag_parser.add_argument('-d', '--autonomous', dest='autonomous',
help='Whether to run in autonomous mode.'
' Incompatible with -t', action='store_true')
return parser
def build_pipeline(opts):
'''
Build a text pipeline.
'''
pipeline = text_pipeline.HashtagCleaner()
(pipeline.attach_next(text_pipeline.MentionCleaner())
.attach_next(text_pipeline.UrlCleaner())
.attach_next(text_pipeline.WhitespaceCleaner())
.attach_next(text_pipeline.Tokenizer(opts.ngram)))
return pipeline
def get_chain(opts):
'''
Build or unpickle the markov chain.
'''
if opts.pickle is not None and os.path.isfile(opts.pickle):
with open(opts.pickle, 'rb') as f:
chain = pickle.load(f)
return chain
return MarkovChain()
def save_chain(chain, opts):
'''
Pickle the markov chain given, if desired
'''
if opts.pickle is not None:
with open(opts.pickle, 'wb') as f:
pickle.dump(chain, f)
def get_hashtag(twitter, opts):
'''
Figure out a hashtag to use.
'''
if opts.autonomous:
trending = twitter.get_trending(opts.woeid)
if not trending:
return None
return random.choice(trending)
return opts.hashtag
def generate_tweet(chain, opts, hashtag):
start = ''
if opts.force:
if not hashtag.startswith('#'):
hashtag = '#' + hashtag
hashtag = hashtag.replace('#', '#_').lower()
keys = chain.get_possible_starts()
keys = [k for k in keys if hashtag in k]
start = random.choice(keys)
    # 20 tokens (about 40 words with the default 2-gram) should be more than
    # enough to get us a nice tweet
tweet = chain.sample(20, start)
result = []
for token in tweet:
        if len(' '.join(result + [token])) <= 140:
result.append(token)
tweet = ' '.join(result)
return tweet
def main():
parser = get_argument_parser()
opts = parser.parse_args()
twitter = Twitter(opts.app_key, opts.app_secret)
if any([getattr(opts, i) is None for i in
['access_token', 'access_secret']]):
(token, url) = twitter.request_request_token()
print("Please go to %s and input the pin that you get here." % url)
pin = input('Pin: ')
(key, secret) = twitter.request_access_token(pin)
print("Your access token is:\nKey: %s\nSecret: %s\n" % (key, secret))
else:
twitter.set_access_token(opts.access_token, opts.access_secret)
hashtag = get_hashtag(twitter, opts)
if hashtag is None:
print('Could not decide on a hashtag. Will now quit')
return 1
print("Tweeting to %s" % hashtag)
tweets = twitter.search_by_hashtag(hashtag, 10, opts.lang)
pipeline = build_pipeline(opts)
tweets = [pipeline.process(tweet) for tweet in tweets]
chain = get_chain(opts)
chain.train(tweets)
tweet = generate_tweet(chain, opts, hashtag)
twitter.tweet(tweet)
print("I Tweeted: %s" % tweet)
save_chain(chain, opts)
return 0
if __name__ == '__main__':
sys.exit(main())
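# Example invocation (hypothetical credentials; all flags are defined in
# get_argument_parser above):
# python hashkov_tweet.py -a APP_KEY -c APP_SECRET -t python -f -p chain.pkl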
```
#### File: hashkov/tests/test_chain.py
```python
from hashkov.chain import MarkovChain
import unittest
class MarkovChainTest(unittest.TestCase):
'''
test the Markov chain implementation
'''
def setUp(self):
self.markov = MarkovChain()
def test_memory(self):
'''
Test that the memory of the markov chain is populated correctly
'''
samples = [['a', 'b', 'c']]
self.markov.train(samples)
self.assertDictEqual(self.markov.memory,
{'': ['a'], 'a': ['b'], 'b': ['c']})
samples = [['a', 'a', 'c', 'd', 'b']]
self.markov.train(samples)
self.assertDictEqual(self.markov.memory,
{'': ['a', 'a'], 'a': ['b', 'a', 'c'],
'b': ['c'], 'c': ['d'], 'd': ['b']})
def test_sample(self):
'''
Test that we can sample properly.
'''
samples = [['a', 'b', 'c']]
self.markov.train(samples)
# Only one way to go here
result = ''.join(self.markov.sample(3))
self.assertEqual(result, 'abc')
result = ''.join(self.markov.sample(100))
self.assertEqual(result, 'abc')
```
#### File: hashkov/tests/test_twitter.py
```python
from hashkov.twitter import Twitter, TwitterException
import unittest
from unittest.mock import Mock, ANY, call
from urllib import parse
import os
import json
class TwitterTest(unittest.TestCase):
'''
Test the twitter class.
'''
app_key = 'app_key'
app_secret = 'app_secret'
request_key = 'request_key'
request_secret = 'request_secret'
access_key = 'access_key'
access_secret = 'access_secret'
pin = 'pin'
def setUp(self):
'''
Set up the test.
'''
self.requests = Mock()
self.oauth_class = Mock()
self.oauth_object = Mock()
self.oauth_class.return_value = self.oauth_object
self.twitter = Twitter(self.app_key, self.app_secret,
self.requests, self.oauth_class)
def test_original_construction(self):
'''
Make sure the oauth object has been properly created.
'''
self.oauth_class.assert_called_once_with(self.app_key,
client_secret=self.app_secret)
def test_request_token(self):
'''
Test that we can get a request token.
'''
encoded = parse.urlencode({'oauth_token': self.request_key,
'oauth_token_secret': self.request_secret})
r = Mock()
r.content = bytearray(encoded, 'utf-8')
self.requests.post.return_value = r
(got_token, got_url) = self.twitter.request_request_token()
self.assertEqual(got_token, self.request_key)
self.requests.post.assert_called_once_with(url=ANY,
auth=self.oauth_object)
def test_access_token(self):
'''
Test that we can get an access token.
'''
encoded = parse.urlencode({'oauth_token': self.request_key,
'oauth_token_secret': self.request_secret})
r = Mock()
r.content = bytearray(encoded, 'utf-8')
self.requests.post.return_value = r
self.twitter.request_request_token()
encoded = parse.urlencode({'oauth_token': self.access_key,
'oauth_token_secret': self.access_secret})
r.content = bytearray(encoded, 'utf-8')
self.oauth_class.reset_mock()
self.requests.post.reset_mock()
(access_key, access_secret) = (self.twitter.
request_access_token(self.pin))
self.requests.post.assert_called_once_with(url=ANY,
auth=self.oauth_object)
self.assertEqual(access_key, self.access_key)
self.assertEqual(access_secret, self.access_secret)
self.oauth_class.assert_any_call(
self.app_key,
client_secret=self.app_secret,
resource_owner_key=self.request_key,
resource_owner_secret=self.request_secret,
verifier=self.pin)
self.oauth_class.assert_called_with(
self.app_key,
client_secret=self.app_secret,
resource_owner_key=self.access_key,
resource_owner_secret=self.access_secret)
def test_search_by_hashtag(self):
'''
Test the search method.
'''
json_path = os.path.join(os.path.dirname(__file__), 'resources',
'search_results.json')
with open(json_path, 'r') as f:
text = ''.join(f.readlines())
r = Mock()
r.status_code = 200
r.text = text
r.json.return_value = json.loads(text)
self.requests.get.return_value = r
expected = ['Aggressive Ponytail #freebandnames',
'Thee Namaste Nerdz. #FreeBandNames',
'Mexican Heaven, Mexican Hell #freebandnames',
'The Foolish Mortals #freebandnames']
q = '#freebandnames'
results = self.twitter.search_by_hashtag(q)
self.requests.get.assert_called_once_with(ANY, auth=ANY,
params={'q': q})
        self.assertEqual(expected, results)
def test_search_by_hashtag_no_pound(self):
'''
Test the search method, when the hashtag doesn't have a #hash.
'''
json_path = os.path.join(os.path.dirname(__file__), 'resources',
'search_results.json')
with open(json_path, 'r') as f:
text = ''.join(f.readlines())
r = Mock()
r.status_code = 200
r.text = text
r.json.return_value = json.loads(text)
self.requests.get.return_value = r
expected = ['Aggressive Ponytail #freebandnames',
'Thee Namaste Nerdz. #FreeBandNames',
'Mexican Heaven, Mexican Hell #freebandnames',
'The Foolish Mortals #freebandnames']
q = 'freebandnames'
results = self.twitter.search_by_hashtag(q)
self.requests.get.assert_called_once_with(ANY, auth=ANY,
params={'q': '#' + q})
        self.assertEqual(expected, results)
def test_search_by_hashtag_lang(self):
'''
Test the search method with a language.
'''
json_path = os.path.join(os.path.dirname(__file__), 'resources',
'search_results.json')
with open(json_path, 'r') as f:
text = ''.join(f.readlines())
r = Mock()
r.status_code = 200
r.text = text
r.json.return_value = json.loads(text)
self.requests.get.return_value = r
expected = ['Aggressive Ponytail #freebandnames',
'Thee Namaste Nerdz. #FreeBandNames',
'Mexican Heaven, Mexican Hell #freebandnames',
'The Foolish Mortals #freebandnames']
q = '#freebandnames'
results = self.twitter.search_by_hashtag(q, 1, 'en')
self.requests.get.assert_called_once_with(ANY, auth=ANY,
params={'q': q, 'l': 'en'})
        self.assertEqual(expected, results)
def test_search_by_hashtag_paginated(self):
'''
Test the search method with pagination.
'''
json_path = os.path.join(os.path.dirname(__file__), 'resources',
'search_results.json')
with open(json_path, 'r') as f:
text = ''.join(f.readlines())
r = Mock()
r.status_code = 200
r.text = text
r.json.return_value = json.loads(text)
self.requests.get.return_value = r
expected = ['Aggressive Ponytail #freebandnames',
'Thee Namaste Nerdz. #FreeBandNames',
'Mexican Heaven, Mexican Hell #freebandnames',
'The Foolish Mortals #freebandnames',
'Aggressive Ponytail #freebandnames',
'Thee Namaste Nerdz. #FreeBandNames',
'Mexican Heaven, Mexican Hell #freebandnames',
'The Foolish Mortals #freebandnames',
'Aggressive Ponytail #freebandnames',
'Thee Namaste Nerdz. #FreeBandNames',
'Mexican Heaven, Mexican Hell #freebandnames',
'The Foolish Mortals #freebandnames']
q = 'freebandnames'
results = self.twitter.search_by_hashtag(q, 3)
self.assertEqual(expected, results)
class ContainsNextPage(str):
'''To check whether the next_results stuff is given'''
def __eq__(self, other):
return other.endswith("?max_id=249279667666817023&"
"q=%23freebandnames&count=4&"
"include_entities=1&result_type=mixed")
calls = [call(ANY, auth=ANY, params={'q': '#' + q}),
call(ContainsNextPage(), auth=ANY),
call(ContainsNextPage(), auth=ANY)]
self.requests.get.assert_has_calls(calls, any_order=True)
def test_search_by_hashtag_error(self):
'''
Test the search method, when twitter returns an error.
'''
r = Mock()
r.status_code = 401
self.requests.get.return_value = r
try:
self.twitter.search_by_hashtag('#WhyTho')
self.fail('Did not throw on http error')
except TwitterException:
pass
def test_get_trending(self):
'''
Test the get_trending method.
'''
json_path = os.path.join(os.path.dirname(__file__), 'resources',
'trends_results.json')
with open(json_path, 'r') as f:
text = ''.join(f.readlines())
r = Mock()
r.status_code = 200
r.json.return_value = json.loads(text)
self.requests.get.return_value = r
expected = ['#GanaPuntosSi', '#WordsThatDescribeMe',
'#10PersonasQueExtra\u00f1oMucho']
results = self.twitter.get_trending(10)
self.assertEqual(expected, results)
self.requests.get.assert_called_once_with(ANY, auth=ANY,
params={'id': 10})
def test_tweet(self):
'''
Test the tweet method
'''
tweet = 'Test tweet please ignore'
r = Mock()
r.status_code = 200
self.requests.post.return_value = r
self.twitter.tweet(tweet)
self.requests.post.assert_called_once_with(ANY, auth=ANY,
params={'status': tweet})
def test_tweet_error(self):
'''
Test the tweet method when there's an error.
'''
r = Mock()
r.status_code = 401
self.requests.post.return_value = r
try:
self.twitter.tweet('Test tweet please ignore')
self.fail('Did not throw on http error')
except TwitterException:
pass
``` |
{
"source": "josecyc/Neurotron",
"score": 3
} |
#### File: data_acquisition/leap_raw/read_leap.py
```python
import sys, time, os
import Leap
from Leap import CircleGesture, KeyTapGesture, ScreenTapGesture, SwipeGesture
import numpy as np
import pandas as pd
import argparse
class LeapMotionListener(Leap.Listener):
data = []
    finger_names = ['Thumb', 'Index', 'Middle', 'Ring', 'Pinky']
bone_names = ['Metacarpal', 'Proximal', 'Intermediate', 'Distal']
state_names = ['STATE_INVALID', 'STATE_START', 'STATE_UPDATE', 'STATE_END']
columns = ["Unix Time", "Timestamp", "Palm x", "Palm y", "Palm z", "Wrist x", "Wrist y", "Wrist z",
"Thumb Proximal x", "Thumb Proximal y", "Thumb Proximal z",
"Thumb Intermediate x", "Thumb Intermediate y", "Thumb Intermediate z",
"Thumb Distal x", "Thumb Distal y", "Thumb Distal z",
"Thumb Tip x", "Thumb Tip y", "Thumb Tip z",
"Index Proximal x", "Index Proximal y", "Index Proximal z",
"Index Intermediate x", "Index Intermediate y", "Index Intermediate z",
"Index Distal x", "Index Distal y", "Index Distal z",
"Index Tip x", "Index Tip y", "Index Tip z",
"Middle Proximal x", "Middle Proximal y", "Middle Proximal z",
"Middle Intermediate x", "Middle Intermediate y", "Middle Intermediate z",
"Middle Distal x", "Middle Distal y", "Middle Distal z",
"Middle Tip x", "Middle Tip y", "Middle Tip z",
"Ring Proximal x", "Ring Proximal y", "Ring Proximal z",
"Ring Intermediate x", "Ring Intermediate y", "Ring Intermediate z",
"Ring Distal x", "Ring Distal y", "Ring Distal z",
"Ring Tip x", "Ring Tip y", "Ring Tip z",
"Pinky Proximal x", "Pinky Proximal y", "Pinky Proximal z",
"Pinky Intermediate x", "Pinky Intermediate y", "Pinky Intermediate z",
"Pinky Distal x", "Pinky Distal y", "Pinky Distal z",
"Pinky Tip x", "Pinky Tip y", "Pinky Tip z"]
def on_init(self, controller):
print ("Leap : Initialized")
def on_connect(self, controller):
print ("Leap : Motion Sensor Connected")
        controller.enable_gesture(Leap.Gesture.TYPE_CIRCLE)
        controller.enable_gesture(Leap.Gesture.TYPE_KEY_TAP)
        controller.enable_gesture(Leap.Gesture.TYPE_SCREEN_TAP)
        controller.enable_gesture(Leap.Gesture.TYPE_SWIPE)
def on_disconnect(self, controller):
print ("Leap : Motion Sensor Disconnected")
def on_exit(self, controller):
print ("Leap : Exited")
def on_frame(self, controller):
frame = controller.frame()
print ("Frame id: {}, timestamp: {}, hands: {}, fingers: {}".format(frame.id,
frame.timestamp, len(frame.hands), len(frame.fingers)))
for hand in frame.hands:
            if hand.is_right:
                # only right-hand frames are recorded here, so the type is fixed
                handType = "Right Hand"
arm = hand.arm
wrist_rel = arm.wrist_position - hand.palm_position
thumb_proximal_rel = hand.fingers[0].bone(1).prev_joint - hand.palm_position
thumb_intermediate_rel = hand.fingers[0].bone(2).prev_joint - hand.palm_position
thumb_distal_rel = hand.fingers[0].bone(3).prev_joint - hand.palm_position
thumb_tip_rel = hand.fingers[0].bone(3).next_joint - hand.palm_position
index_proximal_rel = hand.fingers[1].bone(1).prev_joint - hand.palm_position
index_intermediate_rel = hand.fingers[1].bone(2).prev_joint - hand.palm_position
index_distal_rel = hand.fingers[1].bone(3).prev_joint - hand.palm_position
index_tip_rel = hand.fingers[1].bone(3).next_joint - hand.palm_position
middle_proximal_rel = hand.fingers[2].bone(1).prev_joint - hand.palm_position
middle_intermediate_rel = hand.fingers[2].bone(2).prev_joint - hand.palm_position
middle_distal_rel = hand.fingers[2].bone(3).prev_joint - hand.palm_position
middle_tip_rel = hand.fingers[2].bone(3).next_joint - hand.palm_position
ring_proximal_rel = hand.fingers[3].bone(1).prev_joint - hand.palm_position
ring_intermediate_rel = hand.fingers[3].bone(2).prev_joint - hand.palm_position
ring_distal_rel = hand.fingers[3].bone(3).prev_joint - hand.palm_position
ring_tip_rel = hand.fingers[3].bone(3).next_joint - hand.palm_position
pinky_proximal_rel = hand.fingers[4].bone(1).prev_joint - hand.palm_position
pinky_intermediate_rel = hand.fingers[4].bone(2).prev_joint - hand.palm_position
pinky_distal_rel = hand.fingers[4].bone(3).prev_joint - hand.palm_position
pinky_tip_rel = hand.fingers[4].bone(3).next_joint - hand.palm_position
self.data.append(np.array([time.time(), frame.timestamp,
hand.palm_position[0], hand.palm_position[1], hand.palm_position[2],
wrist_rel[0], wrist_rel[1], wrist_rel[2],
thumb_proximal_rel[0], thumb_proximal_rel[1], thumb_proximal_rel[2],
thumb_intermediate_rel[0], thumb_intermediate_rel[1], thumb_intermediate_rel[2],
thumb_distal_rel[0], thumb_distal_rel[1], thumb_distal_rel[2],
thumb_tip_rel[0], thumb_tip_rel[1], thumb_tip_rel[2],
index_proximal_rel[0], index_proximal_rel[1], index_proximal_rel[2],
index_intermediate_rel[0], index_intermediate_rel[1], index_intermediate_rel[2],
index_distal_rel[0], index_distal_rel[1], index_distal_rel[2],
index_tip_rel[0], index_tip_rel[1], index_tip_rel[2],
middle_proximal_rel[0], middle_proximal_rel[1], middle_proximal_rel[2],
middle_intermediate_rel[0], middle_intermediate_rel[1], middle_intermediate_rel[2],
middle_distal_rel[0], middle_distal_rel[1], middle_distal_rel[2],
middle_tip_rel[0], middle_tip_rel[1], middle_tip_rel[2],
ring_proximal_rel[0], ring_proximal_rel[1], ring_proximal_rel[2],
ring_intermediate_rel[0], ring_intermediate_rel[1], ring_intermediate_rel[2],
ring_distal_rel[0], ring_distal_rel[1], ring_distal_rel[2],
ring_tip_rel[0], ring_tip_rel[1], ring_tip_rel[2],
pinky_proximal_rel[0], pinky_proximal_rel[1], pinky_proximal_rel[2],
pinky_intermediate_rel[0], pinky_intermediate_rel[1], pinky_intermediate_rel[2],
pinky_distal_rel[0], pinky_distal_rel[1], pinky_distal_rel[2],
pinky_tip_rel[0], pinky_tip_rel[1], pinky_tip_rel[2]]))
print(handType)
print('palm_position: {}'.format(hand.palm_position))
print('fingertips: {} {} {} {} {}'.format(thumb_tip_rel, index_tip_rel, middle_tip_rel, ring_tip_rel, pinky_tip_rel))
def parse():
parser = argparse.ArgumentParser()
parser.add_argument('name')
parser.add_argument('nbr')
return parser.parse_args()
def main():
args = parse()
listener = LeapMotionListener()
controller = Leap.Controller()
controller.add_listener(listener)
print ("Leap - Press enter to quit")
try:
sys.stdin.readline()
except KeyboardInterrupt:
pass
finally:
sys.stdout.flush()
print('Leap - Exiting...')
print ("Data dump: {} points".format(len(listener.data)))
df = pd.DataFrame(data=listener.data, columns=listener.columns)
df.to_csv("leap_data_{}_{}.csv".format(args.name, args.nbr), index=False)
controller.remove_listener(listener)
if __name__ == "__main__":
main()
``` |
{
"source": "josedab/udacity-ai-flower-image-classification",
"score": 2
} |
#### File: josedab/udacity-ai-flower-image-classification/predict.py
```python
import torch
from get_input_args import get_input_args_prediction
from checkpoint_utils import load_model
from prediction_utils import predict
from classes_utils import get_category_mappings
def main():
arguments = get_input_args_prediction()
print(arguments)
    # use the GPU only when it is both available and requested; the flag is
    # passed straight to predict(), so no separate device string is needed
    is_gpu_enabled = torch.cuda.is_available() and arguments.gpu
model_file_path = arguments.checkpoint
model = load_model(model_file_path)
probabilities, labels = predict(image_path=arguments.input,
model=model,
topk=arguments.topk,
is_gpu_enabled=is_gpu_enabled)
category_mappings_path = arguments.category_names
if len(category_mappings_path) > 0:
category_mappings = get_category_mappings(category_mappings_path)
category_labels = [category_mappings[str(folder_idx)] for folder_idx in labels]
labels = category_labels
print_prediction_results(labels, probabilities, arguments.topk)
def print_prediction_results(labels, probabilities, topk=5):
print("==================================")
print("Prediction results")
print("==================================")
for label, probability in list(zip(labels, probabilities))[:topk]:
print("Class: {}, Probability: {:.4f}".format(label, probability))
if __name__ == "__main__":
main()
```
#### File: josedab/udacity-ai-flower-image-classification/training_utils.py
```python
import torch
def training_validation(model, dataloader, criterion, device):
test_loss = 0
accuracy = 0
for inputs, labels in dataloader:
inputs, labels = inputs.to(device), labels.to(device)
output = model.forward(inputs)
test_loss += criterion(output, labels).item()
ps = torch.exp(output)
equality = (labels.data == ps.max(dim=1)[1])
accuracy += equality.type(torch.FloatTensor).mean()
return test_loss, accuracy
def train_model(training_set, validation_set, model, optimizer, criterion, epochs=5, print_every=5, device='cuda'):
model.to(device)
steps = 0
for e in range(epochs):
model.train()
running_loss = 0
for images, labels in training_set:
steps += 1
images, labels = images.to(device), labels.to(device)
optimizer.zero_grad()
# Forward and backward passes
outputs = model.forward(images)
loss = criterion(outputs, labels)
loss.backward()
optimizer.step()
running_loss += loss.item()
if steps % print_every == 0:
# Evaluation mode for the validation
model.eval()
with torch.no_grad():
validation_loss, validation_accuracy = training_validation(model, validation_set, criterion, device)
print("Epoch: {}/{} --- ".format(e + 1, epochs),
"Training Loss: {:.4f} ".format(running_loss / print_every),
"Validation Loss: {:.4f} ".format(validation_loss / len(validation_set)),
"Validation Accuracy: {:.4f} ".format(validation_accuracy / len(validation_set)))
running_loss = 0
# Back on training mode
model.train()
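# Hedged usage sketch (illustrative only; the loader, criterion and optimizer
# names below are assumptions, not taken from the original project):
#
#   criterion = torch.nn.NLLLoss()
#   optimizer = torch.optim.Adam(model.classifier.parameters(), lr=0.001)
#   train_model(train_loader, valid_loader, model, optimizer, criterion,
#               epochs=5, print_every=40, device='cuda')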
``` |
{
"source": "josedavidcortes-git/usd",
"score": 4
} |
#### File: josedavidcortes-git/usd/DavidCortes_test05.py
```python
with open('donQuixote.txt','r') as myFile:
text = myFile.read()
import nltk
nltk.download('all')
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
from nltk.stem import WordNetLemmatizer
from nltk.corpus import wordnet
lemmatizer = WordNetLemmatizer()
import string
stop_words = set(stopwords.words('english'))
word_tokens = word_tokenize(text)
#word_tokens = list(map(filter(lambda token: token not in string.punctuation,word_tokens)))
word_tokens = list(filter(lambda token: token not in string.punctuation,word_tokens))
# build a stop-word-free token list (renamed to avoid shadowing the builtin `list`)
filtered_words = []
for word in word_tokens:
    if word not in stop_words:
        filtered_words.append(word)
def get_magic(word):
tag = nltk.pos_tag([word])[0][1][0].upper()
tag_dict = {"J": wordnet.ADJ,
"N": wordnet.NOUN,
"V": wordnet.VERB,
"R": wordnet.ADV}
return tag_dict.get(tag, wordnet.NOUN)
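# Hedged illustration (assumed tagger output): nltk.pos_tag(["running"])
# typically tags the word as VBG, so get_magic returns wordnet.VERB and
# lemmatizer.lemmatize("running", wordnet.VERB) yields "run".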
# lemmatize the stop-word-filtered tokens (the original looped over the
# unfiltered word_tokens, leaving the filtered list unused)
for word in filtered_words:
    print(lemmatizer.lemmatize(word, get_magic(word)))
#print(word_tokens)
``` |
{
"source": "JoseDavi/PyGraph",
"score": 3
} |
#### File: PyGraph/py_graph_t/SimpleGraph.py
```python
from .vertex.SimpleVertex import SimpleVertex
from .edges.SimpleEdge import SimpleEdge
from .exceptions.SimpleGraphException import VertexNotExistsException, EdgeDuplicatedException, EdgeNotFoundException, VertexDuplicatedException
class SimpleGraph:
    """A simple graph implementation."""
    def __init__(self):
        # instance attributes: class-level mutable defaults would be shared
        # across every SimpleGraph instance
        self.vertices = dict()
        self.edges = []
    def add_vertex(self, value):
        """
        Adds a vertex to the graph.
        Parameters:
        ----------
        value: any existing or user-defined type
            - Value to be stored in the vertex.
        """
        if value not in self.vertices:
            self.vertices[value] = SimpleVertex(value)
            return self.vertices[value]
        else:
            raise VertexDuplicatedException()
    def delete_vertex(self, value):
        """
        Removes a vertex from the graph and, consequently, every edge
        connected to that vertex.
        Parameters:
        ----------
        value: *
            - identifier of the vertex to be removed
        """
        # check existence before indexing to avoid an unguarded KeyError
        if not self.vertex_exists(value):
            raise VertexNotExistsException()
        vertex_removed = self.vertices[value]
        for i in range(len(self.edges)-1, -1, -1):
            edge = self.edges[i]
            if self.is_terminal(edge, value):
                self.edges.pop(i)
        del self.vertices[value]
        return vertex_removed
    def show_edge(self, value_a, value_b):
        """
        Returns the edge between two vertices, if it exists.
        Parameters:
        ----------
        value_a: *
            - identifier of the vertex.
        value_b: *
            - identifier of the vertex.
        """
        vertex_a = self.vertices.get(value_a)
        vertex_b = self.vertices.get(value_b)
        if vertex_a is None or vertex_b is None:
            raise VertexNotExistsException()
        edge_test = SimpleEdge(vertex_a=vertex_a, vertex_b=vertex_b)
        for edge in self.edges:
            if edge_test == edge:
                return edge
    def add_edge(self, value_a, value_b, name=None):
        """
        Adds an edge to the graph.
        Parameters:
        ----------
        value_a: *
            - Identifier of the head vertex of the edge.
        value_b: *
            - Identifier of the tail vertex of the edge.
        name: String
            - Name of the graph edge.
        """
        vertex_a = self.vertices.get(value_a)
        vertex_b = self.vertices.get(value_b)
        if vertex_a is None or vertex_b is None:
            raise VertexNotExistsException()
        if SimpleEdge(name=name, vertex_a=vertex_a, vertex_b=vertex_b) in self.edges:
            raise EdgeDuplicatedException()
        else:
            self.edges.append(SimpleEdge(name=name, vertex_a=vertex_a, vertex_b=vertex_b))
            return self.show_edge(value_a, value_b)
    def delete_edge(self, value_a, value_b):
        """
        Removes an edge from the graph.
        Parameters:
        ----------
        value_a: *
            - Identifier of the head vertex of the edge.
        value_b: *
            - Identifier of the tail vertex of the edge.
        """
        vertex_a = self.vertices.get(value_a)
        vertex_b = self.vertices.get(value_b)
        edge_aux = SimpleEdge(vertex_a=vertex_a, vertex_b=vertex_b)
        if edge_aux in self.edges:
            self.edges.remove(edge_aux)
            return edge_aux
        else:
            raise EdgeNotFoundException()
    def is_terminal(self, edge, value):
        """
        Checks whether a given vertex is an endpoint of a given edge.
        Parameters:
        ----------
        edge: SimpleEdge
            - Edge to be checked.
        value: *
            - identifier of the vertex.
        Returns:
        ----------
        result: bool
            - Boolean indicating whether the vertex is one of the edge's endpoints.
        """
        return edge.vertex_a.value == value or edge.vertex_b.value == value
    def num_vertex(self):
        """
        Returns the number of vertices in the graph.
        Returns:
        ----------
        count: Int
            - Number of vertices.
        """
        return len(self.vertices)
    def vertex_exists(self, value):
        """
        Indicates whether a given vertex belongs to the graph.
        Parameters:
        ----------
        value: *
            - identifier of the vertex to be checked
        Returns:
        ----------
        True: if the vertex belongs to the graph.
        False: otherwise.
        """
        return value in self.vertices
    def edge_exists(self, value_a, value_b):
        """
        Boolean method indicating whether a given edge belongs to the graph.
        Parameters:
        ----------
        value_a:
            - Identifier of the head vertex of the edge.
        value_b:
            - Identifier of the tail vertex of the edge.
        Returns:
        ----------
        True: if the edge exists.
        False: otherwise.
        """
        vertex_a = self.vertices.get(value_a)
        vertex_b = self.vertices.get(value_b)
        edge_aux = SimpleEdge(vertex_a=vertex_a, vertex_b=vertex_b)
        return edge_aux in self.edges
    def num_edges(self):
        """
        Returns the number of edges in the graph.
        Returns:
        ----------
        count: Int
            - Number of edges.
        """
        return len(self.edges)
    def vertex_neighbors(self, value):
        """
        Finds the vertices neighboring the input vertex.
        Parameters:
        ----------
        value: *
            - identifier of the vertex.
        Returns:
        ----------
        neigh_vertices: List
            - List of vertices.
        """
        neigh_vertices = []
        for edge in self.edges:
            if edge.vertex_a.value == value:
                neigh_vertices.append(edge.vertex_b)
            elif edge.vertex_b.value == value:
                neigh_vertices.append(edge.vertex_a)
        return neigh_vertices
    def vertex_degree(self, value):
        """
        Returns the degree of the input vertex.
        Parameters:
        ----------
        value: *
            - identifier of the input vertex.
        Returns:
        ----------
        count: Int
            - Number of neighbors of the input vertex.
        """
        if self.vertex_exists(value):
            return len(self.vertex_neighbors(value))
        else:
            raise VertexNotExistsException()
    def is_vertices_adjacents(self, value_a, value_b):
        """
        Indicates whether the input vertices are adjacent.
        Parameters:
        ----------
        value_a: *
            - identifier of the vertex.
        value_b: *
            - identifier of the vertex.
        Returns:
        ----------
        True: if the vertices are adjacent.
        False: otherwise.
        """
        neigh_vertices = self.vertex_neighbors(value_a)
        vertex_b = self.vertices.get(value_b)
        return vertex_b in neigh_vertices
    def get_all_vertex(self):
        """
        Returns the graph's vertices.
        Returns:
        ----------
        vertices: dict
            - Mapping with all the vertices of the graph.
        """
        return self.vertices
    def list_graph_vertices(self):
        """
        Returns a list with the identifiers of all the vertices in the graph.
        Returns:
        ----------
        vertices: list with the identifiers of all the graph's vertices.
        """
        vertices = []
        for vertex in self.vertices:
            vertices.append(vertex)
        return vertices
    def list_graph_edges(self):
        """
        Returns a list with the names of all the edges in the graph.
        Returns:
        ----------
        edges: list with the names of all the graph's edges.
        """
        edges = []
        for edge in self.edges:
            edges.append(edge.name)
        return edges
    def cycle(self, v, visited, parent):
        """
        Checks whether the subgraph reachable from vertex v contains a cycle.
        Parameters:
        ----------
        v: *
            - Vertex.
        visited: *
            - Mapping of vertices already visited.
        parent: *
            - Parent of the current vertex.
        Returns:
        ----------
        True: if the subgraph contains a cycle.
        False: otherwise.
        """
        visited[v] = True
        for i in self.vertices:
            if self.is_vertices_adjacents(v, i):
                if not visited[i]:
                    if self.cycle(i, visited, v):
                        return True
                elif parent != i:
                    # a visited neighbor that is not the parent closes a cycle
                    return True
        return False
    def has_loop(self):
        """
        Checks whether the graph contains a loop/cycle.
        Returns:
        ----------
        True: if the graph contains a loop/cycle.
        False: otherwise.
        """
        visited = dict()
        for i in self.vertices:
            visited[i] = False
        for i in self.vertices:
            if not visited[i]:
                if self.cycle(i, visited, -1):
                    return True
        return False
    def __str__(self):
        """
        Returns the textual representation of the graph.
        Returns:
        ----------
        graph_string: String
            - Textual representation of the graph.
        """
        graph_string = ""
        for edge in self.edges:
            graph_string += str(edge)
            graph_string += "\n"
        return graph_string
    def check_regular_graph(self):
        """
        Checks the regularity of the graph (whether every vertex has the same degree).
        Returns:
        ----------
        True: if the graph is regular.
        False: if the graph is not regular.
        """
        valency = []
        for i in self.vertices:
            v = 0
            aux = SimpleVertex(i)
            for j in self.edges:
                if aux == j.vertex_a or aux == j.vertex_b:
                    v += 1
            valency.append(v)
        return len(set(valency)) <= 1
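if __name__ == "__main__":
    # Hedged usage sketch (illustrative, not part of the original library):
    # builds a small triangle graph and queries it.
    g = SimpleGraph()
    for v in ("a", "b", "c"):
        g.add_vertex(v)
    g.add_edge("a", "b", name="ab")
    g.add_edge("b", "c", name="bc")
    g.add_edge("c", "a", name="ca")
    print(g.num_vertex(), g.num_edges())  # 3 3
    print(g.has_loop())                   # True: the triangle is a cycle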
``` |
{
"source": "jose-delarosa/sysadmin-tools",
"score": 2
} |
#### File: sysadmin-tools/redfish/turn-power-on.py
```python
import json
import sys
import requests
# requests raises its own exception types (urllib2's HTTPError/URLError are
# never raised by requests.get/post), so catch requests.exceptions instead
from requests.exceptions import HTTPError, ConnectionError
# http://bit.ly/2iGTEGS
from requests.packages.urllib3.exceptions import InsecureRequestWarning
# Is this the standard?
redfish_uri = "/redfish/v1"
def usage(me):
print("Usage: %s <ip> <user> <password>" % (me))
exit(1)
def send_get_request(uri, creds):
try:
resp = requests.get(uri, verify=False,
auth=(creds['user'], creds['pswd']))
data = resp.json()
    except HTTPError as e:
        return {'ret': False, 'msg': "HTTP Error: %s" % e}
    except ConnectionError as e:
        return {'ret': False, 'msg': "Connection Error: %s" % e}
# Almost all errors should be caught above, but just in case
except:
return {'ret': False, 'msg': "Error"}
return {'ret': True, 'data': data}
def send_post_request(uri, creds, payload, headers):
try:
requests.post(uri, data=json.dumps(payload),
headers=headers, verify=False,
auth=(creds['user'], creds['pswd']))
    except HTTPError as e:
        return {'ret': False, 'msg': "HTTP Error: %s" % e}
    except ConnectionError as e:
        return {'ret': False, 'msg': "Connection Error: %s" % e}
# Almost all errors should be caught above, but just in case
except:
return {'ret': False, 'msg': "Error"}
return {'ret': True}
def find_systems_resource(base_uri, creds):
systems_uri = []
response = send_get_request(base_uri + redfish_uri, creds)
if response['ret'] is False:
return {'ret': False, 'msg': "Error getting Systems resource"}
data = response['data']
if 'Systems' not in data:
return {'ret': False, 'msg': "Systems resource not found"}
else:
systems = data["Systems"]["@odata.id"]
response = send_get_request(base_uri + systems, creds)
if response['ret'] is False:
return {'ret': False, 'msg': "Couldn't get Systems resource value"}
data = response['data']
# more than one entry possible, so put in a list
for member in data[u'Members']:
systems_uri.append(member[u'@odata.id'])
return {'ret': True, 'systems_uri': systems_uri}
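# Hedged illustration of the Redfish payload shape the function above relies
# on (fields abbreviated, values illustrative):
#   GET /redfish/v1          -> {"Systems": {"@odata.id": "/redfish/v1/Systems"}}
#   GET /redfish/v1/Systems  -> {"Members": [{"@odata.id": "/redfish/v1/Systems/System.1"}]}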
def turn_power_on(base_uri, uris_list, creds):
key = "Actions"
payload = {'ResetType': 'On'}
headers = {'content-type': 'application/json'}
for uri in uris_list:
# Search for 'key' entry and extract URI from it
response = send_get_request(base_uri + uri, creds)
if response['ret'] is False:
return {'ret': False, 'msg': "Couldn't get power management URI"}
data = response['data']
action_uri = data[key]["#ComputerSystem.Reset"]["target"]
response = send_post_request(base_uri + action_uri, creds, payload, headers)
if response['ret'] is False:
print("Error sending power command")
else:
print("Power command successful")
return
def main():
if len(sys.argv) < 4:
usage(sys.argv[0])
# Disable insecure-certificate-warning message
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
base_uri = "https://" + sys.argv[1]
creds = {'user': sys.argv[2],
'pswd': sys.argv[3]}
# This will vary by OEM
result = find_systems_resource(base_uri, creds)
if result['ret'] is True:
turn_power_on(base_uri, result['systems_uri'], creds)
else:
print("Error: %s" % result['msg'])
exit(1)
if __name__ == '__main__':
main()
``` |
{
"source": "josediazb/alpha-beta-harmonics",
"score": 2
} |
#### File: josediazb/alpha-beta-harmonics/transformLibrary.py
```python
import numpy as np
def clarkeTransformation(a, b, c):
alpha = (2/3) * (a + (-1*b/2) + (-1*c/2))
beta = (2/3) * (0 + (np.sqrt(3) * b/2) + (-1 * np.sqrt(3) * c/2))
gamma = (2/3) * (a/2 + b/2 + c/2)
amplitude = np.sqrt(np.power(alpha, 2) + np.power(beta, 2))
angle = 180/np.pi*(np.arctan2(alpha, beta))
return alpha, beta, gamma, amplitude, angle
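if __name__ == "__main__":
    # Hedged usage sketch (not part of the original library): for the balanced
    # three-phase set a=1, b=-0.5, c=-0.5 the transform yields alpha=1, beta=0,
    # gamma=0 and amplitude 1.
    alpha, beta, gamma, amplitude, angle = clarkeTransformation(1.0, -0.5, -0.5)
    print(alpha, beta, gamma, amplitude, angle)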
``` |
{
"source": "josediogonc/tesouro-direto",
"score": 3
} |
#### File: tesourodireto/spiders/tesourodireto.py
```python
import scrapy
class TesouroDiretoSpider(scrapy.Spider):
name = "tesourodireto"
allowed_domains = ["fazenda.gov.br"]
start_urls = (
'http://www.tesouro.fazenda.gov.br/tesouro-direto-precos-e-taxas-dos-titulos',
)
def parse(self, response):
list_resp = response.xpath("//tr[contains(@class, 'camposTesouroDireto')]/td/text()").extract()
i = 0
contador = 0
        for index in range(len(list_resp) // 5):  # each table row has 5 fields
Titulo = list_resp[i+0]
Vencto = list_resp[i+1]
TaxaRendimentoAa = list_resp[i+2]
print('Titulo: %s Vencimento: %s Taxa a.a: %s \n' % (Titulo, Vencto, TaxaRendimentoAa))
i+=5
if (Titulo.strip() == "Tesouro IPCA+ 2019 (NTNB Princ)"):
contador+=1
if contador == 2:
break
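# Hedged usage note: inside a Scrapy project this spider would typically be
# started with `scrapy crawl tesourodireto` (assumes the standard project
# layout; the command itself is not part of this file).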
``` |
{
"source": "jose-d/telegraf-collectors",
"score": 2
} |
#### File: telegraf-collectors/gpfs_stats_collector/give_stats.py
```python
import os
import subprocess
import sys
import threading
mmpmon_path = '/usr/lpp/mmfs/bin/mmpmon'
# --
# classes
class Command:
# good old class I use to execute commands in OS shell.
def __init__(self, cmd):
self.cmd = cmd
self.process = None
self.stdout = None
self.stderr = None
self.rc = None
def run(self, timeout):
def target():
            # universal_newlines=True yields str (not bytes) output, so the
            # string parsing in main() works under Python 3
            self.process = subprocess.Popen(self.cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
self.stdout, self.stderr = self.process.communicate()
self.rc = self.process.wait()
thread = threading.Thread(target=target)
thread.start()
thread.join(timeout)
if thread.is_alive():
print('Command.run(): timeout: terminating process')
self.process.terminate()
thread.join()
self.rc = 999
return self.rc, self.stdout, self.stderr
# --
# functions
def dump_data(data):
site_prefix = str("gpfs,cluster=" + str(data['cluster']) + ",filesystem=" + str(data['filesystem']))
for metric in metrics:
print(str(site_prefix) + " " + str(metric) + "=" + str(data[metric]))
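# Hedged illustration of the InfluxDB line-protocol output dump_data emits
# (values are made up):
#   gpfs,cluster=cl1,filesystem=fs1 bytes_read=1024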
# --
# metric config
metrics = ["disks", "bytes_read", "bytes_written", "opens", "closes", "reads", "writes", "readdir", "inode_updates"]
nvdict = {}
nvdict['cluster:'] = 'cluster'
nvdict['filesystem:'] = 'filesystem'
nvdict['disks:'] = 'disks'
nvdict['bytes read:'] = 'bytes_read'
nvdict['bytes written:'] = 'bytes_written'
nvdict['opens:'] = 'opens'
nvdict['closes:'] = 'closes'
nvdict['reads:'] = 'reads'
nvdict['writes:'] = 'writes'
nvdict['readdir:'] = 'readdir'
nvdict['inode updates:'] = 'inode_updates'
# --
# main
if __name__ == "__main__":
script_path = os.path.dirname(os.path.realpath(__file__))
arguments = '-i ' + str(script_path) + '/commandFile'
cmd_string = str(mmpmon_path) + ' ' + str(arguments)
cmd = Command(cmd_string)
rc, stdout, stderr = cmd.run(5)
if rc != 0:
print('Something went wrong when calling ' + str(mmpmon_path) + ' ' + str(arguments))
sys.exit(1)
for line in stdout.splitlines():
if "mmpmon node" in str(line):
if 'data' in locals():
dump_data(data)
data = {}
node_name = str(line).split()[4]
data['name'] = node_name
data['measurement'] = 'gpfs'
if "timestamp:" in str(line):
data['timestamp'] = str(line).split(':')[1].strip().split('/')[0]
for metric in nvdict:
if metric in str(line):
data[nvdict[metric]] = str(line).split(':')[1].strip()
if 'data' in locals():
dump_data(data)
``` |
{
"source": "josedvq/covfee",
"score": 2
} |
#### File: covfee/orm/task.py
```python
import json
import datetime
from .orm import db, app
from .. import tasks
import os
def url_prepend(url):
if url[:4] != 'http':
return app.config['MEDIA_URL'] + '/' + url
return url
class Task(db.Model):
""" Represents a single task, like eg. annotating one video """
__tablename__ = 'tasks'
id = db.Column(db.Integer, primary_key=True)
type = db.Column(db.String)
order = db.Column(db.Integer)
name = db.Column(db.String)
props = db.Column(db.JSON)
responses = db.relationship("TaskResponse", backref='task', cascade="all, delete-orphan")
# backref hits
# backref hitinstances
created_at = db.Column(db.Date, default=datetime.datetime.now)
updated_at = db.Column(db.Date, onupdate=datetime.datetime.now)
def __init__(self, type, order=0, name=None, **props):
self.type = type
self.order = order
self.name = name
# fix URLs
if 'media' in props:
for k, v in props['media'].items():
if k[-3:] == 'url':
props['media'][k] = url_prepend(v)
if k[-4:] == 'urls' and isinstance(v, list):
props['media'][k] = [url_prepend(url) for url in v]
self.props = props
def as_dict(self, editable=False):
        # compare by column name: `c` is a Column object, so `c != 'props'`
        # would always be true and the props column would leak into the dict
        task_dict = {c.name: getattr(self, c.name) for c in self.__table__.columns if c.name != 'props'}
task_dict = {**task_dict, **self.props}
task_dict['editable'] = editable
return task_dict
    def __str__(self):
        # Task has no `chunks` attribute (chunks belong to TaskResponse),
        # so report the task type instead
        return f'{self.name}: type={self.type}'
def __repr__(self):
return str(self)
class TaskResponse(db.Model):
""" Represents a task's response """
__tablename__ = 'taskresponses'
id = db.Column(db.Integer, primary_key=True)
# for numbering multiple response submissions
index = db.Column(db.Integer)
submitted = db.Column(db.Boolean)
task_id = db.Column(db.Integer, db.ForeignKey('tasks.id'))
hitinstance_id = db.Column(db.LargeBinary, db.ForeignKey('hitinstances.id'))
data = db.Column(db.JSON)
chunks = db.relationship("Chunk", backref='taskresponse', order_by="Chunk.index", cascade="all, delete-orphan")
def __init__(self, task_id, hitinstance_id, index, submitted=False, data=None, chunks=None):
self.task_id = task_id
self.hitinstance_id = hitinstance_id
self.index = index
self.submitted = submitted
self.data = data
self.chunks = chunks
def as_dict(self, with_chunk_data=False):
response_dict = {c.name: getattr(self, c.name)
for c in self.__table__.columns}
response_dict['hitinstance_id'] = response_dict['hitinstance_id'].hex()
if with_chunk_data:
response_dict['chunk_data'] = self.aggregate_chunk_data()
return response_dict
def aggregate_chunk_data(self):
if hasattr(tasks, self.task.type):
task_class = getattr(tasks, self.task.type)
chunk_data = [chunk.data for chunk in self.chunks]
return task_class.process_response(self.data, chunk_data, self.hitinstance, self.task)
else:
# default aggregation
return [x for y in self.chunks for x in y.data]
def make_results_object(self):
# apply task-specific aggregation method
result = {
'hit_id': self.hitinstance.hit.id.hex(),
'instance_id': self.hitinstance.id.hex(),
'task_id': self.task.id,
'hit_name': self.hitinstance.hit.name,
'task_name': self.task.name,
'data': self.aggregate_chunk_data()
}
return result
def write_json(self, dirpath):
fpath = os.path.join(dirpath, f'{self.task.name}_{self.index:d}.json')
processed_response = self.make_results_object()
if processed_response is None:
return False
json.dump(processed_response, open(fpath, 'w'))
return True
def write_csv(self, dirpath):
if not hasattr(tasks, self.task.type):
return False
processed_response = self.make_results_object()
if processed_response is None:
return False
fpath = os.path.join(dirpath, f'{self.task.name}_{self.index:d}.csv')
task_class = getattr(tasks, self.task.type)
df = task_class.to_dataframe(processed_response)
df.to_csv(fpath)
return True
db.Index('taskresponse_index', TaskResponse.task_id,
TaskResponse.hitinstance_id, TaskResponse.index)
# represents a chunk of task response (for continuous responses)
class Chunk(db.Model):
""" Represents a chunk of or partial task response"""
__tablename__ = 'chunks'
# for order-keeping of the chunks
index = db.Column(db.Integer, primary_key=True)
taskresponse_id = db.Column(db.Integer, db.ForeignKey(
'taskresponses.id'), primary_key=True)
data = db.Column(db.JSON)
def __init__(self, index, data):
self.index = index
self.data = data
def __str__(self):
return f' idx={self.index}'
def __repr__(self):
return str(self)
```
#### File: covfee/rest_api/api.py
```python
import json
from flask import request, jsonify, Blueprint, send_from_directory, make_response, Response, stream_with_context
from ..orm import db, app, Project, HIT, HITInstance, Task, TaskResponse, Chunk
from .auth import admin_required
import shutil
import os
api = Blueprint('api', __name__)
# PROJECTS
def jsonify_or_404(res, **kwargs):
if res is None:
return {'msg': 'not found'}, 404
else:
return jsonify(res.as_dict(**kwargs))
@app.teardown_request
def teardown_request(exception):
if exception:
db.session.rollback()
db.session.remove()
# return all projects
@api.route('/projects')
@admin_required
def projects():
with_hits = request.args.get('with_hits', False)
res = db.session.query(Project).all()
if res is None:
return jsonify([])
else:
return jsonify([p.as_dict(with_hits=with_hits) for p in res])
# return one project
@api.route('/projects/<pid>')
@admin_required
def project(pid):
with_hits = request.args.get('with_hits', False)
with_instances = request.args.get('with_instances', False)
res = db.session.query(Project).get(bytes.fromhex(pid))
return jsonify_or_404(res, with_hits=with_hits, with_instances=with_instances)
@api.route('/projects/<pid>/csv')
@admin_required
def project_csv(pid):
project = db.session.query(Project).get(bytes.fromhex(pid))
if project is None:
return {'msg': 'not found'}, 404
else:
df = project.get_dataframe()
res = make_response(df.to_csv())
res.headers["Content-Disposition"] = "attachment; filename=export.csv"
res.headers["Content-Type"] = "text/csv"
return res
@api.route('/projects/<pid>/download')
@admin_required
def project_download(pid):
is_csv = bool(request.args.get('csv', False))
project = db.session.query(Project).get(bytes.fromhex(pid))
if project is None:
return {'msg': 'not found'}, 404
dirpath, num_files = project.make_download(csv=is_csv)
if dirpath is None or num_files == 0:
# nothing to download
return '', 204
fname = os.path.join(app.config['TMP_PATH'], 'download')
shutil.make_archive(fname, 'zip', dirpath)
shutil.rmtree(dirpath)
return send_from_directory(app.config['TMP_PATH'], 'download.zip', as_attachment=True)
# HITS
# return one hit
@api.route('/hits/<hid>')
def hit(hid):
with_tasks = request.args.get('with_tasks', True)
with_instances = request.args.get('with_instances', False)
with_instance_tasks = request.args.get('with_instance_tasks', False)
res = db.session.query(HIT).get(bytes.fromhex(hid))
return jsonify_or_404(res,
with_tasks=with_tasks,
with_instances=with_instances,
with_instance_tasks=with_instance_tasks)
@api.route('/hits/<hid>/add_instances')
@admin_required
def instance_add(hid):
num_instances = request.args.get('num_instances', 1)
hit = db.session.query(HIT).get(bytes.fromhex(hid))
if hit is None:
return {'msg': 'not found'}, 404
new_instances = hit.add_instances(num_instances)
db.session.commit()
return jsonify([instance.as_dict(with_tasks=True) for instance in new_instances])
# INSTANCES
# return one HIT instance
@api.route('/instances/<iid>')
def instance(iid):
with_tasks = request.args.get('with_tasks', True)
with_responses = request.args.get('with_responses', True)
res = db.session.query(HITInstance).get(bytes.fromhex(iid))
return jsonify_or_404(res, with_tasks=with_tasks, with_responses=with_responses)
@api.route('/instance-previews/<iid>')
def instance_preview(iid):
res = HITInstance.query.filter_by(preview_id=bytes.fromhex(iid)).first()
return jsonify_or_404(res, with_tasks=True, with_responses=False)
# submit a hit (when finished)
@api.route('/instances/<iid>/submit', methods=['POST'])
def instance_submit(iid):
instance = db.session.query(HITInstance).get(bytes.fromhex(iid))
if instance is None:
return jsonify({'msg': 'invalid instance'}), 400
    instance.submitted = True
    # persist the submission flag; without a commit the change is lost
    db.session.commit()
    return jsonify(instance.as_dict(with_tasks=True))
# @api.route('/instances/<iid>/copy')
# @admin_required
# def instance_copy(iid):
# preserve_data = request.args.get('preserve_data', False)
# instance = db.session.query(HITInstance).get(bytes.fromhex(iid))
# if instance is None:
# return jsonify({'msg': 'invalid instance'}), 400
# instance_copy = instance.copy()
# db.session.add(instance_copy)
# db.session.commit()
# return jsonify(instance_copy.as_dict(with_tasks=False))
@api.route('/instances/<iid>/download.json')
@admin_required
def instance_download(iid):
instance = db.session.query(HITInstance).get(bytes.fromhex(iid))
if instance is None:
return jsonify({'msg': 'not found'}), 404
responses = instance.responses.filter_by(submitted=True).all()
if len(responses) == 0:
# nothing to download
return '', 204
def generate():
yield '['
for i, res in enumerate(responses):
yield json.dumps(res.make_results_object())
if i < len(responses)-1:
yield ','
yield ']'
#res.headers["Content-Disposition"] = "attachment; filename=export.csv"
#res.headers["Content-Type"] = "text/csv"
return Response(stream_with_context(generate()), mimetype='application/octet-stream')
# try:
# dirpath, num_files = instance.make_download(csv=is_csv)
# except NotImplementedError:
# return jsonify({'msg': 'File aggregation not implemented for this task.'}), 404
# if dirpath is None or num_files == 0:
# # nothing to download
# return '', 204
# fname = os.path.join(app.config['TMP_PATH'], 'download')
# shutil.make_archive(fname, 'zip', dirpath)
# shutil.rmtree(dirpath)
# return send_from_directory(app.config['TMP_PATH'], 'download.zip', as_attachment=True)
# TASKS
# create a task attached to an instance
@api.route('/instances/<iid>/tasks/add', methods=['POST'])
def task_add_to_instance(iid):
instance = db.session.query(HITInstance).get(bytes.fromhex(iid))
if instance is None:
return jsonify({'msg': 'invalid instance'}), 400
if instance.hit.type != 'annotation':
return jsonify(msg='Only annotation-type instances can be user-edited.'), 403
task = Task(**request.json)
instance.tasks.append(task)
db.session.commit()
return jsonify(task.as_dict(editable=True))
# edit an existing task
@api.route('/tasks/<kid>/edit', methods=['POST'])
def task_edit(kid):
task = db.session.query(Task).get(int(kid))
if task is None:
return jsonify({'msg': 'invalid task'}), 400
if task.hits:
return jsonify(msg='Only annotation-type instances can be user-edited.'), 403
task.name = request.json['name']
db.session.commit()
return jsonify(task.as_dict(editable=True))
# delete an existing task
@api.route('/tasks/<kid>/delete')
def task_delete(kid):
task = db.session.query(Task).get(int(kid))
if task is None:
return jsonify({'msg': 'invalid task'}), 400
if task.hits:
return jsonify(msg='Only annotation-type instances can be user-deleted.'), 403
db.session.delete(task)
db.session.commit()
return jsonify({'success': True}), 200
@api.route('/instances/<iid>/tasks/<kid>/responses')
def response(iid, kid):
with_chunk_data = request.args.get('with_chunk_data', True)
lastResponse = TaskResponse.query.filter_by(
hitinstance_id=bytes.fromhex(iid),
task_id=int(kid),
submitted=True).order_by(TaskResponse.index.desc()).first()
if lastResponse is None:
return jsonify(msg='No submitted responses found.'), 403
response_dict = lastResponse.as_dict(with_chunk_data=with_chunk_data)
return jsonify(response_dict)
# record a response to a task
# task kid may or may not be associated to instance iid
@api.route('/instances/<iid>/tasks/<kid>/submit', methods=['POST'])
def response_submit(iid, kid):
with_chunk_data = request.args.get('with_chunk_data', True)
lastResponse = TaskResponse.query.filter_by(
hitinstance_id=bytes.fromhex(iid), task_id=int(kid)).order_by(TaskResponse.index.desc()).first()
if lastResponse is not None and not lastResponse.submitted:
# there is an open (not submitted) response:
lastResponse.data = request.json
lastResponse.submitted = True
db.session.commit()
return jsonify(lastResponse.as_dict(with_chunk_data=with_chunk_data))
if lastResponse is None:
response_index = 0 # first response
elif lastResponse.submitted:
response_index = lastResponse.index+1 # following response
# no responses have been submitted or only completed responses
response = TaskResponse(
task_id=int(kid),
hitinstance_id=bytes.fromhex(iid),
index=response_index,
data=request.json,
chunks=[],
submitted=True)
db.session.add(response)
db.session.commit()
return jsonify(response.as_dict(with_chunk_data=with_chunk_data))
# receive a chunk of a response, for continuous responses
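# Hedged illustration of the expected request body, inferred from the Chunk
# constructor below: {"index": 0, "data": [...]}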
@api.route('/instances/<iid>/tasks/<kid>/chunk', methods=['POST'])
def response_chunk(iid, kid):
response = TaskResponse.query.filter_by(hitinstance_id=bytes.fromhex(iid), task_id=int(kid)).order_by(TaskResponse.index.desc()).first()
# no responses or only submitted responses
# -> create new response
if response is None or response.submitted:
response_index = 0
# increment index of last response
if response is not None and response.submitted:
response_index = response.index + 1
response = TaskResponse(
task_id=int(kid),
hitinstance_id=bytes.fromhex(iid),
index=response_index,
submitted=False,
chunks=[])
# if there is a previous chunk with the same index, overwrite it
if len(response.chunks) > 0:
sent_index = request.json['index']
chunk = next(
(chunk for chunk in response.chunks if chunk.index == sent_index), None)
if chunk is not None:
chunk.data = request.json['data']
db.session.commit()
return jsonify({'success': True}), 201
# no previous chunk with the same index -> append the chunk
chunk = Chunk(**request.json)
response.chunks.append(chunk)
db.session.add(response)
db.session.commit()
return jsonify({'success': True}), 201
```
#### File: utils/continuous_keypoint/serialize.py
```python
from pathlib import Path
from typing import List, Any
CSV_HEADER = ["linux-time", "frame", "x", "y", "occluded", "last-action"]
def write_to_csv(chunk_data: List[List[Any]], out_file: Path,
convert_time_to_frame: bool = False) -> None:
""" Writes a chunk to a csv file at the specified path """
if not out_file.is_file():
# File does not exist, write header
with open(out_file, "w") as f:
f.write(",".join(CSV_HEADER)+"\n")
last_action = None
with open(out_file, "a") as f:
# Write data in append mode
for entry in chunk_data:
# Handle frame number
frame = entry[1]
if convert_time_to_frame:
# TODO: Get LTC Frame to avoid rounding errors
frame = frame
# Handle payload
payload = entry[-1]
if len(payload) == 3:
payload_str = ",".join(map(str, payload))
elif len(payload) == 1:
last_action = payload[0]
payload_str = "None,None,None"
else:
raise ValueError("Unrecognized payload length; accepts 3 or 1")
f.write("{},{},{},{}\n".format(
str(entry[0]), frame, payload_str, last_action)
)
def main() -> None:
""" Usage example for the csv writer """
out_file = Path("csv_example.csv")
chunk_data = [[1600004663282, 0, ['play']],
[1600004664438, 0.438416, [0.5264663805436338,
0.27735368956743, False]]]
write_to_csv(chunk_data, out_file)
if __name__ == "__main__":
main()
``` |
{
"source": "JoseDzirehChong/PyMsgBox",
"score": 3
} |
#### File: PyMsgBox/tests/basicTests.py
```python
import unittest
import sys
import os
import time
import threading
import inspect
sys.path.insert(0, os.path.abspath('..'))
import pymsgbox
# Note: Yes, PyAutoGUI does have PyMsgBox itself as a dependency, but we won't be using that part of PyAutoGUI for this testing.
import pyautogui # PyAutoGUI simulates key presses on the message boxes.
pyautogui.PAUSE = 0.1
GUI_WAIT = 0.2 # if tests start failing, maybe try bumping this up a bit (though that'll slow the tests down)
"""
NOTE: You will often see this code in this test:
print('Line', inspect.currentframe().f_lineno);
This is because due to the GUI nature of these tests, if something messes up
and PyAutoGUI is unable to click on the message box, this program will get
held up. By printing out the line number, you will at least be able to see
which line displayed the message box that is held up.
This is a bit unorthodox, and I'm welcome to other suggestions about how to
deal with this possible scenario.
"""
class KeyPresses(threading.Thread):
def __init__(self, keyPresses):
super(KeyPresses, self).__init__()
self.keyPresses = keyPresses
def run(self):
time.sleep(GUI_WAIT)
pyautogui.typewrite(self.keyPresses, interval=0.05)
class AlertTests(unittest.TestCase):
def test_alert(self):
# no text
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.alert(), 'OK')
# text
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.alert('Hello'), 'OK')
# text and title
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.alert('Hello', 'Title'), 'OK')
# text, title, and custom button
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.alert('Hello', 'Title', 'Button'), 'Button')
# using keyword arguments
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.alert(text='Hello', title='Title', button='Button'), 'Button')
class ConfirmTests(unittest.TestCase):
def test_confirm(self):
# press enter on OK
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm(), 'OK')
# press right, enter on Cancel
t = KeyPresses(['right', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm(), 'Cancel')
# press right, left, right, enter on Cancel
t = KeyPresses(['right', 'left', 'right', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm(), 'Cancel')
# press tab, enter on Cancel
t = KeyPresses(['tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm(), 'Cancel')
# press tab, tab, enter on OK
t = KeyPresses(['tab', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm(), 'OK')
# with text
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello'), 'OK')
# with text, title
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title'), 'OK')
# with text, title, and one custom button
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['A']), 'A')
# with text, title, and one custom blank button
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['']), '')
# with text, title, and two custom buttons
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['A', 'B']), 'A')
t = KeyPresses(['right', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['A', 'B']), 'B')
t = KeyPresses(['right', 'left', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['A', 'B']), 'A')
t = KeyPresses(['tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['A', 'B']), 'B')
t = KeyPresses(['tab', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['A', 'B']), 'A')
# with text, title, and three custom buttons
t = KeyPresses(['tab', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['A', 'B', 'C']), 'C')
# with text, title, and four custom buttons
t = KeyPresses(['tab', 'tab', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['A', 'B', 'C', 'D']), 'D')
# with text, title, and five custom buttons
t = KeyPresses(['tab', 'tab', 'tab', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm('Hello', 'Title', ['A', 'B', 'C', 'D', 'E']), 'E')
# with text, title, and three custom buttons specified with keyword arguments
t = KeyPresses(['tab', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm(text='Hello', title='Title', buttons=['A', 'B', 'C']), 'C')
# test that pressing Esc is the same as clicking Cancel (but only when there is a cancel button)
t = KeyPresses(['escape'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm(text='Escape button press test'), 'Cancel')
# Make sure that Esc keypress does nothing if there is no Cancel button.
t = KeyPresses(['escape', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.confirm(text='Escape button press test', buttons=['OK', 'Not OK']), 'OK')
class PromptPasswordTests(unittest.TestCase):
def test_prompt(self):
self._prompt_and_password_tests(pymsgbox.prompt, 'prompt()')
def test_password(self):
# NOTE: Currently there is no way to test the appearance of the * or custom mask characters.
self._prompt_and_password_tests(pymsgbox.password, 'password()')
def _prompt_and_password_tests(self, msgBoxFunc, msgBoxFuncName):
# entering nothing
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual((msgBoxFuncName, msgBoxFunc()), (msgBoxFuncName, ''))
# entering text
t = KeyPresses(['a', 'b', 'c', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual((msgBoxFuncName, msgBoxFunc()), (msgBoxFuncName, 'abc'))
# entering text, tabbing to the Ok key
t = KeyPresses(['a', 'b', 'c', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual((msgBoxFuncName, msgBoxFunc()), (msgBoxFuncName, 'abc'))
# entering text but hitting cancel
t = KeyPresses(['a', 'b', 'c', 'tab', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual((msgBoxFuncName, msgBoxFunc()), (msgBoxFuncName, None))
# with text
t = KeyPresses(['a', 'b', 'c', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual((msgBoxFuncName, msgBoxFunc('Hello')), (msgBoxFuncName, 'abc'))
# with text and title
t = KeyPresses(['a', 'b', 'c', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual((msgBoxFuncName, msgBoxFunc('Hello', 'Title')), (msgBoxFuncName, 'abc'))
# with text, title and default value
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual((msgBoxFuncName, msgBoxFunc('Hello', 'Title', 'default')), (msgBoxFuncName, 'default'))
# with text, title and default value specified by keyword arguments
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual((msgBoxFuncName, msgBoxFunc(text='Hello', title='Title', default='default')), (msgBoxFuncName, 'default'))
class TimeoutTests(unittest.TestCase):
def test_timeout(self):
        # Note: If these tests fail, the unit tests will hang.
self.assertEqual(pymsgbox.alert('timeout test', timeout=1000), pymsgbox.TIMEOUT_TEXT)
self.assertEqual(pymsgbox.confirm('timeout test', timeout=1000), pymsgbox.TIMEOUT_TEXT)
self.assertEqual(pymsgbox.prompt('timeout test', timeout=1000), pymsgbox.TIMEOUT_TEXT)
self.assertEqual(pymsgbox.password('<PASSWORD>', timeout=1000), pymsgbox.TIMEOUT_TEXT)
""""
# NOTE: This is weird. This test fails (the additional typed in text gets added
# to the end of the default string, instead of replacing it), but when I run
# this same code using PyAutoGUI from the interactive shell (on Win7 Py3.3) it
# works. It also works when I type it in myself.
# Commenting this out for now.
class DefaultValueOverwriteTests(unittest.TestCase):
def test_prompt(self):
self._prompt_and_password_tests(pymsgbox.prompt, 'prompt()')
def test_password(self):
# NOTE: Currently there is no way to test the appearance of the * or custom mask characters.
self._prompt_and_password_tests(pymsgbox.password, 'password()')
def _prompt_and_password_tests(self, msgBoxFunc, msgBoxFuncName):
# with text, title and default value that is typed over
t = KeyPresses(['a', 'b', 'c', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual((msgBoxFuncName, msgBoxFunc('Hello', 'Title', 'default')), (msgBoxFuncName, 'abc'))
"""
class WindowsNativeAlertTests(unittest.TestCase):
def test_alert(self):
if sys.platform != 'win32':
return
# no text
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.alert(), pymsgbox.OK_TEXT)
# text
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.alert('Hello'), pymsgbox.OK_TEXT)
# text and title
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.alert('Hello', 'Title'), pymsgbox.OK_TEXT)
# text, title, and custom button
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.alert('Hello', 'Title', 'Button'), 'Button')
# using keyword arguments
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.alert(text='Hello', title='Title', button='Button'), 'Button')
class WindowsNativeConfirmTests(unittest.TestCase):
def test_confirm(self):
if sys.platform != 'win32':
return
# press enter on OK
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm(), pymsgbox.OK_TEXT)
# press right, enter on Cancel
t = KeyPresses(['right', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm(), pymsgbox.CANCEL_TEXT)
# press right, left, right, enter on Cancel
t = KeyPresses(['right', 'left', 'right', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm(), pymsgbox.CANCEL_TEXT)
# press tab, enter on Cancel
t = KeyPresses(['tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm(), pymsgbox.CANCEL_TEXT)
# press tab, tab, enter on OK
t = KeyPresses(['tab', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm(), pymsgbox.OK_TEXT)
# with text
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm('Hello'), pymsgbox.OK_TEXT)
# with text, title
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm('Hello', 'Title'), pymsgbox.OK_TEXT)
# with text, title, and one custom button
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm('Hello', 'Title', ['A']), 'A')
# with text, title, and one custom blank button
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm('Hello', 'Title', ['']), '')
# with text, title, and two custom buttons
t = KeyPresses(['enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm('Hello', 'Title', ['A', 'B']), 'A')
t = KeyPresses(['right', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm('Hello', 'Title', ['A', 'B']), 'B')
t = KeyPresses(['right', 'left', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm('Hello', 'Title', ['A', 'B']), 'A')
t = KeyPresses(['tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm('Hello', 'Title', ['A', 'B']), 'B')
t = KeyPresses(['tab', 'tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm('Hello', 'Title', ['A', 'B']), 'A')
# with text, title, and three custom buttons specified with keyword arguments
t = KeyPresses(['tab', 'enter'])
t.start()
print('Line', inspect.currentframe().f_lineno); self.assertEqual(pymsgbox.native.confirm(text='Hello', title='Title', buttons=['A', 'B']), 'B')
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "joseeden/notes-aws-sysops",
"score": 2
} |
#### File: scripts_digitalcloud_labs/Systems_Manager/lambda-ssm.py
```python
import boto3
ssm = boto3.client('ssm', region_name="ap-southeast-2")
def lambda_handler(event, context):
parameter = ssm.get_parameter(Name='/rds/db1', WithDecryption=True)
print(parameter['Parameter']['Value'])
return "Successfully retrieved parameter!"
``` |
{
"source": "joseeden/notes-cbt-nuggets-devasc",
"score": 3
} |
#### File: notes-cbt-nuggets-devasc/Notes_0-9/3-Singleton.py
```python
class ConfigValues:
    # When a class variable is preceded with '__', it is a private variable.
    # This means it can't be accessed from outside the class.
    __instance = None
    # The decorator '@staticmethod' means the getInstance() method can be
    # executed even before an object of this class has been instantiated.
@staticmethod
def getInstance():
# if __instance is blank, create a new instance.
# The __instance will be blank on the first run.
# On the second run, the __instance variable will not be empty,
# thus this If-statement will not be executed.
if ConfigValues.__instance == None:
ConfigValues()
return ConfigValues.__instance
    # Initialize the object.
    # Note that this constructor prevents you from creating a second
    # instance once the singleton already exists.
def __init__(self):
"""Virtually private constructor"""
if ConfigValues.__instance != None:
raise Exception("This class is a singleton")
else:
ConfigValues.__instance = self
# Creating the object
# Creating the object
s = ConfigValues()              # This creates the one and only instance
print(s)
s = ConfigValues.getInstance()  # This can run even before any object exists
print(s)                        # Same instance as above (same address printed)
s = ConfigValues.getInstance()  # Still returns the same single instance
print(s)
# s = ConfigValues()            # This would raise "This class is a singleton"
# print(s)                      # because it would create a 2nd instance of s
# r = ConfigValues()            # This would raise "This class is a singleton"
# print(r)                      # because it would create a 2nd instance
# Test the code by running python 3-Singleton.py
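# Expected behaviour (addresses will differ between runs): the three prints
# show the same object address, confirming getInstance() always returns the
# one instance.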
```
#### File: notes-cbt-nuggets-devasc/Notes_0-9/6-MVC.py
```python
class Device:
ipaddress = ""
port = ""
    # This will fill up the list (devices) of Device objects
@staticmethod
def finddevices():
devices = []
d = Device()
d.ipaddress = "192.168.1.1"
d.port = "2001"
devices.append(d)
d = Device()
d.ipaddress = "192.168.1.50"
d.port = "7091"
devices.append(d)
d = Device()
d.ipaddress = "192.168.1.100"
d.port = "80"
devices.append(d)
return devices
# This will display the devices in the list.
# It doesn't care how many devices are in the list; it just iterates through the list.
class DevicesView:
def showdevices(self, devices):
for d in devices:
print("------------------------------")
print("IP Address: " + d.ipaddress)
print("Port: " + str(d.port))
print("------------------------------")
class DevicesController:
def __init__(self):
# Calls finddevices to populate the devices list.
# It then creates a new instance of the view
devices = Device.finddevices()
v = DevicesView()
v.showdevices(devices)
# Creating an instance of the controller
c = DevicesController()
```
#### File: notes-cbt-nuggets-devasc/Notes_30-39/circletest.py
```python
import math
import unittest
# this is the function that we'll test
# This computes the circumference of a circle, given the radius (r)
def circum(r):
return 2*math.pi*r
# Test Cases
class testcircle(unittest.TestCase):
# Test Case 1
def testcircum_valid(self):
        # if circum(5) equals the value after it,
        # the assertion passes and the test returns OK
self.assertEqual(circum(5),31.41592653589793)
# Test Case 2
def testcircum_zero(self):
self.assertEqual(circum(0),0)
# Test Case 3
# def testcircum_text(self):
# self.assertRaises(circum("Frank"))
# calls the testcase
unittest.main()
``` |
{
"source": "joseedil/g-pcc-dyadic-codec",
"score": 3
} |
#### File: g-pcc-dyadic-codec/scripts/ply_generator.py
```python
import sys
import random
def main():
    if len(sys.argv) < 4:
        # three arguments are required: output file, cube side, point count
        print("Insufficient arguments!")
        sys.exit(1)
file_name = sys.argv[1]
pc_side = int(sys.argv[2])
in_n_points = int(sys.argv[3])
f = open(file_name, "w")
f.write(
"""ply
format ascii 1.0
element vertex {n_points:}
property float x
property float y
property float z
end_header\n""".format(n_points = in_n_points))
for i in range(in_n_points):
x_random,y_random,z_random = [random.randint(0, pc_side-1),
random.randint(0, pc_side-1),
random.randint(0, pc_side-1)]
f.write("{x:} {y:} {z:}\n".format(x = x_random,y = y_random,z = z_random))
f.close()
if __name__ == "__main__":
main()
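# Hedged usage sketch: `python ply_generator.py out.ply 64 1000` would write
# 1000 random integer points inside a 64x64x64 cube to out.ply (argument
# meanings inferred from main() above).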
``` |
{
"source": "Joseesc24/Sudoku_diferentes_Heuristicas",
"score": 4
} |
#### File: Sudoku_diferentes_Heuristicas/container_genetic_algorithm_solver/genetic_algorithm_functions.py
```python
from copy import deepcopy
import random
async def tournament_selection(population: list) -> tuple:
"""Roulette Selection
This function is used to select an individual from all the population based on it's fitness score for making the crossover.
Args:
population (list): The probability of the event occurrence.
Returns:
tuple: An individual chromosome representation for the crossover.
"""
tournament_size = len(population) // 2
if tournament_size == 0:
tournament_size = 2
population = sorted(population, key=lambda individual: individual[0])
tournament_members = population[:tournament_size]
return random.choice(tournament_members)
async def exchange_random_row(individual_1: list, individual_2: list) -> list:
"""Exchange Random Row
This function is used for making the crossover between two individuals, in picks a random row an exchange it between the two
given individuals.
Args:
individual_1 (list): The representation of the first individual.
individual_2 (list): The representation of the second individual.
Returns:
tuple: one of the individuals after making the rows exchange.
"""
exchange_index = random.randrange(len(individual_1))
individual_1[exchange_index], individual_2[exchange_index] = (
deepcopy(individual_2[exchange_index]),
deepcopy(individual_1[exchange_index]),
)
return random.choice([individual_1, individual_2])
```
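Both helpers are coroutines, so a caller needs an event loop. A toy illustration of the row exchange (the boards here are made up; in the solver below an individual is a (fitness, board) tuple):
```python
import asyncio

async def _demo():
    a = [[1, 1, 1], [2, 2, 2], [3, 3, 3]]
    b = [[9, 9, 9], [8, 8, 8], [7, 7, 7]]
    child = await exchange_random_row(a, b)
    print(child)  # one parent, with a single row swapped in from the other

asyncio.run(_demo())
```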
#### File: Sudoku_diferentes_Heuristicas/container_genetic_algorithm_solver/genetic_algorithm.py
```python
from general_solver_functions_access_custom import calculate_board_fitness_single
from general_solver_functions_access import board_random_initialization
from general_solver_functions_access import board_random_mutation
from genetic_algorithm_functions import tournament_selection
from genetic_algorithm_functions import exchange_random_row
from buffered_gather import buffered_gather
from logger import setup_logger
from copy import deepcopy
import itertools
import random
import os
logger = setup_logger(logger_name=os.path.basename(__file__).split(".")[0])
async def random_decision(probability: float) -> bool:
"""Random Decision
    This function decides whether a random event with the given occurrence probability should happen.
    Args:
        probability (float): The probability of the event occurrence.
    Returns:
        bool: Whether the random event should occur.
"""
occurrence = random.uniform(0, 1)
    return occurrence <= probability
async def crossover(
filled_board: list, population: list, crossover_probability: float
) -> list:
"""Crossover
    This function creates a new board by exchanging a row between the given board and a tournament-selected individual,
    depending on the crossover probability.
Args:
filled_board (list): A full filled board representation.
population (list): All the current population.
crossover_probability (float): The crossover probability of the individual.
Returns:
        list: The crossover board, or None if no crossover occurs.
"""
occurrence = await random_decision(probability=crossover_probability)
if occurrence is True:
crossover_individual = await tournament_selection(population=population)
return await exchange_random_row(filled_board, crossover_individual[1])
else:
return None
async def mutate(
filled_board: list, fixed_numbers_board: list, mutation_probability: float
) -> list:
"""Mutate
    This function creates a new board by mutating the original board, based on its mutation probability.
Args:
filled_board (list): A full filled board representation.
fixed_numbers_board (list): A board representation that includes just the fixed numbers.
mutation_probability (float): The mutation probability of the individual.
Returns:
        list: The mutated board, or None if the board doesn't mutate.
"""
occurrence = await random_decision(probability=mutation_probability)
if occurrence is True:
return await board_random_mutation(
board=filled_board, fixed_numbers_board=fixed_numbers_board
)
else:
return None
async def solve_using_genetic_algorithm(
genetic_algorithm_crossover: float,
genetic_algorithm_generations: int,
genetic_algorithm_population: int,
genetic_algorithm_mutation: float,
zone_height: int,
zone_length: int,
board: list,
) -> list:
"""Solve Using Genetic Algorithm
    This function uses a genetic algorithm to solve sudoku boards. It is based on chromosome representations and concurrent
    fitness evaluation, solving the board as fast as possible while avoiding local optima.
Args:
genetic_algorithm_crossover (float): The crossover probability.
genetic_algorithm_generations (int): The generations number.
genetic_algorithm_population (int): The population number.
genetic_algorithm_mutation (float): The mutation probability.
zone_height (int): The zones height.
zone_length (int): The zones length.
board (list): A full filled board representation.
Returns:
        list: The best board found.
"""
logger.debug(msg=r"starting to solve using genetic algorithm")
fixed_numbers_board = deepcopy(board)
population = await buffered_gather(
[
board_random_initialization(
fixed_numbers_board=fixed_numbers_board,
zone_height=zone_height,
zone_length=zone_length,
)
for _ in itertools.repeat(None, genetic_algorithm_population)
]
)
population = await buffered_gather(
[
calculate_board_fitness_single(
board=individual, zone_height=zone_height, zone_length=zone_length
)
for individual in population
]
)
for _ in itertools.repeat(None, genetic_algorithm_generations):
# Creating mutated population.
mutated_population = await buffered_gather(
[
mutate(
mutation_probability=genetic_algorithm_mutation,
fixed_numbers_board=fixed_numbers_board,
filled_board=individual[1],
)
for individual in population
]
)
# Filtering the mutated population.
        mutated_population = [
            individual for individual in mutated_population if individual is not None
        ]
# Ranking the mutated population.
mutated_population = await buffered_gather(
[
calculate_board_fitness_single(
zone_height=zone_height,
zone_length=zone_length,
board=individual,
)
for individual in mutated_population
]
)
        # Creating the crossover population.
population_copy = deepcopy(population)
crossover_population = await buffered_gather(
[
crossover(
crossover_probability=genetic_algorithm_crossover,
population=population_copy,
filled_board=individual[1],
)
for individual in population
]
)
# Filtering the crossover population.
        crossover_population = [
            individual for individual in crossover_population if individual is not None
        ]
# Ranking the crossover population.
crossover_population = await buffered_gather(
[
calculate_board_fitness_single(
zone_height=zone_height,
zone_length=zone_length,
board=individual,
)
for individual in crossover_population
]
)
# Extending and sorting population by individuals rank.
population.extend(crossover_population)
population.extend(mutated_population)
population = sorted(
population, key=lambda individual: individual[0], reverse=False
)
# Removing the not apt individuals.
population = population[:genetic_algorithm_population]
logger.debug(msg=r"ending to solve using genetic algorithm")
return population[0][1]
```
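A hypothetical driver for the solver; the parameter values below are illustrative rather than taken from the repository's configuration:
```python
import asyncio

# 0 marks an empty cell; a real board would carry the puzzle's fixed numbers.
board = [[0] * 9 for _ in range(9)]

solution = asyncio.run(
    solve_using_genetic_algorithm(
        genetic_algorithm_crossover=0.7,
        genetic_algorithm_generations=200,
        genetic_algorithm_population=100,
        genetic_algorithm_mutation=0.3,
        zone_height=3,
        zone_length=3,
        board=board,
    )
)
```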
{
"source": "joseespinadote/aframe_react_flask_travis",
"score": 2
}
#### File: joseespinadote/aframe_react_flask_travis/server.py
```python
from flask import Flask, render_template, request, Response, send_from_directory
from client import get_historical_events, get_response_descriptions
import json
import datetime
import requests
import re
app = Flask(__name__, template_folder='./public',
static_url_path='/dist', static_folder='./dist')
global_json_travis_notification = None
# TODO: receive Travis notifications live ("hot")
def event_stream():
# if global_json_travis_notification :
global global_json_travis_notification
    # SSE messages are terminated by a blank line, hence the double newline
    yield 'data:{}\n\n'.format(global_json_travis_notification)
#global_json_travis_notification = None
@app.route('/stream')
def stream():
return Response(event_stream(), mimetype="text/event-stream")
@app.route('/')
def hello_world():
return render_template('index.html')
@app.route('/x')
def web_datos():
return render_template('webdatos.html')
@app.route("/request", methods=["GET"])
def handle_request():
if request.method == "GET":
long, lat = request.args.get("long"), request.args.get("lat")
wdResponse = get_historical_events(lat, long)
return json.dumps(get_response_descriptions(wdResponse))
@app.route('/user')
def travis_wrapper_user():
url = 'https://api.travis-ci.org/user'
token = request.args.get('token')
headers = {'Travis-API-Version': '3',
'User-Agent': 'API Explorer', 'Authorization': 'token ' + token}
r = requests.get(url, headers=headers)
return r.text
@app.route('/repos')
def travis_wrapper_repos():
url = 'https://api.travis-ci.org/repos?repository.active=true&limit=200'
token = request.args.get('token')
headers = {'Travis-API-Version': '3',
'User-Agent': 'API Explorer', 'Authorization': 'token ' + token}
r = requests.get(url, headers=headers)
return r.text
@app.route('/extRepos')
def travis_wrapper_ext_repos():
provider_login = request.args.get('provider_login')
url = 'https://api.travis-ci.org/owner/'+provider_login + \
'/repos?repository.active=true&limit=200'
token = request.args.get('token')
headers = {'Travis-API-Version': '3',
'User-Agent': 'API Explorer', 'Authorization': 'token ' + token}
r = requests.get(url, headers=headers)
return r.text
@app.route('/repoTree')
def travis_wrapper_repoTree():
    token = request.args.get('token')
    repo_id = request.args.get('repo_id')
    url = 'https://api.travis-ci.org/repo/'+repo_id + \
        '/builds?include=job.state,job.config&limit=12'
headers = {'Travis-API-Version': '3',
'User-Agent': 'API Explorer', 'Authorization': 'token ' + token}
r = requests.get(url, headers=headers)
return r.text
@app.route('/builds')
def travis_wrapper_builds():
repo_id = request.args.get('repo_id')
url = 'https://api.travis-ci.org/repo/'+repo_id+'/builds'
token = request.args.get('token')
headers = {'Travis-API-Version': '3',
'User-Agent': 'API Explorer', 'Authorization': 'token ' + token}
r = requests.get(url, headers=headers)
return r.text
@app.route('/build')
def travis_wrapper_build():
    build_id = request.args.get('build_id')
url = 'https://api.travis-ci.org/build/'+build_id
token = request.args.get('token')
headers = {'Travis-API-Version': '3',
'User-Agent': 'API Explorer', 'Authorization': 'token ' + token}
r = requests.get(url, headers=headers)
return r.text
@app.route('/buildJobs')
def travis_wrapper_build_jobs():
build_id = request.args.get('build_id')
url = 'https://api.travis-ci.org/build/' + \
build_id+'/jobs?include=job.state,job.config'
token = request.args.get('token')
headers = {'Travis-API-Version': '3',
'User-Agent': 'API Explorer', 'Authorization': 'token ' + token}
r = requests.get(url, headers=headers)
return r.text
@app.route('/job')
def travis_wrapper_job():
job_id = request.args.get('job_id')
url = 'https://api.travis-ci.org/job/'+job_id
token = request.args.get('token')
headers = {'Travis-API-Version': '3',
'User-Agent': 'API Explorer', 'Authorization': 'token ' + token}
r = requests.get(url, headers=headers)
return r.text
@app.route('/jobLog')
def travis_wrapper_jobLog():
job_id = request.args.get('job_id')
url = 'https://api.travis-ci.org/job/'+job_id+'/log'
token = request.args.get('token')
headers = {'Travis-API-Version': '3',
'User-Agent': 'API Explorer', 'Authorization': 'token ' + token}
r = requests.get(url, headers=headers)
contents = r.json()['content'].strip()
contents = re.sub(r"\s+", " ", contents)
errorIndexes = [m.start() for m in re.finditer(' [Ee]rror ', contents)]
offset = 150
rightOffset = -1
arrError = []
for i in errorIndexes:
if i <= rightOffset:
continue
rightOffset = i + offset
leftOffset = i - offset if i >= offset else i
arrError.append("..." + contents[leftOffset:rightOffset] + "...")
return {"errores": arrError}
@app.route('/webhook', methods=['POST'])
def respond():
global global_json_travis_notification
global_json_travis_notification = json.loads(request.form['payload'])
print(global_json_travis_notification)
return Response(status=200)
```
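Every Travis route above rebuilds the same headers and repeats the same `requests.get` call; a small helper would remove that duplication. A sketch (`travis_get` is a name introduced here, not part of the original server):
```python
def travis_get(path, token):
    # One place for the Travis v3 API base URL and headers.
    headers = {'Travis-API-Version': '3',
               'User-Agent': 'API Explorer',
               'Authorization': 'token ' + token}
    return requests.get('https://api.travis-ci.org' + path, headers=headers).text

# e.g. the /repos route body would reduce to:
# return travis_get('/repos?repository.active=true&limit=200', request.args.get('token'))
```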
{
"source": "JoseEspinosa/viralrecon",
"score": 3
}
#### File: viralrecon/bin/check_samplesheet.py
```python
import os
import sys
import errno
import argparse
def parse_args(args=None):
Description = "Reformat nf-core/viralrecon samplesheet file and check its contents."
Epilog = "Example usage: python check_samplesheet.py <FILE_IN> <FILE_OUT>"
parser = argparse.ArgumentParser(description=Description, epilog=Epilog)
parser.add_argument("FILE_IN", help="Input samplesheet file.")
parser.add_argument("FILE_OUT", help="Output file.")
parser.add_argument('-pl', '--platform', type=str, dest="PLATFORM", default='illumina', help="Sequencing platform for input data. Accepted values = 'illumina' or 'nanopore' (default: 'illumina').")
return parser.parse_args(args)
def make_dir(path):
if len(path) > 0:
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise exception
def print_error(error, context="Line", context_str=""):
error_str = "ERROR: Please check samplesheet -> {}".format(error)
if context != "" and context_str != "":
error_str = "ERROR: Please check samplesheet -> {}\n{}: '{}'".format(
error, context.strip(), context_str.strip()
)
print(error_str)
sys.exit(1)
def check_illumina_samplesheet(file_in, file_out):
"""
This function checks that the samplesheet follows the following structure:
sample,fastq_1,fastq_2
SAMPLE_PE,SAMPLE_PE_RUN1_1.fastq.gz,SAMPLE_PE_RUN1_2.fastq.gz
SAMPLE_PE,SAMPLE_PE_RUN2_1.fastq.gz,SAMPLE_PE_RUN2_2.fastq.gz
SAMPLE_SE,SAMPLE_SE_RUN1_1.fastq.gz,
For an example see:
https://github.com/nf-core/test-datasets/blob/viralrecon/samplesheet/samplesheet_test_illumina_amplicon.csv
"""
sample_mapping_dict = {}
with open(file_in, "r") as fin:
## Check header
MIN_COLS = 2
HEADER = ['sample', 'fastq_1', 'fastq_2']
header = [x.strip('"') for x in fin.readline().strip().split(",")]
if header[: len(HEADER)] != HEADER:
print("ERROR: Please check samplesheet header -> {} != {}".format(",".join(header), ",".join(HEADER)))
sys.exit(1)
## Check sample entries
for line in fin:
lspl = [x.strip().strip('"') for x in line.strip().split(",")]
# Check valid number of columns per row
if len(lspl) < len(HEADER):
print_error(
"Invalid number of columns (minimum = {})!".format(len(HEADER)),
"Line",
line,
)
num_cols = len([x for x in lspl if x])
if num_cols < MIN_COLS:
print_error(
"Invalid number of populated columns (minimum = {})!".format(MIN_COLS),
"Line",
line,
)
## Check sample name entries
sample, fastq_1, fastq_2 = lspl[: len(HEADER)]
sample = sample.replace('-', '_')
sample = sample.replace(' ', '_')
if not sample:
print_error("Sample entry has not been specified!", "Line", line)
## Check FastQ file extension
for fastq in [fastq_1, fastq_2]:
if fastq:
if fastq.find(" ") != -1:
print_error("FastQ file contains spaces!", "Line", line)
if not fastq.endswith(".fastq.gz") and not fastq.endswith(".fq.gz"):
print_error(
"FastQ file does not have extension '.fastq.gz' or '.fq.gz'!",
"Line",
line,
)
## Auto-detect paired-end/single-end
sample_info = [] ## [single_end, fastq_1, fastq_2]
if sample and fastq_1 and fastq_2: ## Paired-end short reads
sample_info = ['0', fastq_1, fastq_2]
elif sample and fastq_1 and not fastq_2: ## Single-end short reads
sample_info = ['1', fastq_1, fastq_2]
else:
print_error("Invalid combination of columns provided!", "Line", line)
## Create sample mapping dictionary = { sample: [ single_end, fastq_1, fastq_2 ] }
if sample not in sample_mapping_dict:
sample_mapping_dict[sample] = [sample_info]
else:
if sample_info in sample_mapping_dict[sample]:
print_error("Samplesheet contains duplicate rows!", "Line", line)
else:
sample_mapping_dict[sample].append(sample_info)
## Write validated samplesheet with appropriate columns
if len(sample_mapping_dict) > 0:
out_dir = os.path.dirname(file_out)
make_dir(out_dir)
with open(file_out, "w") as fout:
fout.write(",".join(["sample", "single_end", "fastq_1", "fastq_2"]) + "\n")
for sample in sorted(sample_mapping_dict.keys()):
## Check that multiple runs of the same sample are of the same datatype
if not all(x[0] == sample_mapping_dict[sample][0][0] for x in sample_mapping_dict[sample]):
print_error("Multiple runs of a sample must be of the same datatype!","Sample: {}".format(sample))
for idx,val in enumerate(sample_mapping_dict[sample]):
fout.write(','.join(["{}_T{}".format(sample,idx+1)] + val) + '\n')
else:
print_error("No entries to process!","Samplesheet: {}".format(file_in))
def check_nanopore_samplesheet(file_in, file_out):
"""
This function checks that the samplesheet follows the following structure:
sample,barcode
SAMPLE_N,1
SAMPLE_X,2
SAMPLE_Z,3
For an example see:
https://github.com/nf-core/test-datasets/blob/viralrecon/samplesheet/samplesheet_test_nanopore.csv
"""
sample_mapping_dict = {}
with open(file_in, "r") as fin:
## Check header
MIN_COLS = 2
HEADER = ['sample', 'barcode']
header = [x.strip('"') for x in fin.readline().strip().split(",")]
if header[: len(HEADER)] != HEADER:
print("ERROR: Please check samplesheet header -> {} != {}".format(",".join(header), ",".join(HEADER)))
sys.exit(1)
## Check sample entries
for line in fin:
lspl = [x.strip().strip('"') for x in line.strip().split(",")]
# Check valid number of columns per row
if len(lspl) < len(HEADER):
print_error(
"Invalid number of columns (minimum = {})!".format(len(HEADER)),
"Line",
line,
)
num_cols = len([x for x in lspl if x])
if num_cols < MIN_COLS:
print_error(
"Invalid number of populated columns (minimum = {})!".format(MIN_COLS),
"Line",
line,
)
## Check sample entry
sample, barcode = lspl[: len(HEADER)]
sample = sample.replace('-', '_')
sample = sample.replace(' ', '_')
if not sample:
print_error("Sample entry has not been specified!", "Line", line)
## Check barcode entry
if barcode:
if not barcode.isdigit():
print_error("Barcode entry is not an integer!", 'Line', line)
else:
barcode = 'barcode%s' % (barcode.zfill(2))
## Create sample mapping dictionary = { sample: barcode }
if barcode in sample_mapping_dict.values():
print_error("Samplesheet contains duplicate entries in the 'barcode' column!", "Line", line)
if sample not in sample_mapping_dict:
sample_mapping_dict[sample] = barcode
else:
print_error("Samplesheet contains duplicate entries in the 'sample' column!", "Line", line)
## Write validated samplesheet with appropriate columns
if len(sample_mapping_dict) > 0:
out_dir = os.path.dirname(file_out)
make_dir(out_dir)
with open(file_out, "w") as fout:
fout.write(",".join(["sample", "barcode"]) + "\n")
for sample in sorted(sample_mapping_dict.keys()):
fout.write(','.join([sample, sample_mapping_dict[sample]]) + '\n')
else:
print_error("No entries to process!","Samplesheet: {}".format(file_in))
def main(args=None):
args = parse_args(args)
if args.PLATFORM == 'illumina':
check_illumina_samplesheet(args.FILE_IN, args.FILE_OUT)
elif args.PLATFORM == 'nanopore':
check_nanopore_samplesheet(args.FILE_IN, args.FILE_OUT)
else:
print("Unrecognised option passed to --platform: {}. Accepted values = 'illumina' or 'nanopore'".format(args.PLATFORM))
sys.exit(1)
if __name__ == "__main__":
sys.exit(main())
```
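Because `main` accepts an argument list, the checker can also be driven programmatically, e.g. from a test harness. A sketch (file names are illustrative):
```python
# Command line:
#   python check_samplesheet.py samplesheet.csv samplesheet.valid.csv --platform nanopore
# Programmatic:
# main(["samplesheet.csv", "samplesheet.valid.csv", "--platform", "nanopore"])
```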