# ===== StarcoderdataPython sample 4948989 =====
# ----------------------------------------------------------------------------#
# Imports
# ----------------------------------------------------------------------------#
from datetime import datetime
import dateutil.parser
import babel.dates
from flask import Flask, render_template, request, flash, redirect, url_for, jsonify
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import func, desc
from flask_migrate import Migrate
import logging
from logging import Formatter, FileHandler
from flask_wtf.csrf import CSRFProtect
# ----------------------------------------------------------------------------#
# App Config.
# ----------------------------------------------------------------------------#
app = Flask(__name__)
moment = Moment(app)
CSRFProtect(app)
app.config.from_object('config')
db = SQLAlchemy(app)
migrate = Migrate(app, db)
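# app.config.from_object('config') expects a config module (config.py) on the
# import path. A minimal sketch of its contents (placeholder values, not this
# project's actual settings):
#
#   SQLALCHEMY_DATABASE_URI = 'postgresql://user:password@localhost:5432/appdb'
#   SQLALCHEMY_TRACK_MODIFICATIONS = False
#   SECRET_KEY = 'change-me'  # needed by Flask-WTF / CSRFProtect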
# ----------------------------------------------------------------------------#
# Models. Forms.
# ----------------------------------------------------------------------------#
from forms import ArtistForm, VenueForm, ShowForm
from models import Artist, Venue, Show
# ----------------------------------------------------------------------------#
# Filters.
# ----------------------------------------------------------------------------#
def format_datetime(value, format='medium'):
    # Values may arrive either as ISO-formatted strings or as datetime objects.
    date = dateutil.parser.parse(value) if isinstance(value, str) else value
if format == 'full':
format = "EEEE MMMM, d, y 'at' h:mma"
elif format == 'medium':
format = "EE MM, dd, y h:mma"
return babel.dates.format_datetime(date, format)
app.jinja_env.filters['datetime'] = format_datetime
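# Example of applying the registered filter inside a Jinja template
# (illustrative only; the variable name is an assumption):
#
#   {{ show.start_time | datetime('full') }}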
# ----------------------------------------------------------------------------#
# Controllers.
# ----------------------------------------------------------------------------#
@app.route('/')
def index():
venues_query = Venue.query.limit(10)
artists_query = Artist.query.limit(10)
venues_data = {
'count': venues_query.count(),
'list': venues_query.all()
}
artists_data = {
'count': artists_query.count(),
'list': artists_query.all()
}
return render_template(
'pages/home.html', venues=venues_data, artists=artists_data)
# Venues
# ----------------------------------------------------------------
@app.route('/venues')
def venues():
areas = Venue.query.with_entities(func.count(Venue.id),
Venue.city,
Venue.state).group_by(Venue.city,
Venue.state).all()
data = []
for area in areas:
# Get Venues that Belong to this area
venues = Venue.query.filter_by(city=area.city,
state=area.state).all()
current_venue = []
for venue in venues:
            upcoming_shows = Show.query.filter(Show.venue_id == venue.id,
                                               Show.start_time > datetime.now()).all()
            current_venue.append({
                'id': venue.id,
                'name': venue.name,
                'num_upcoming_shows': len(upcoming_shows)
            })
data.append({
'city': area.city,
'state': area.state,
'venues': current_venue
})
return render_template('pages/venues.html', areas=data)
@app.route('/venues/search', methods=['POST'])
def search_venues():
search_term = request.form.get('search_term', '')
query = Venue.query.filter(Venue.name.ilike('%{}%'.format(search_term)))
search_results = query.all()
data = []
for venue in search_results:
        upcoming_shows = Show.query.filter(Show.venue_id == venue.id,
                                           Show.start_time > datetime.now()).all()
data.append({
'id': venue.id,
'name': venue.name,
'num_upcoming_shows': len(upcoming_shows)
})
response = {
"count": query.count(),
"data": data
}
return render_template('pages/search_venues.html', results=response,
search_term=request.form.get('search_term', ''))
@app.route('/venues/<int:venue_id>')
def show_venue(venue_id):
"""Show Info about """
venue = Venue.query.get(venue_id)
if not venue:
return render_template('errors/404.html')
    join_query = db.session.query(Show).join(Artist).filter(Show.venue_id == venue_id)
# get past shows
past_shows = []
past_shows_query = join_query.filter(
Show.start_time < datetime.now()).all()
if len(past_shows_query):
for past_show in past_shows_query:
past_shows.append({
'artist_id': past_show.artist_id,
'artist_name': past_show.artist.name,
'artist_image_link': past_show.artist.image_link,
'start_time': past_show.start_time.strftime('%Y-%m-%d %H:%M:%S')
})
# get upcoming shows
upcoming_shows = []
upcoming_shows_query = join_query.filter(
Show.start_time > datetime.now()).all()
if len(upcoming_shows_query):
for upcoming_show in upcoming_shows_query:
upcoming_shows.append({
'artist_id': upcoming_show.artist_id,
'artist_name': upcoming_show.artist.name,
'artist_image_link': upcoming_show.artist.image_link,
'start_time': upcoming_show.start_time.strftime('%Y-%m-%d %H:%M:%S')
})
data = {
'id': venue.id,
'name': venue.name,
'genres': venue.genres,
'address': venue.address,
'city': venue.city,
'state': venue.state,
'phone': venue.phone,
'website': venue.website,
'facebook_link': venue.facebook_link,
'image_link': venue.image_link,
'seeking_talent': venue.seeking_talent,
'seeking_description': venue.seeking_description,
'past_shows': past_shows,
'upcoming_shows': upcoming_shows,
'past_shows_count': len(past_shows),
'upcoming_shows_count': len(upcoming_shows)
}
return render_template('pages/show_venue.html', venue=data)
# Create Venue
# ----------------------------------------------------------------
@app.route('/venues/create', methods=['GET'])
def create_venue_form():
form = VenueForm()
return render_template('forms/new_venue.html', form=form)
@app.route('/venues/create', methods=['POST'])
def create_venue_submission():
form = VenueForm()
if form.validate_on_submit():
try:
venue = Venue()
venue.name = request.form['name']
venue.city = request.form['city']
venue.state = request.form['state']
venue.address = request.form['address']
venue.phone = request.form['phone']
venue.image_link = request.form['image_link']
venue.genres = request.form.getlist('genres')
venue.facebook_link = request.form['facebook_link']
venue.website = request.form['website']
venue.seeking_talent = True if 'seeking_talent' in request.form else False
venue.seeking_description = request.form['seeking_description']
db.session.add(venue)
db.session.commit()
        except Exception as e:
            db.session.rollback()
            flash(
                'An error occurred. Venue {} could not be listed: {}'.format(
                    request.form['name'], str(e)))
            return render_template('forms/new_venue.html', form=form)
        finally:
            db.session.close()
        flash(
            'Venue {} was successfully listed!'.format(
                request.form['name']))
        return redirect(url_for('venues'))
return render_template('forms/new_venue.html', form=form)
@app.route('/venues/<venue_id>', methods=['DELETE'])
def delete_venue(venue_id):
try:
venue = Venue.query.get(venue_id)
db.session.delete(venue)
db.session.commit()
    except Exception as e:
        db.session.rollback()
        flash('An error occurred. Venue could not be deleted: {}'.format(str(e)))
        return jsonify({'state': 'error',
                        'message': 'Venue could not be deleted'}), 500
    finally:
        db.session.close()
    flash('Venue deleted successfully!')
    return jsonify({
        'state': 'success',
        'message': 'Venue deleted successfully',
        'redirect': '/venues'
    })
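# The route responds with JSON instead of redirecting, so the deletion is
# expected to be triggered from the front end. A rough illustration of the call
# (hypothetical client code, assuming the requests library and a local server):
#
#   import requests
#   requests.delete('http://127.0.0.1:5000/venues/1')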
# Artists
# ----------------------------------------------------------------
@app.route('/artists')
def artists():
data = []
artists = Artist.query.order_by('name').all()
for artist in artists:
data.append({
'id': artist.id,
'name': artist.name
})
return render_template('pages/artists.html', artists=data)
@app.route('/artists/search', methods=['POST'])
def search_artists():
search_term = request.form.get('search_term', '')
query = Artist.query.filter(Artist.name.ilike('%{}%'.format(search_term)))
search_results = query.all()
data = []
for artist in search_results:
upcoming_shows = Show.query.filter(Show.artist_id == artist.id,
Show.start_time > datetime.now()).all()
data.append({
'id': artist.id,
'name': artist.name,
'num_upcoming_shows': len(upcoming_shows)
})
response = {
"count": query.count(),
"data": data
}
return render_template('pages/search_artists.html', results=response,
search_term=request.form.get('search_term', ''))
@app.route('/artists/<int:artist_id>')
def show_artist(artist_id):
artist = Artist.query.get(artist_id)
if not artist:
return render_template('errors/404.html')
    join_query = Show.query.join(Venue).filter(Show.artist_id == artist_id)
# get past shows
past_shows = []
past_shows_query = join_query.filter(
Show.start_time < datetime.now()).all()
if len(past_shows_query):
for past_show in past_shows_query:
past_shows.append({
"venue_id": past_show.venue_id,
"venue_name": past_show.venue.name,
"venue_image_link": past_show.venue.image_link,
"start_time": past_show.start_time.strftime('%Y-%m-%d %H:%M:%S')
})
# get upcoming shows
upcoming_shows = []
upcoming_shows_query = join_query.filter(
Show.start_time > datetime.now()).all()
if len(upcoming_shows_query):
for upcoming_show in upcoming_shows_query:
upcoming_shows.append({
"venue_id": upcoming_show.venue_id,
"venue_name": upcoming_show.venue.name,
"venue_image_link": upcoming_show.venue.image_link,
"start_time": upcoming_show.start_time.strftime('%Y-%m-%d %H:%M:%S')
})
data = {
"id": artist.id,
"name": artist.name,
"genres": artist.genres,
"city": artist.city,
"state": artist.state,
"phone": artist.phone,
"seeking_venue": artist.seeking_venue,
"image_link": artist.image_link,
"past_shows": past_shows,
"upcoming_shows": upcoming_shows,
"past_shows_count": len(past_shows),
"upcoming_shows_count": len(upcoming_shows),
}
return render_template('pages/show_artist.html', artist=data)
# Update
# ----------------------------------------------------------------
@app.route('/artists/<int:artist_id>/edit', methods=['GET'])
def edit_artist(artist_id):
form = ArtistForm()
artist = Artist.query.get(artist_id)
if not artist:
return render_template('errors/404.html')
form.id = artist.id
form.name.data = artist.name
form.city.data = artist.city
form.state.data = artist.state
form.phone.data = artist.phone
form.genres.data = artist.genres
form.facebook_link.data = artist.facebook_link
form.image_link.data = artist.image_link
form.website.data = artist.website
form.seeking_venue.data = artist.seeking_venue
form.seeking_description.data = artist.seeking_description
return render_template('forms/edit_artist.html', form=form, artist=artist)
@app.route('/artists/<int:artist_id>/edit', methods=['POST'])
def edit_artist_submission(artist_id):
artist = Artist.query.get(artist_id)
try:
artist.name = request.form['name']
artist.city = request.form['city']
artist.state = request.form['state']
artist.phone = request.form['phone']
artist.image_link = request.form['image_link']
artist.genres = request.form.getlist('genres')
artist.facebook_link = request.form['facebook_link']
artist.website = request.form['website']
artist.seeking_venue = True if 'seeking_venue' in request.form else False
artist.seeking_description = request.form['seeking_description']
db.session.commit()
    except Exception as e:
        db.session.rollback()
        flash('An error occurred. Artist {} could not be updated: {}'.format(
            request.form['name'], str(e)))
        return redirect(url_for('show_artist', artist_id=artist_id))
    finally:
        db.session.close()
    flash(
        'Artist {} was successfully updated!'.format(
            request.form['name']))
    return redirect(url_for('show_artist', artist_id=artist_id))
@app.route('/venues/<int:venue_id>/edit', methods=['GET'])
def edit_venue(venue_id):
form = VenueForm()
venue = Venue.query.get(venue_id)
if not venue:
return render_template('errors/404.html')
form.name.data = venue.name
form.city.data = venue.city
form.state.data = venue.state
form.phone.data = venue.phone
form.address.data = venue.address
form.genres.data = venue.genres
form.facebook_link.data = venue.facebook_link
form.image_link.data = venue.image_link
form.website.data = venue.website
form.seeking_talent.data = venue.seeking_talent
form.seeking_description.data = venue.seeking_description
return render_template('forms/edit_venue.html', form=form, venue=venue)
@app.route('/venues/<int:venue_id>/edit', methods=['POST'])
def edit_venue_submission(venue_id):
venue = Venue.query.get(venue_id)
try:
venue.name = request.form['name']
venue.city = request.form['city']
venue.state = request.form['state']
venue.address = request.form['address']
venue.phone = request.form['phone']
venue.genres = request.form.getlist('genres')
venue.image_link = request.form['image_link']
venue.facebook_link = request.form['facebook_link']
venue.website = request.form['website']
venue.seeking_talent = True if 'seeking_talent' in request.form else False
venue.seeking_description = request.form['seeking_description']
db.session.commit()
    except Exception as e:
        db.session.rollback()
        flash('An error occurred. Venue {} could not be updated: {}'.format(
            request.form['name'], str(e)))
        return redirect(url_for('show_venue', venue_id=venue_id))
    finally:
        db.session.close()
    flash(
        'Venue {} was successfully updated!'.format(
            request.form['name']))
    return redirect(url_for('show_venue', venue_id=venue_id))
# Create Artist
# ----------------------------------------------------------------
@app.route('/artists/create', methods=['GET'])
def create_artist_form():
form = ArtistForm()
return render_template('forms/new_artist.html', form=form)
@app.route('/artists/create', methods=['POST'])
def create_artist_submission():
form = ArtistForm()
if form.validate_on_submit():
try:
artist = Artist()
artist.name = request.form['name']
artist.city = request.form['city']
artist.state = request.form['state']
artist.phone = request.form['phone']
artist.image_link = request.form['image_link']
artist.genres = request.form.getlist('genres')
artist.facebook_link = request.form['facebook_link']
artist.website = request.form['website']
artist.seeking_venue = True if 'seeking_venue' in request.form else False
artist.seeking_description = request.form['seeking_description']
db.session.add(artist)
db.session.commit()
        except Exception as e:
            db.session.rollback()
            flash(
                'An error occurred. Artist {} could not be listed: {}'.format(
                    request.form['name'], str(e)))
            return render_template('forms/new_artist.html', form=form)
        finally:
            db.session.close()
        flash(
            'Artist {} was successfully listed!'.format(
                request.form['name']))
        return redirect(url_for('artists'))
return render_template('forms/new_artist.html', form=form)
# Shows
# ----------------------------------------------------------------
@app.route('/shows')
def shows():
data = []
shows_results = Show.query.join(Venue).join(
Artist).order_by(desc('start_time')).all()
for show in shows_results:
data.append({
'venue_id': show.venue_id,
'venue_name': show.venue.name,
'artist_id': show.artist_id,
'artist_name': show.artist.name,
'artist_image_link': show.artist.image_link,
'start_time': show.start_time.strftime('%Y-%m-%d %H:%M:%S')
})
return render_template('pages/shows.html', shows=data)
@app.route('/shows/create')
def create_shows():
# renders form. do not touch.
form = ShowForm()
return render_template('forms/new_show.html', form=form)
@app.route('/shows/create', methods=['POST'])
def create_show_submission():
form = ShowForm()
if form.validate_on_submit():
try:
show = Show()
show.artist_id = request.form['artist_id']
show.venue_id = request.form['venue_id']
show.start_time = request.form['start_time']
db.session.add(show)
db.session.commit()
        except Exception as e:
            db.session.rollback()
            flash('An error occurred. Show could not be listed: {}'.format(str(e)))
            return render_template('forms/new_show.html', form=form)
        finally:
            db.session.close()
        flash('Show was successfully listed!')
        return redirect(url_for('shows'))
return render_template('forms/new_show.html', form=form)
@app.errorhandler(404)
def not_found_error(error):
return render_template('errors/404.html'), 404
@app.errorhandler(500)
def server_error(error):
return render_template('errors/500.html'), 500
if not app.debug:
file_handler = FileHandler('error.log')
file_handler.setFormatter(
Formatter(
'%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]')
)
app.logger.setLevel(logging.INFO)
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.info('errors')
# ----------------------------------------------------------------------------#
# Launch.
# ----------------------------------------------------------------------------#
# Default port:
if __name__ == '__main__':
app.run(debug=True)
# Or specify port manually:
'''
if __name__ == '__main__':
    import os
    port = int(os.environ.get('PORT', 5000))
    app.run(host='0.0.0.0', port=port)
'''
# ===== StarcoderdataPython sample 5101507 =====
def convolve(x, h):
    # Full linear convolution: output length is len(x) + len(h) - 1.
    out_length = len(x) + len(h) - 1
    out_signal = []
    for i in range(out_length):
        acc = 0
        # Iterate over every tap of h (the original loop stopped at len(h) - 1,
        # silently dropping the last coefficient).
        for j in range(len(h)):
            if 0 <= i - j < len(x):
                acc += h[j] * x[i - j]
        out_signal.append(acc)
    return out_signal
a = [0, 1, 0]
b = [0, .5, 1, .5, 0]
print(a)
print(b)
print("convolved identity")
print(convolve(b, a))
print()
c = [1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0]
d = [1,2,3,4,5,7,9,11]
print(c)
print(d)
print("convolved first difference")
print(convolve(c,d))
print()
e = [1,1,1,1,1,1,1,1,1,1]
print(d)
print(e)
print("convolved running sum")
print(convolve(d,e))
print()
h = [1, .7, .5, .3, .1]
i = [.1, -.5, .2, -.3, 1, .7, .5, .3, .1, .2, -.3]
print(h)
print(i)
print("cross-correlation")
h.reverse()
print(convolve(h, i))
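# Sanity check (a sketch, assuming NumPy is available): numpy.convolve performs
# the same full-length linear convolution, so it should agree with the
# hand-rolled implementation above on the test signals, e.g.:
#
#   import numpy as np
#   assert np.allclose(convolve(b, a), np.convolve(b, a))
#   assert np.allclose(convolve(c, d), np.convolve(c, d))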
# ===== StarcoderdataPython sample 155652 =====
# Repository: LHerdy/People_Manager
from django.contrib import admin
from apps.overtime.models import Overtime
admin.site.register(Overtime)
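# A slightly richer registration could use a ModelAdmin subclass (a sketch;
# any list_display fields beyond the primary key are assumptions about the
# Overtime model):
#
#   @admin.register(Overtime)
#   class OvertimeAdmin(admin.ModelAdmin):
#       list_display = ('id',)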
# ===== StarcoderdataPython sample 3472435 =====
# File: sphinx_packaging/__init__.py
#!/usr/bin/env python3
#
# __init__.py
"""
A collection of Sphinx utilities related to Python packaging.
"""
#
# Copyright © 2021 <NAME> <<EMAIL>>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#
# stdlib
from typing import Any, Dict
# 3rd party
from sphinx.application import Sphinx
__all__ = ["setup"]
__author__: str = "<NAME>"
__copyright__: str = "2021 <NAME>"
__license__: str = "BSD License"
__version__: str = "0.1.0"
__email__: str = "<EMAIL>"
def setup(app: Sphinx) -> Dict[str, Any]:
"""
Setup :mod:`sphinx_packaging`.
:param app: The Sphinx application.
"""
app.setup_extension("sphinx_packaging.peps")
app.setup_extension("sphinx_packaging.tconf")
app.setup_extension("sphinx_packaging.toml")
return {
"version": __version__,
"parallel_read_safe": True,
"parallel_write_safe": True,
}
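# To activate the extension in a Sphinx project, list it in conf.py (a minimal
# sketch; other project configuration omitted):
#
#   extensions = ["sphinx_packaging"]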
# ===== StarcoderdataPython sample 1905688 =====
# Repository: harry19023/home_assistant_config
# File: appDaemon/apps/harmony.py
import appdaemon.plugins.hass.hassapi as hass
from paramiko import client
class Harmony(hass.Hass):
def initialize(self):
self.globals = self.get_app('globals')
self.computer_control = self.get_app('computer_control')
self.min_delay=0.01
self.stream_devices = ['surround', 'whole_house', 'living_room_tv']
self.ent_to_activity = {'surround':'stereo music', 'whole_house':'stereo music', 'living_room_tv':'Chromecast'}
self.activity_to_volume = {'stereo music':45, 'Chromecast':35, 'Computer':35}
#listeners
self.listen_state(self.everything_off, 'input_boolean.home', new='off')
self.listen_state(self.stream_on, 'media_player', new='playing')
self.listen_state(self.stream_on, 'media_player.living_room_tv', old='off', new='idle')
self.listen_state(self.stream_off, 'media_player', new='off')
self.listen_state(self.harmony_change, 'remote.harmony_hub', attribute='current_activity')
self.log('Successfully initialized Harmony!' , level='INFO')
def everything_off(self, entity, attribute, old, new, kwargs):
self.call_service('remote/turn_off', entity_id='remote.harmony_hub')
self.log('Turned off Harmony', level='INFO')
def stream_on(self, entity, attribute, old, new, kwargs):
if old is not None:
device, ent = self.split_entity(entity)
#self.log('Stream_on_on called with ' + ent + ' new=' + new + ' old=' + old)
if ent in self.stream_devices:
current_activity = self.get_state('remote.harmony_hub', attribute='current_activity')
if current_activity != self.ent_to_activity[ent]:
self.call_service('remote/turn_on', entity_id='remote.harmony_hub', activity=self.ent_to_activity[ent])
self.log("Turned on " + self.ent_to_activity[ent] + " because current_activity was " + current_activity)
if old == 'off':
self.call_service('media_player/volume_set', entity_id=entity, volume_level=1.0)
self.log('Turned ' + ent + ' to max volume')
def stream_off(self, entity, attribute, old, new, kwargs):
if old is not None:
device, ent = self.split_entity(entity)
self.log('stream_off called with ' + ent + ' new=' + new + ' old=' + old)
if ent in self.stream_devices:
current_activity = self.get_state('remote.harmony_hub', attribute='current_activity')
if current_activity == self.ent_to_activity[ent]:
                self.log('checking if ' + entity + ' is still off in 30 seconds')
                self.run_in(self.stream_still_off, 30, entity_id=entity)
self.log('left stream_off function')
def stream_still_off(self, kwargs):
if self.get_state(kwargs['entity_id']) == 'off':
self.log(kwargs['entity_id'] + ' is still off, turning off')
self.call_service('remote/turn_off', entity_id='remote.harmony_hub')
self.log("Turned off the stereo")
else:
self.log(kwargs['entity_id'] + 'is still on, leaving on')
def harmony_change(self, entity, attribute, old, new, kwargs):
self.log('Harmony_change called with old=' + str(old) + ' new=' + str(new))
if (old is not None) and (new is not None):
if new == 'Computer':
self.computer_control.computer_on()
self.log('called computer_control.computer_on()', level='INFO')
if new in self.activity_to_volume:
# self.call_service('remote/send_command', entity_id='remote.harmony_hub', device='53047637', command='VolumeDown', num_repeats=50, delay_secs=self.min_delay)
# self.call_service('remote/send_command', entity_id='remote.harmony_hub', device='53047637', command='VolumeUp', num_repeats=self.activity_to_volume[new], delay_secs=self.min_delay)
self.log('Set ' + new + ' volume to ' + str(self.activity_to_volume[new]))
if old == 'Computer' and new is not None:
self.computer_control.computer_off()
self.log('called computer_control.computer_off()', level='INFO')
if old in ['Chromecast', 'stereo music'] and new != 'None':
if old == 'Chromecast':
self.call_service('media_player/turn_off', entity_id='media_player.living_room_tv')
self.log('Turned off Living Room TV')
else:
self.call_service('media_player/turn_off', entity_id='media_player.surround')
self.call_service('media_player/turn_off', entity_id='media_player.whole_house')
self.log('Turned off Surround and Whole House')
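# A minimal apps.yaml entry for registering this app with AppDaemon might look
# like the following (a sketch; the 'globals' and 'computer_control' apps it
# depends on must be configured separately):
#
#   harmony:
#     module: harmony
#     class: Harmony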
# ===== StarcoderdataPython sample 3375934 =====
"""Sherlock: Supported Site Listing
This module generates the listing of supported sites.
"""
import json
from collections import OrderedDict
with open("data.json", "r", encoding="utf-8") as data_file:
data = json.load(data_file)
sorted_json_data = json.dumps(data, indent=2, sort_keys=True)
with open("data.json", "w") as data_file:
data_file.write(sorted_json_data)
with open("sites.md", "w") as site_file:
site_file.write(f'## List Of Supported Sites ({len(data)} Sites In Total!)\n')
index = 1
for social_network in OrderedDict(sorted(data.items())):
url_main = data.get(social_network).get("urlMain")
site_file.write(f'{index}. [{social_network}]({url_main})\n')
index = index + 1
print("Finished updating supported site listing!")
# ===== StarcoderdataPython sample 325981 =====
# Repository: XiaoSanGit/talking-head-anime-landing
import os
import sys
sys.path.append(os.getcwd())
import time
import numpy as np
import PIL.Image
import PIL.ImageTk
import cv2
import torch
import dlib
from poser.morph_rotate_combine_poser import MorphRotateCombinePoser256Param6
from puppet.head_pose_solver import HeadPoseSolver
from puppet.util import compute_left_eye_normalized_ratio, compute_right_eye_normalized_ratio, \
compute_mouth_normalized_ratio
from tha.combiner import CombinerSpec
from tha.face_morpher import FaceMorpherSpec
from tha.two_algo_face_rotator import TwoAlgoFaceRotatorSpec
from util import rgba_to_numpy_image, extract_pytorch_image_from_filelike
import torch.onnx
cuda = torch.device('cuda')
class Puppet_Core():
def __init__(self):
self.torch_device = cuda
self.poser = MorphRotateCombinePoser256Param6(
morph_module_spec=FaceMorpherSpec(),
morph_module_file_name="../data/face_morpher.pt",
rotate_module_spec=TwoAlgoFaceRotatorSpec(),
rotate_module_file_name="../data/two_algo_face_rotator.pt",
combine_module_spec=CombinerSpec(),
combine_module_file_name="../data/combiner.pt",
device=cuda)
# self.face_detector = dlib.get_frontal_face_detector()
# self.landmark_locator = dlib.shape_predictor("../data/shape_predictor_68_face_landmarks.dat")
self.head_pose_solver = HeadPoseSolver()
self.pose_size = len(self.poser.pose_parameters())
def run(self,source_img,frame,save_path):
# rgb_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
# faces = self.face_detector(rgb_frame)
# euler_angles = None
# face_landmarks = None
# if len(faces) > 0:
# face_rect = faces[0]
# face_landmarks = self.landmark_locator(rgb_frame, face_rect)
# face_box_points, euler_angles = self.head_pose_solver.solve_head_pose(face_landmarks)
# # np.save("euler_angle.npy",euler_angles)
# # euler_angles = np.load("euler_angle.npy")
#
# if euler_angles is not None and source_img is not None:
# self.current_pose = torch.zeros(self.pose_size, device=self.torch_device)
# self.current_pose[0] = max(min(-euler_angles.item(0) / 15.0, 1.0), -1.0)
# self.current_pose[1] = max(min(-euler_angles.item(1) / 15.0, 1.0), -1.0)
# self.current_pose[2] = max(min(euler_angles.item(2) / 15.0, 1.0), -1.0)
#
# # if self.last_pose is None:
# # self.last_pose = self.current_pose
# # else:
# # self.current_pose = self.current_pose * 0.5 + self.last_pose * 0.5 # smoothing
# # self.last_pose = self.current_pose
#
# eye_min_ratio = 0.15
# eye_max_ratio = 0.25
# left_eye_normalized_ratio = compute_left_eye_normalized_ratio(face_landmarks, eye_min_ratio, eye_max_ratio)
# self.current_pose[3] = 1 - left_eye_normalized_ratio
# right_eye_normalized_ratio = compute_right_eye_normalized_ratio(face_landmarks,
# eye_min_ratio,
# eye_max_ratio)
# self.current_pose[4] = 1 - right_eye_normalized_ratio
#
# min_mouth_ratio = 0.02
# max_mouth_ratio = 0.3
# mouth_normalized_ratio = compute_mouth_normalized_ratio(face_landmarks, min_mouth_ratio, max_mouth_ratio)
# self.current_pose[5] = mouth_normalized_ratio
# self.current_pose = self.current_pose.unsqueeze(dim=0)
# np.save("current_pose.npy",self.current_pose.cpu())
self.current_pose = torch.Tensor(np.load("current_pose.npy")).cuda()
st = time.time()
posed_image = self.poser.pose(source_image=source_img, pose=self.current_pose).detach().cpu()
print("Core Time(poser.pose): ", time.time()-st)
numpy_image = rgba_to_numpy_image(posed_image[0])
pil_image = PIL.Image.fromarray(np.uint8(np.rint(numpy_image * 255.0)), mode='RGBA')
pil_image.save(save_path)
# TODO Core of demo, compress this
return
if __name__ == '__main__':
demo = Puppet_Core()
img_file = r"E:\work\pycharm_v2\talking-head-anime-landing\data/illust/waifu_00_256.png"
source_img= extract_pytorch_image_from_filelike(img_file).to(cuda).unsqueeze(dim=0)
save_file = "../save_img.png"
frame = cv2.imread(r"E:\work\pycharm_v2\talking-head-anime-landing\my.png")
start_time = time.time()
demo.run(source_img,frame,save_file)
print("Total Run Time: ",time.time()-start_time)
# import torchvision.models as models
#
# resnext50_32x4d = models.resnext50_32x4d(pretrained=True)
# import torch
# BATCH_SIZE = 64
# dummy_input = torch.randn(BATCH_SIZE, 3, 224, 224)
# torch.onnx.export(resnext50_32x4d, dummy_input, "resnet50_onnx_model.onnx", verbose=False)
# ===== StarcoderdataPython sample 1803723 =====
# Repository: evenh/azure-storage-azcopy
import json
import os
import shutil
import time
import urllib
from collections import namedtuple
import utility as util
import unittest
import filecmp
import os.path
class Service_2_Service_Copy_User_Scenario(unittest.TestCase):
def setUp(self):
# init bucket_name
common_prefix = 's2scopybucket'
        # use distinct bucket names per scenario to make it easier to troubleshoot tests against real buckets
self.bucket_name = util.get_resource_name(common_prefix + 'blobblob')
self.bucket_name_blob_file = util.get_resource_name(common_prefix + 'blobfile')
self.bucket_name_file_blob = util.get_resource_name(common_prefix + 'fileblob')
self.bucket_name_s3_blob = util.get_resource_name(common_prefix + 's3blob')
self.bucket_name_block_append_page = util.get_resource_name(common_prefix + 'blockappendpage')
##################################
# Test from blob to blob copy.
##################################
def test_copy_single_1kb_file_from_blob_to_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 1)
def test_copy_single_1kb_file_from_blob_to_blob_with_auth_env_var(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 1,
oAuth=True, credTypeOverride="OAuthToken")
def test_copy_single_512b_file_from_page_to_block_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 512,
srcBlobType="PageBlob", dstBlobType="BlockBlob")
def test_copy_single_512b_file_from_block_to_page_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 512,
srcBlobType="BlockBlob", dstBlobType="PageBlob")
def test_copy_single_512b_file_from_page_to_append_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 512,
srcBlobType="PageBlob", dstBlobType="AppendBlob")
def test_copy_single_512b_file_from_append_to_page_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 512,
srcBlobType="AppendBlob", dstBlobType="PageBlob")
def test_copy_single_512b_file_from_block_to_append_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 512,
srcBlobType="BlockBlob", dstBlobType="AppendBlob")
def test_copy_single_512b_file_from_append_to_block_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 512,
srcBlobType="AppendBlob", dstBlobType="BlockBlob")
def test_copy_single_0kb_file_from_blob_to_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 0)
def test_copy_single_63mb_file_from_blob_to_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 63 * 1024 * 1024)
def test_copy_10_files_from_blob_container_to_blob_container(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_n_files_from_x_bucket_to_x_bucket(src_container_url, "Blob", dst_container_url, "Blob")
def test_copy_file_from_blob_container_to_blob_container_strip_top_dir_recursive(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_file_from_x_bucket_to_x_bucket_strip_top_dir(src_container_url, "Blob", dst_container_url, "Blob", True)
def test_copy_file_from_blob_container_to_blob_container_strip_top_dir_non_recursive(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_file_from_x_bucket_to_x_bucket_strip_top_dir(src_container_url, "Blob", dst_container_url, "Blob", False)
def test_copy_n_files_from_blob_dir_to_blob_dir(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_n_files_from_x_dir_to_x_dir(src_container_url, "Blob", dst_container_url, "Blob")
def test_copy_n_files_from_blob_dir_to_blob_dir_strip_top_dir_recursive(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_n_files_from_x_dir_to_x_dir_strip_top_dir(src_container_url, "Blob", dst_container_url, "Blob", True)
def test_copy_n_files_from_blob_dir_to_blob_dir_strip_top_dir_non_recursive(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_n_files_from_x_dir_to_x_dir_strip_top_dir(src_container_url, "Blob", dst_container_url, "Blob", False)
def test_copy_files_from_blob_account_to_blob_account(self):
self.util_test_copy_files_from_x_account_to_x_account(
util.test_s2s_src_blob_account_url,
"Blob",
util.test_s2s_dst_blob_account_url,
"Blob",
self.bucket_name)
def test_copy_single_file_from_blob_to_blob_propertyandmetadata(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x_propertyandmetadata(
src_container_url,
"Blob",
dst_container_url,
"Blob")
def test_copy_file_from_blob_container_to_blob_container_propertyandmetadata(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_copy_file_from_x_bucket_to_x_bucket_propertyandmetadata(
src_container_url,
"Blob",
dst_container_url,
"Blob")
def test_overwrite_copy_single_file_from_blob_to_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_overwrite_copy_single_file_from_x_to_x(
src_container_url,
"Blob",
dst_container_url,
"Blob",
False,
True)
def test_non_overwrite_copy_single_file_from_blob_to_blob(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name)
self.util_test_overwrite_copy_single_file_from_x_to_x(
src_container_url,
"Blob",
dst_container_url,
"Blob",
False,
False)
# Test oauth support for service to service copy, where source is authenticated with SAS
# and destination is authenticated with OAuth token.
def test_copy_single_17mb_file_from_blob_to_blob_oauth(self):
src_container_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name)
        # The URL on the next line was test_s2s_dst_blob_account_url, but for now it's
        # changed to the main OAuth container URL to simplify OAuth setup.
dst_container_url = util.get_object_without_sas(util.test_oauth_container_url, self.bucket_name)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_container_url, "Blob", 17 * 1024 * 1024, True)
##################################
# Test from blob to file copy
    # Note: tests go from the dst blob account to the src file account to avoid extra config (Ze's suggestion)
##################################
def test_copy_single_1kb_file_from_blob_to_file(self):
src_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_blob_file)
dst_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_blob_file)
self.util_test_copy_single_file_from_x_to_x(src_container_url, "Blob", dst_share_url, "File", 1)
def test_copy_10_files_from_blob_container_to_file_share(self):
src_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_blob_file)
dst_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_blob_file)
self.util_test_copy_n_files_from_x_bucket_to_x_bucket(src_container_url, "Blob", dst_share_url, "File", 10, 1)
def test_copy_file_from_blob_to_file_properties_and_metadata(self):
src_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_blob_file)
dst_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_blob_file)
self.util_test_copy_single_file_from_x_to_x_propertyandmetadata(src_container_url, "Blob", dst_share_url, "File", True)
# not testing implicit container creation (w/out a container name in the dst) as that's tested by the FE tests
##################################
# Test from file to blob copy.
##################################
def test_copy_single_1kb_file_from_file_to_blob(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_single_file_from_x_to_x(src_share_url, "File", dst_container_url, "Blob", 1)
def test_copy_single_0kb_file_from_file_to_blob(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_single_file_from_x_to_x(src_share_url, "File", dst_container_url, "Blob", 0)
def test_copy_single_63mb_file_from_file_to_blob(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_single_file_from_x_to_x(src_share_url, "File", dst_container_url, "Blob", 63 * 1024 * 1024)
def test_copy_10_files_from_file_share_to_blob_container(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_n_files_from_x_bucket_to_x_bucket(src_share_url, "File", dst_container_url, "Blob")
def test_copy_file_from_file_share_to_blob_container_strip_top_dir_recursive(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_file_from_x_bucket_to_x_bucket_strip_top_dir(src_share_url, "File", dst_container_url, "Blob", True)
def test_copy_file_from_file_share_to_blob_container_strip_top_dir_non_recursive(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_file_from_x_bucket_to_x_bucket_strip_top_dir(src_share_url, "File", dst_container_url, "Blob", False)
def test_copy_n_files_from_file_dir_to_blob_dir(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_n_files_from_x_dir_to_x_dir(src_share_url, "File", dst_container_url, "Blob")
def test_copy_n_files_from_file_dir_to_blob_dir_strip_top_dir_recursive(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_n_files_from_x_dir_to_x_dir_strip_top_dir(src_share_url, "File", dst_container_url, "Blob", True)
def test_copy_n_files_from_file_dir_to_blob_dir_strip_top_dir_non_recursive(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_n_files_from_x_dir_to_x_dir_strip_top_dir(src_share_url, "File", dst_container_url, "Blob", False)
def test_copy_files_from_file_account_to_blob_account(self):
self.util_test_copy_files_from_x_account_to_x_account(
util.test_s2s_src_file_account_url,
"File",
util.test_s2s_dst_blob_account_url,
"Blob",
self.bucket_name_file_blob)
def test_copy_single_file_from_file_to_blob_propertyandmetadata(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_single_file_from_x_to_x_propertyandmetadata(
src_share_url,
"File",
dst_container_url,
"Blob")
def test_copy_file_from_file_share_to_blob_container_propertyandmetadata(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_file_from_x_bucket_to_x_bucket_propertyandmetadata(
src_share_url,
"File",
dst_container_url,
"Blob")
def test_copy_file_from_file_share_to_blob_container_no_preserve_propertyandmetadata(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_file_from_x_bucket_to_x_bucket_propertyandmetadata(
src_share_url,
"File",
dst_container_url,
"Blob",
False)
def test_overwrite_copy_single_file_from_file_to_blob(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_overwrite_copy_single_file_from_x_to_x(
src_share_url,
"File",
dst_container_url,
"Blob",
False,
True)
def test_non_overwrite_copy_single_file_from_file_to_blob(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_overwrite_copy_single_file_from_x_to_x(
src_share_url,
"File",
dst_container_url,
"Blob",
False,
False)
# Test oauth support for service to service copy, where source is authenticated with SAS
# and destination is authenticated with OAuth token.
@unittest.skip("coverd by blob to blob")
def test_copy_single_17mb_file_from_file_to_blob_oauth(self):
src_share_url = util.get_object_sas(util.test_s2s_src_file_account_url, self.bucket_name_file_blob)
dst_container_url = util.get_object_without_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_file_blob)
self.util_test_copy_single_file_from_x_to_x(src_share_url, "File", dst_container_url, "Blob", 17 * 1024 * 1024, True)
##################################
# Test from S3 to blob copy.
##################################
def test_copy_single_1kb_file_from_s3_to_blob(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_single_file_from_x_to_x(src_bucket_url, "S3", dst_container_url, "Blob", 1)
def test_copy_single_0kb_file_from_s3_to_blob(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_single_file_from_x_to_x(src_bucket_url, "S3", dst_container_url, "Blob", 0)
def test_copy_single_63mb_file_from_s3_to_blob(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_single_file_from_x_to_x(src_bucket_url, "S3", dst_container_url, "Blob", 63 * 1024 * 1024)
def test_copy_10_files_from_s3_bucket_to_blob_container(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_n_files_from_x_bucket_to_x_bucket(src_bucket_url, "S3", dst_container_url, "Blob")
def test_copy_10_files_from_s3_bucket_to_blob_account(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
self.util_test_copy_n_files_from_s3_bucket_to_blob_account(src_bucket_url, util.test_s2s_dst_blob_account_url)
def test_copy_file_from_s3_bucket_to_blob_container_strip_top_dir_recursive(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_file_from_x_bucket_to_x_bucket_strip_top_dir(src_bucket_url, "S3", dst_container_url, "Blob", True)
def test_copy_file_from_s3_bucket_to_blob_container_strip_top_dir_non_recursive(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_file_from_x_bucket_to_x_bucket_strip_top_dir(src_bucket_url, "S3", dst_container_url, "Blob", False)
def test_copy_n_files_from_s3_dir_to_blob_dir(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_n_files_from_x_dir_to_x_dir(src_bucket_url, "S3", dst_container_url, "Blob")
def test_copy_n_files_from_s3_dir_to_blob_dir_strip_top_dir_recursive(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_n_files_from_x_dir_to_x_dir_strip_top_dir(src_bucket_url, "S3", dst_container_url, "Blob", True)
def test_copy_n_files_from_s3_dir_to_blob_dir_strip_top_dir_non_recursive(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_n_files_from_x_dir_to_x_dir_strip_top_dir(src_bucket_url, "S3", dst_container_url, "Blob", False)
def test_copy_files_from_s3_service_to_blob_account(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
self.util_test_copy_files_from_x_account_to_x_account(
util.test_s2s_src_s3_service_url,
"S3",
util.test_s2s_dst_blob_account_url,
"Blob",
self.bucket_name_s3_blob)
def test_copy_single_file_from_s3_to_blob_propertyandmetadata(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_single_file_from_x_to_x_propertyandmetadata(
src_bucket_url,
"S3",
dst_container_url,
"Blob")
def test_copy_single_file_from_s3_to_blob_no_preserve_propertyandmetadata(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_single_file_from_x_to_x_propertyandmetadata(
src_bucket_url,
"S3",
dst_container_url,
"Blob",
False)
def test_copy_file_from_s3_bucket_to_blob_container_propertyandmetadata(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_file_from_x_bucket_to_x_bucket_propertyandmetadata(
src_bucket_url,
"S3",
dst_container_url,
"Blob")
def test_copy_file_from_s3_bucket_to_blob_container_no_preserve_propertyandmetadata(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_file_from_x_bucket_to_x_bucket_propertyandmetadata(
src_bucket_url,
"S3",
dst_container_url,
"Blob",
False)
def test_overwrite_copy_single_file_from_s3_to_blob(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_overwrite_copy_single_file_from_x_to_x(
src_bucket_url,
"S3",
dst_container_url,
"Blob",
False,
True)
def test_non_overwrite_copy_single_file_from_s3_to_blob(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_overwrite_copy_single_file_from_x_to_x(
src_bucket_url,
"S3",
dst_container_url,
"Blob",
False,
False)
def test_copy_single_file_from_s3_to_blob_with_url_encoded_slash_as_filename(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_single_file_from_x_to_x(
src_bucket_url,
"S3",
dst_container_url,
"Blob",
1,
False,
"%252F") #encoded name for %2F, as path will be decoded
def test_copy_single_file_from_s3_to_blob_excludeinvalidmetadata(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
self.util_test_copy_single_file_from_s3_to_blob_handleinvalidmetadata(
"", # By default it should be ExcludeIfInvalid
"1abc=jiac;$%^=width;description=test file",
"description=test file"
)
def test_copy_single_file_from_s3_to_blob_renameinvalidmetadata(self):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
self.util_test_copy_single_file_from_s3_to_blob_handleinvalidmetadata(
"RenameIfInvalid", # By default it should be ExcludeIfInvalid
"1abc=jiac;$%^=width;description=test file",
"rename_1abc=jiac;rename_key_1abc=1abc;description=test file;rename____=width;rename_key____=$%^"
)
# Test invalid metadata handling
def util_test_copy_single_file_from_s3_to_blob_handleinvalidmetadata(
self,
invalidMetadataHandleOption,
srcS3Metadata,
expectResolvedMetadata):
if 'S3_TESTS_OFF' in os.environ and os.environ['S3_TESTS_OFF'] != "":
self.skipTest('S3 testing is disabled for this smoke test run.')
srcBucketURL = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dstBucketURL = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
srcType = "S3"
# create bucket and create file with metadata and properties
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
fileName = "test_copy_single_file_from_s3_to_blob_handleinvalidmetadata_%s" % invalidMetadataHandleOption
srcFileURL = util.get_object_without_sas(srcBucketURL, fileName)
dstFileURL = util.get_object_sas(dstBucketURL, fileName)
result = util.Command("create").add_arguments(srcFileURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "SingleFile"). \
add_flags("metadata", srcS3Metadata). \
execute_azcopy_create()
self.assertTrue(result)
# Copy file using azcopy from srcURL to destURL
cpCmd = util.Command("copy").add_arguments(srcFileURL).add_arguments(dstFileURL). \
add_flags("log-level", "info")
if invalidMetadataHandleOption == "" or invalidMetadataHandleOption == "ExcludeIfInvalid":
cpCmd.add_flags("s2s-handle-invalid-metadata", "ExcludeIfInvalid")
if invalidMetadataHandleOption == "FailIfInvalid":
cpCmd.add_flags("s2s-handle-invalid-metadata", "FailIfInvalid")
if invalidMetadataHandleOption == "RenameIfInvalid":
cpCmd.add_flags("s2s-handle-invalid-metadata", "RenameIfInvalid")
result = cpCmd.execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied file for validation
validate_dir_name = "validate_copy_single_file_from_s3_to_blob_handleinvalidmetadata_%s" % invalidMetadataHandleOption
local_validate_dest_dir = util.create_test_dir(validate_dir_name)
        local_validate_dest = os.path.join(local_validate_dest_dir, fileName)
result = util.Command("copy").add_arguments(dstFileURL).add_arguments(local_validate_dest). \
add_flags("log-level", "info").execute_azcopy_copy_command()
self.assertTrue(result)
validateCmd = util.Command("testBlob").add_arguments(local_validate_dest).add_arguments(dstFileURL).add_flags("no-guess-mime-type", "true"). \
add_flags("metadata", expectResolvedMetadata)
result = validateCmd.execute_azcopy_verify()
self.assertTrue(result)
# Test oauth support for service to service copy, where source is authenticated with access key for S3
# and destination is authenticated with OAuth token.
@unittest.skip("coverd by blob to blob")
def test_copy_single_17mb_file_from_s3_to_blob_oauth(self):
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_s3_blob)
dst_container_url = util.get_object_without_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_s3_blob)
self.util_test_copy_single_file_from_x_to_x(src_bucket_url, "S3", dst_container_url, "Blob", 17 * 1024 * 1024, True)
##################################
# Test scenarios related to blob type and blob tier.
##################################
def test_copy_single_file_from_blockblob_to_blockblob_with_blobtier_from_source(self):
src_bucket_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name_block_append_page)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_block_append_page)
blob_sizes = [0, 1, 8*1024*1024 - 1, 8 * 1024*1024]
for size in blob_sizes:
self.util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
src_bucket_url, "Blob", dst_container_url, "Blob", size)
self.util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
src_bucket_url, "Blob", dst_container_url, "Blob", 8*1024*1024+1, "BlockBlob", "Cool", "", "", "BlockBlob", "Cool")
def test_copy_single_file_from_blockblob_to_blockblob_with_no_preserve_blobtier(self):
src_bucket_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name_block_append_page)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_block_append_page)
self.util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
src_bucket_url, "Blob", dst_container_url, "Blob", 4*1024*1024+1, "BlockBlob", "Cool", "", "", "BlockBlob", "Hot", False)
def test_copy_single_file_from_pageblob_to_pageblob_with_blobtier_from_source(self):
src_bucket_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name_block_append_page)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_block_append_page)
blob_sizes = [0, 512, 1024, 4*1024*1024]
no_blob_tier = "" # don't validate tier for page blobs
for size in blob_sizes:
self.util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
src_bucket_url, "Blob", dst_container_url, "Blob", size, "PageBlob", "", "", "", "PageBlob", no_blob_tier)
def test_copy_single_file_from_appendblob_to_appendblob_from_source(self):
src_bucket_url = util.get_object_sas(util.test_s2s_src_blob_account_url, self.bucket_name_block_append_page)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_block_append_page)
blob_sizes = [0, 1, 8*1024*1024 - 1, 8 * 1024*1024, 8*1024*1024+1]
no_blob_tier = "" # blob-level tiering is not available for append blobs
for size in blob_sizes:
self.util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
src_bucket_url, "Blob", dst_container_url, "Blob", size, "AppendBlob", "", "", "", "AppendBlob", no_blob_tier)
def test_copy_single_file_from_s3_object_to_blockblob_with_default_blobtier(self):
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_block_append_page)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_block_append_page)
blob_sizes = [0, 1, 8*1024*1024 - 1, 8 * 1024*1024]
for size in blob_sizes:
self.util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
src_bucket_url, "S3", dst_container_url, "Blob", size)
@unittest.skip("override blob tier not enabled")
def test_copy_single_file_from_s3_object_to_blockblob_with_specified_blobtier(self):
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_block_append_page)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_block_append_page)
self.util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
src_bucket_url, "S3", dst_container_url, "Blob", 8*1024*1024+1, "", "", "BlockBlob", "Cool", "BlockBlob", "Cool")
@unittest.skip("override blob type not enabled")
def test_copy_single_file_from_s3_object_to_appendblob_from_source(self):
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_block_append_page)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_block_append_page)
blob_sizes = [0, 1, 8*1024*1024 - 1, 8 * 1024*1024, 8*1024*1024+1]
for size in blob_sizes:
self.util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
src_bucket_url, "S3", dst_container_url, "Blob", size, "", "", "AppendBlob", "", "AppendBlob")
@unittest.skip("override blob type not enabled")
def test_copy_single_file_from_s3_object_to_pageblob_with_blobtier_from_source(self):
src_bucket_url = util.get_object_without_sas(util.test_s2s_src_s3_service_url, self.bucket_name_block_append_page)
dst_container_url = util.get_object_sas(util.test_s2s_dst_blob_account_url, self.bucket_name_block_append_page)
blob_sizes = [0, 512, 1024, 8*1024*1024]
for size in blob_sizes:
self.util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
src_bucket_url, "S3", dst_container_url, "Blob", size, "", "", "PageBlob", "", "PageBlob")
##################################
# Test utils and reusable functions.
##################################
# common testing utils for service to service copy.
def util_are_dir_trees_equal(self, dir1, dir2):
dirs_cmp = filecmp.dircmp(dir1, dir2)
if len(dirs_cmp.left_only)>0 or len(dirs_cmp.right_only)>0 or \
len(dirs_cmp.funny_files)>0:
return False
(_, mismatch, errors) = filecmp.cmpfiles(
dir1, dir2, dirs_cmp.common_files, shallow=False)
if len(mismatch)>0 or len(errors)>0:
return False
for common_dir in dirs_cmp.common_dirs:
new_dir1 = os.path.join(dir1, common_dir)
new_dir2 = os.path.join(dir2, common_dir)
if not self.util_are_dir_trees_equal(new_dir1, new_dir2):
return False
return True
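    # Example (hypothetical paths): self.util_are_dir_trees_equal("/tmp/src", "/tmp/dst")
    # is True only when both trees contain the same names with identical file contents,
    # recursing into common subdirectories.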
def util_upload_to_src(
self,
localFilePath,
srcType,
srcURLForCopy,
recursive=False,
blobType="",
blobTier=""):
if srcType == "S3":
cmd = util.Command("upload").add_arguments(localFilePath).add_arguments(srcURLForCopy)
else:
cmd = util.Command("copy").add_arguments(localFilePath).add_arguments(srcURLForCopy).add_flags("log-level", "info")
if blobType != "" :
cmd.add_flags("blob-type", blobType)
if blobType == "PageBlob" and blobTier != "" :
cmd.add_flags("page-blob-tier", blobTier)
if blobType == "BlockBlob" and blobTier != "" :
cmd.add_flags("block-blob-tier", blobTier)
if recursive:
cmd.add_flags("recursive", "true")
if srcType == "S3":
result = cmd.execute_testsuite_upload()
else:
result = cmd.execute_azcopy_copy_command()
self.assertTrue(result)
def util_test_copy_single_file_from_x_to_x(
self,
srcBucketURL,
srcType,
dstBucketURL,
dstType,
sizeInKB=1,
oAuth=False,
customizedFileName="",
srcBlobType="",
dstBlobType="",
credTypeOverride=""):
# create source bucket
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
# create file of size 1KB.
if customizedFileName != "":
filename = customizedFileName
else:
filename = "test_" + str(sizeInKB) + "kb_copy.txt"
file_path = util.create_test_file(filename, sizeInKB)
if srcType == "S3":
srcFileURL = util.get_object_without_sas(srcBucketURL, filename)
else:
srcFileURL = util.get_object_sas(srcBucketURL, filename)
if oAuth:
dstFileURL = util.get_object_without_sas(dstBucketURL, filename)
else:
dstFileURL = util.get_object_sas(dstBucketURL, filename)
# Upload file.
self.util_upload_to_src(file_path, srcType, srcFileURL, blobType=srcBlobType)
if credTypeOverride != "":
os.environ["AZCOPY_CRED_TYPE"] = credTypeOverride
# Copy file using azcopy from srcURL to destURL
result = util.Command("copy").add_arguments(srcFileURL).add_arguments(dstFileURL). \
add_flags("log-level", "info")
if dstBlobType != "":
result = result.add_flags("blob-type", dstBlobType)
r = result.execute_azcopy_copy_command() # nice "dynamic typing"
self.assertTrue(r)
if credTypeOverride != "":
os.environ["AZCOPY_CRED_TYPE"] = ""
# Downloading the copied file for validation
validate_dir_name = "validate_copy_single_%dKB_file_from_%s_to_%s_%s" % (sizeInKB, srcType, dstType, customizedFileName)
local_validate_dest_dir = util.create_test_dir(validate_dir_name)
local_validate_dest = os.path.join(local_validate_dest_dir, filename)
result = util.Command("copy").add_arguments(dstFileURL).add_arguments(local_validate_dest). \
add_flags("log-level", "info").execute_azcopy_copy_command()
self.assertTrue(result)
# Verifying the downloaded blob
result = filecmp.cmp(file_path, local_validate_dest, shallow=False)
self.assertTrue(result)
# clean up both source and destination bucket
# util.Command("clean").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
# util.Command("clean").add_arguments(dstBucketURL).add_flags("serviceType", dstType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
def util_test_copy_n_files_from_x_bucket_to_x_bucket(
self,
srcBucketURL,
srcType,
dstBucketURL,
dstType,
n=10,
sizeInKB=1):
# create source bucket
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
# create file of size n KBs in newly created directory.
src_dir_name = "copy_%d_%dKB_files_from_%s_bucket_to_%s_bucket" % (n, sizeInKB, srcType, dstType)
src_dir_path = util.create_test_n_files(sizeInKB*1024, n, src_dir_name)
# Upload file.
self.util_upload_to_src(src_dir_path, srcType, srcBucketURL, True)
# Copy files using azcopy from srcURL to destURL
result = util.Command("copy").add_arguments(srcBucketURL).add_arguments(dstBucketURL). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied files for validation
validate_dir_name = "validate_copy_%d_%dKB_files_from_%s_bucket_to_%s_bucket" % (n, sizeInKB, srcType, dstType)
local_validate_dest = util.create_test_dir(validate_dir_name)
dst_directory_url = util.get_object_sas(dstBucketURL, src_dir_name)
result = util.Command("copy").add_arguments(dst_directory_url).add_arguments(local_validate_dest). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Verifying the downloaded blob
result = self.util_are_dir_trees_equal(src_dir_path, os.path.join(local_validate_dest, src_dir_name))
self.assertTrue(result)
# clean up both source and destination bucket
# util.Command("clean").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
# util.Command("clean").add_arguments(dstBucketURL).add_flags("serviceType", dstType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
def util_test_copy_file_from_x_bucket_to_x_bucket_strip_top_dir(
self,
srcBucketURL,
srcType,
dstBucketURL,
dstType,
recursive=True):
# create source bucket
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
# create file.
filename = "copy_strip_top_dir_file.txt"
file_path = util.create_test_file(filename, 1)
if srcType == "S3":
srcFileURL = util.get_object_without_sas(srcBucketURL, filename)
else:
srcFileURL = util.get_object_sas(srcBucketURL, filename)
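        # Replacing the file name with "*" turns the source into a wildcard over the
        # bucket contents, so the matched files are copied directly into the destination
        # container instead of being nested under a source-named folder ("strip top dir").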
src_dir_url = srcFileURL.replace(filename, "*")
# Upload file.
self.util_upload_to_src(file_path, srcType, srcFileURL, False)
# Copy file using azcopy from srcURL to destURL
if recursive:
result = util.Command("copy").add_arguments(src_dir_url).add_arguments(dstBucketURL). \
add_flags("log-level", "info").add_flags("recursive", "true"). \
execute_azcopy_copy_command()
else:
result = util.Command("copy").add_arguments(src_dir_url).add_arguments(dstBucketURL). \
add_flags("log-level", "info").execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied files for validation
validate_dir_name = "validate_copy_file_from_%s_bucket_to_%s_bucket_strip_top_dir_recursive_%s" % (srcType, dstType, recursive)
local_validate_dest = util.create_test_dir(validate_dir_name)
dst_file_url = util.get_object_sas(dstBucketURL, filename)
result = util.Command("copy").add_arguments(dst_file_url).add_arguments(local_validate_dest). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Verifying the downloaded file
result = filecmp.cmp(file_path, os.path.join(local_validate_dest, filename), shallow=False)
self.assertTrue(result)
# clean up both source and destination bucket
# util.Command("clean").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
# util.Command("clean").add_arguments(dstBucketURL).add_flags("serviceType", dstType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
# TODO: ensure this scenario, when copy from directory to directory, src directory will be created in dest directory
# this is similar for blob download/upload.
def util_test_copy_n_files_from_x_dir_to_x_dir(self,
srcBucketURL,
srcType,
dstBucketURL,
dstType,
n=10,
sizeInKB=1):
        # create source bucket
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
# create file of size n KBs in newly created directory.
src_dir_name = "copy_%d_%dKB_files_from_%s_dir_to_%s_dir" % (n, sizeInKB, srcType, dstType)
src_dir_path = util.create_test_n_files(sizeInKB*1024, n, src_dir_name)
# Upload file.
self.util_upload_to_src(src_dir_path, srcType, srcBucketURL, True)
if srcType == "S3":
srcDirURL = util.get_object_without_sas(srcBucketURL, src_dir_name)
else:
srcDirURL = util.get_object_sas(srcBucketURL, src_dir_name)
dstDirURL = util.get_object_sas(dstBucketURL, src_dir_name)
# Copy files using azcopy from srcURL to destURL
result = util.Command("copy").add_arguments(srcDirURL).add_arguments(dstDirURL). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied files for validation
validate_dir_name = "validate_copy_%d_%dKB_files_from_%s_dir_to_%s_dir" % (n, sizeInKB, srcType, dstType)
local_validate_dest = util.create_test_dir(validate_dir_name)
result = util.Command("copy").add_arguments(dstDirURL).add_arguments(local_validate_dest). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Verifying the downloaded blob
        # NOTE: special behavior that still needs confirmation: the source directory ends up
        # nested inside a directory of the same name at the destination (see the TODO above),
        # hence the doubled src_dir_name in the path below.
print(src_dir_path)
print(os.path.join(local_validate_dest, src_dir_name, src_dir_name))
result = self.util_are_dir_trees_equal(src_dir_path, os.path.join(local_validate_dest, src_dir_name, src_dir_name))
#result = self.util_are_dir_trees_equal(src_dir_path, local_validate_dest)
self.assertTrue(result)
# clean up both source and destination bucket
# util.Command("clean").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
# util.Command("clean").add_arguments(dstBucketURL).add_flags("serviceType", dstType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
def util_test_copy_n_files_from_x_dir_to_x_dir_strip_top_dir(self,
srcBucketURL,
srcType,
dstBucketURL,
dstType,
n=10,
sizeInKB=1,
recursive=True):
# create source bucket
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
# create file of size n KBs in newly created directory.
src_dir_name = "copy_%d_%dKB_files_from_%s_dir_to_%s_dir_recursive_%s" % (n, sizeInKB, srcType, dstType, recursive)
src_dir_path = util.create_test_n_files(sizeInKB*1024, n, src_dir_name)
src_sub_dir_name = src_dir_name + "/" + "subdir"
util.create_test_n_files(sizeInKB*1024,1, src_sub_dir_name)
# Upload file.
self.util_upload_to_src(src_dir_path, srcType, srcBucketURL, True)
if srcType == "S3":
src_dir_url = util.get_object_without_sas(srcBucketURL, src_dir_name + "/*")
else:
src_dir_url = util.get_object_sas(srcBucketURL, src_dir_name + "/*")
dstDirURL = util.get_object_sas(dstBucketURL, src_dir_name)
if recursive:
# Copy files using azcopy from srcURL to destURL
result = util.Command("copy").add_arguments(src_dir_url).add_arguments(dstDirURL). \
add_flags("log-level", "info").add_flags("recursive", "true"). \
execute_azcopy_copy_command()
self.assertTrue(result)
else:
# Copy files using azcopy from srcURL to destURL
result = util.Command("copy").add_arguments(src_dir_url).add_arguments(dstDirURL). \
add_flags("log-level", "info").execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied files for validation
validate_dir_name = "validate_copy_%d_%dKB_files_from_%s_dir_to_%s_dir" % (n, sizeInKB, srcType, dstType)
local_validate_dest = util.create_test_dir(validate_dir_name)
result = util.Command("copy").add_arguments(dstDirURL).add_arguments(local_validate_dest). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Verifying the downloaded blob
        # NOTE: special behavior that still needs confirmation (see the TODO above about directory nesting).
if recursive:
result = self.util_are_dir_trees_equal(src_dir_path, os.path.join(local_validate_dest, src_dir_name))
else:
dirs_cmp = filecmp.dircmp(src_dir_path, os.path.join(local_validate_dest, src_dir_name))
if len(dirs_cmp.left_only) > 0 and len(dirs_cmp.common_files) == n:
result = True
else:
result = False
#result = self.util_are_dir_trees_equal(src_dir_path, local_validate_dest)
self.assertTrue(result)
# clean up both source and destination bucket
# util.Command("clean").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
# util.Command("clean").add_arguments(dstBucketURL).add_flags("serviceType", dstType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
def util_test_copy_files_from_x_account_to_x_account(self,
srcAccountURL,
srcType,
dstAccountURL,
dstType,
bucketNamePrefix):
# More enumerating scenarios could be covered with integration testing.
bucketName1 = bucketNamePrefix + "1"
bucketName2 = bucketNamePrefix + "2"
if srcType == "S3":
src_bucket_url1 = util.get_object_without_sas(srcAccountURL, bucketName1)
src_bucket_url2 = util.get_object_without_sas(srcAccountURL, bucketName2)
else:
src_bucket_url1 = util.get_object_sas(srcAccountURL, bucketName1)
src_bucket_url2 = util.get_object_sas(srcAccountURL, bucketName2)
# create source bucket
createBucketResult1 = util.Command("create").add_arguments(src_bucket_url1).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(createBucketResult1)
createBucketResult2 = util.Command("create").add_arguments(src_bucket_url2).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(createBucketResult2)
# create files of size n KBs.
src_dir_name1 = "copy_files_from_%s_account_to_%s_account_1" % (srcType, dstType)
src_dir_path1 = util.create_test_n_files(1*1024, 100, src_dir_name1)
src_dir_name2 = "copy_files_from_%s_account_to_%s_account_2" % (srcType, dstType)
src_dir_path2 = util.create_test_n_files(1, 2, src_dir_name2)
# Upload file.
self.util_upload_to_src(src_dir_path1, srcType, src_bucket_url1, True)
self.util_upload_to_src(src_dir_path2, srcType, src_bucket_url2, True)
# Copy files using azcopy from srcURL to destURL
result = util.Command("copy").add_arguments(srcAccountURL).add_arguments(dstAccountURL). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied files for validation
validate_dir_name1 = "validate_copy_files_from_%s_account_to_%s_account_1" % (srcType, dstType)
local_validate_dest1 = util.create_test_dir(validate_dir_name1)
dst_container_url1 = util.get_object_sas(dstAccountURL, bucketName1)
dst_directory_url1 = util.get_object_sas(dst_container_url1, src_dir_name1)
result = util.Command("copy").add_arguments(dst_directory_url1).add_arguments(local_validate_dest1). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Verifying the downloaded blob
result = self.util_are_dir_trees_equal(src_dir_path1, os.path.join(local_validate_dest1, src_dir_name1))
self.assertTrue(result)
validate_dir_name2 = "validate_copy_files_from_%s_account_to_%s_account_2" % (srcType, dstType)
local_validate_dest2 = util.create_test_dir(validate_dir_name2)
dst_container_url2 = util.get_object_sas(dstAccountURL, bucketName2)
dst_directory_url2 = util.get_object_sas(dst_container_url2, src_dir_name2)
result = util.Command("copy").add_arguments(dst_directory_url2).add_arguments(local_validate_dest2). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Verifying the downloaded blob
result = self.util_are_dir_trees_equal(src_dir_path2, os.path.join(local_validate_dest2, src_dir_name2))
self.assertTrue(result)
# clean up both source and destination bucket
# util.Command("clean").add_arguments(src_bucket_url).add_flags("serviceType", srcType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
# util.Command("clean").add_arguments(validate_dst_container_url).add_flags("serviceType", dstType). \
# add_flags("resourceType", "Bucket").execute_azcopy_create()
def util_test_copy_single_file_from_x_to_x_propertyandmetadata(
self,
srcBucketURL,
srcType,
dstBucketURL,
dstType,
preserveProperties=True):
# create bucket and create file with metadata and properties
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
fileName = "single_file_propertyandmetadata_%s" % (preserveProperties)
if srcType == "S3":
srcFileURL = util.get_object_without_sas(srcBucketURL, fileName)
else:
srcFileURL = util.get_object_sas(srcBucketURL, fileName)
dstFileURL = util.get_object_sas(dstBucketURL, fileName)
result = util.Command("create").add_arguments(srcFileURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "SingleFile"). \
add_flags("metadata", "author=jiac;viewport=width;description=test file"). \
add_flags("content-type", "testctype").add_flags("content-encoding", "testenc"). \
add_flags("content-disposition", "testcdis").add_flags("content-language", "testclang").\
add_flags("cache-control", "testcc").execute_azcopy_create()
self.assertTrue(result)
# Copy file using azcopy from srcURL to destURL
cpCmd = util.Command("copy").add_arguments(srcFileURL).add_arguments(dstFileURL). \
add_flags("log-level", "info")
if preserveProperties == False:
cpCmd.add_flags("s2s-preserve-properties", "false")
result = cpCmd.execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied file for validation
validate_dir_name = "validate_copy_single_file_from_%s_to_%s_propertyandmetadata_%s" % (srcType, dstType, preserveProperties)
local_validate_dest_dir = util.create_test_dir(validate_dir_name)
        local_validate_dest = os.path.join(local_validate_dest_dir, fileName)
if srcType == "S3":
result = util.Command("copy").add_arguments(dstFileURL).add_arguments(local_validate_dest). \
add_flags("log-level", "info").execute_azcopy_copy_command()
else:
result = util.Command("copy").add_arguments(srcFileURL).add_arguments(local_validate_dest). \
add_flags("log-level", "info").execute_azcopy_copy_command()
self.assertTrue(result)
# TODO: test different targets according to dstType
testCmdName = "testBlob" if dstType.lower() == "blob" else "testFile"
validateCmd = util.Command(testCmdName).add_arguments(local_validate_dest).add_arguments(dstFileURL).add_flags("no-guess-mime-type", "true")
if preserveProperties == True:
validateCmd.add_flags("metadata", "author=jiac;viewport=width;description=test file"). \
add_flags("content-type", "testctype").add_flags("content-encoding", "testenc"). \
add_flags("content-disposition", "testcdis").add_flags("content-language", "testclang"). \
add_flags("cache-control", "testcc")
else:
validateCmd.add_flags("metadata", ""). \
add_flags("content-type", "").add_flags("content-encoding", ""). \
add_flags("content-disposition", "").add_flags("content-language", ""). \
add_flags("cache-control", "")
        # S3 HeadObject does not return Content-MD5 (https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html),
        # so skip Content-MD5 validation when the source is S3.
if srcType != "S3":
validateCmd.add_flags("check-content-md5", "true")
result = validateCmd.execute_azcopy_verify()
self.assertTrue(result)
def util_test_copy_file_from_x_bucket_to_x_bucket_propertyandmetadata(
self,
srcBucketURL,
srcType,
dstBucketURL,
dstType,
preserveProperties=True):
# create bucket and create file with metadata and properties
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
fileName = "bucket_file_propertyandmetadata_%s" % (preserveProperties)
if srcType == "S3":
srcFileURL = util.get_object_without_sas(srcBucketURL, fileName)
else:
srcFileURL = util.get_object_sas(srcBucketURL, fileName)
dstFileURL = util.get_object_sas(dstBucketURL, fileName)
result = util.Command("create").add_arguments(srcFileURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "SingleFile"). \
add_flags("metadata", "author=jiac;viewport=width;description=test file"). \
add_flags("content-type", "testctype").add_flags("content-encoding", "testenc"). \
add_flags("content-disposition", "testcdis").add_flags("content-language", "testclang").\
add_flags("cache-control", "testcc").execute_azcopy_create()
self.assertTrue(result)
# Copy file using azcopy from srcURL to destURL
cpCmd = util.Command("copy").add_arguments(srcBucketURL).add_arguments(dstBucketURL). \
add_flags("log-level", "info").add_flags("recursive", "true")
if not preserveProperties:
cpCmd.add_flags("s2s-preserve-properties", "false")
result = cpCmd.execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied file for validation
validate_dir_name = "validate_copy_file_from_%s_bucket_to_%s_bucket_propertyandmetadata_%s" % (srcType, dstType, preserveProperties)
local_validate_dest_dir = util.create_test_dir(validate_dir_name)
        local_validate_dest = os.path.join(local_validate_dest_dir, fileName)
        # When properties were not preserved, disable the MD5 check on the validation download.
        if srcType == "S3":
            downloadCmd = util.Command("copy").add_arguments(dstFileURL).add_arguments(local_validate_dest). \
                add_flags("log-level", "info")
        else:
            downloadCmd = util.Command("copy").add_arguments(srcFileURL).add_arguments(local_validate_dest). \
                add_flags("log-level", "info")
        if not preserveProperties:
            downloadCmd.flags["check-md5"] = "NoCheck"
        result = downloadCmd.execute_azcopy_copy_command()
self.assertTrue(result)
# TODO: test different targets according to dstType
validateCmd = util.Command("testBlob").add_arguments(local_validate_dest).add_arguments(dstFileURL).add_flags("no-guess-mime-type", "true")
if preserveProperties == True:
validateCmd.add_flags("metadata", "author=jiac;viewport=width;description=test file"). \
add_flags("content-type", "testctype").add_flags("content-encoding", "testenc"). \
add_flags("content-disposition", "testcdis").add_flags("content-language", "testclang"). \
add_flags("cache-control", "testcc")
else:
validateCmd.add_flags("metadata", ""). \
add_flags("content-type", "").add_flags("content-encoding", ""). \
add_flags("content-disposition", "").add_flags("content-language", ""). \
add_flags("cache-control", "")
        # S3 HeadObject does not return Content-MD5 (https://docs.aws.amazon.com/AmazonS3/latest/API/RESTCommonResponseHeaders.html),
        # so skip Content-MD5 validation when the source is S3.
if srcType != "S3":
validateCmd.add_flags("check-content-md5", "true")
result = validateCmd.execute_azcopy_verify()
self.assertTrue(result)
def util_test_overwrite_copy_single_file_from_x_to_x(
self,
srcBucketURL,
srcType,
dstBucketURL,
dstType,
oAuth=False,
overwrite=True):
# create source bucket
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
result = util.Command("create").add_arguments(dstBucketURL).add_flags("serviceType", dstType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
fileSize1 = 1
fileSize2 = 2
# create file of size 1KB.
destFileName = "test_copy.txt"
localFileName1 = "test_" + str(fileSize1) + "kb_copy.txt"
localFileName2 = "test_" + str(fileSize2) + "kb_copy.txt"
filePath1 = util.create_test_file(localFileName1, fileSize1)
filePath2 = util.create_test_file(localFileName2, fileSize2)
if srcType == "S3":
srcFileURL = util.get_object_without_sas(srcBucketURL, localFileName1)
else:
srcFileURL = util.get_object_sas(srcBucketURL, localFileName1)
if oAuth:
dstFileURL = util.get_object_without_sas(dstBucketURL, destFileName)
else:
dstFileURL = util.get_object_sas(dstBucketURL, destFileName)
# Upload file.
self.util_upload_to_src(filePath1, srcType, srcFileURL)
self.util_upload_to_src(filePath2, dstType, dstFileURL)
# Copy file using azcopy from srcURL to destURL
cpCmd = util.Command("copy").add_arguments(srcFileURL).add_arguments(dstFileURL). \
add_flags("log-level", "info")
if overwrite == False:
cpCmd.add_flags("overwrite", "false")
result = cpCmd.execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied file for validation
validate_dir_name = "validate_overwrite_%s_copy_single_file_from_%s_to_%s" % (overwrite, srcType, dstType)
local_validate_dest_dir = util.create_test_dir(validate_dir_name)
local_validate_dest = os.path.join(local_validate_dest_dir, destFileName)
result = util.Command("copy").add_arguments(dstFileURL).add_arguments(local_validate_dest). \
add_flags("log-level", "info").execute_azcopy_copy_command()
self.assertTrue(result)
# Verifying the downloaded blob
if overwrite:
result = filecmp.cmp(filePath1, local_validate_dest, shallow=False)
else:
result = filecmp.cmp(filePath2, local_validate_dest, shallow=False)
self.assertTrue(result)
def util_test_copy_n_files_from_s3_bucket_to_blob_account(
self,
srcBucketURL,
dstAccountURL,
n=10,
sizeInKB=1):
srcType = "S3"
# create source bucket
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
# create file of size n KBs in newly created directory.
src_dir_name = "copy_%d_%dKB_files_from_s3_bucket_to_blob_account" % (n, sizeInKB)
src_dir_path = util.create_test_n_files(sizeInKB*1024, n, src_dir_name)
# Upload file.
self.util_upload_to_src(src_dir_path, srcType, srcBucketURL, True)
# Copy files using azcopy from srcURL to destURL
result = util.Command("copy").add_arguments(srcBucketURL).add_arguments(dstAccountURL). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Downloading the copied files for validation
validate_dir_name = "validate_copy_%d_%dKB_files_from_s3_bucket_to_blob_account" % (n, sizeInKB)
local_validate_dest = util.create_test_dir(validate_dir_name)
validateDstBucketURL = util.get_object_sas(dstAccountURL, self.bucket_name_s3_blob)
dst_directory_url = util.get_object_sas(validateDstBucketURL, src_dir_name)
result = util.Command("copy").add_arguments(dst_directory_url).add_arguments(local_validate_dest). \
add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
self.assertTrue(result)
# Verifying the downloaded blob
result = self.util_are_dir_trees_equal(src_dir_path, os.path.join(local_validate_dest, src_dir_name))
self.assertTrue(result)
def util_test_copy_single_file_from_x_to_blob_with_blobtype_blobtier(
self,
srcBucketURL,
srcType,
dstBucketURL,
dstType,
sizeInKB=1,
srcBlobType="",
srcBlobTier="",
destBlobTypeOverride="",
destBlobTierOverride="",
blobTypeForValidation="BlockBlob",
blobTierForValidation="Hot",
preserveAccessTier=True):
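        # srcBlobType/srcBlobTier control how the source blob is uploaded,
        # destBlobTypeOverride/destBlobTierOverride are passed to the copy command as
        # explicit overrides, and blobTypeForValidation/blobTierForValidation are the
        # type and tier the destination blob is expected to end up with.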
# create source bucket
result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
add_flags("resourceType", "Bucket").execute_azcopy_create()
self.assertTrue(result)
# create file of size 1KB.
filename = "test_%s_kb_%s_%s_%s_%s_%s_%s_%s_%s_copy.txt" % (str(sizeInKB), srcType, dstType, srcBlobType, srcBlobTier, destBlobTypeOverride, destBlobTierOverride, blobTypeForValidation, blobTierForValidation)
file_path = util.create_test_file(filename, sizeInKB)
if srcType == "S3":
srcFileURL = util.get_object_without_sas(srcBucketURL, filename)
else:
srcFileURL = util.get_object_sas(srcBucketURL, filename)
dstFileURL = util.get_object_sas(dstBucketURL, filename)
# upload file.
self.util_upload_to_src(file_path, srcType, srcFileURL, False, srcBlobType, srcBlobTier)
# copy file using azcopy from srcURL to destURL
copyCmd = util.Command("copy").add_arguments(srcFileURL).add_arguments(dstFileURL). \
add_flags("log-level", "info")
if destBlobTypeOverride != "":
copyCmd.add_flags("blob-type", destBlobTypeOverride)
if destBlobTierOverride != "":
if destBlobTypeOverride == "PageBlob" or (srcBlobType == "PageBlob" and destBlobTypeOverride == ""):
copyCmd.add_flags("page-blob-tier", destBlobTierOverride)
if destBlobTypeOverride == "BlockBlob" or (srcBlobType == "BlockBlob" and destBlobTypeOverride == ""):
copyCmd.add_flags("block-blob-tier", destBlobTierOverride)
if preserveAccessTier == False:
copyCmd.add_flags("s2s-preserve-access-tier", "false")
copyCmdResult = copyCmd.execute_azcopy_copy_command()
self.assertTrue(copyCmdResult)
# execute validator.
        # don't check content-type, as it depends on the upload and on service behavior.
# cover content-type check in another test.
testBlobCmd = util.Command("testBlob").add_arguments(file_path).add_arguments(dstFileURL). \
add_flags("check-content-type", "false")
if blobTypeForValidation != "":
testBlobCmd.add_flags("blob-type", blobTypeForValidation)
if blobTierForValidation != "":
testBlobCmd.add_flags("blob-tier", blobTierForValidation)
testBlobResult = testBlobCmd.execute_azcopy_verify()
self.assertTrue(testBlobResult)
# ----------------------------------------------------------------------------
# next file
# ----------------------------------------------------------------------------
class A164:
pass
# ----------------------------------------------------------------------------
# next file
# ----------------------------------------------------------------------------
# GENERATED BY KOMAND SDK - DO NOT EDIT
import insightconnect_plugin_runtime
import json
class Input:
API_KEY = "api_key"
API_KEY_ID = "api_key_id"
SECURITY_LEVEL = "security_level"
URL = "url"
class ConnectionSchema(insightconnect_plugin_runtime.Input):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"api_key": {
"$ref": "#/definitions/credential_secret_key",
"title": "API Key",
"description": "The Cortex XDR API Key that is generated when creating a new key",
"order": 3
},
"api_key_id": {
"type": "integer",
"title": "API Key ID",
"description": "The API Key ID shown in the Cortex XDR API Keys table in settings. e.g. 1, 2, 3",
"order": 2
},
"security_level": {
"type": "string",
"title": "Security Level",
"description": "The Security Level of the key provided. This can be found in the API Key settings table in the Cortex XDR settings",
"default": "Standard",
"enum": [
"Advanced",
"Standard"
],
"order": 4
},
"url": {
"type": "string",
"title": "URL",
"description": "Cortex XDR API URL",
"order": 1
}
},
"required": [
"api_key",
"api_key_id",
"security_level",
"url"
],
"definitions": {
"credential_secret_key": {
"id": "credential_secret_key",
"type": "object",
"title": "Credential: Secret Key",
"description": "A shared secret key",
"properties": {
"secretKey": {
"type": "string",
"title": "Secret Key",
"displayType": "password",
"description": "The shared secret key",
"format": "password"
}
},
"required": [
"secretKey"
]
}
}
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
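# Example of connection parameters that would satisfy the schema above (all values
# are placeholders, not real credentials):
#
#     {
#         "api_key": {"secretKey": "<secret-key>"},
#         "api_key_id": 1,
#         "security_level": "Standard",
#         "url": "https://<your-cortex-xdr-api-host>"
#     }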
# ----------------------------------------------------------------------------
# next file
# ----------------------------------------------------------------------------
import os
import json
import itertools
from flask import Blueprint, jsonify, request
from nameko.standalone.rpc import ClusterRpcProxy
from nameko.standalone.events import event_dispatcher
news = Blueprint('news', __name__)
BROKER_CONFIG = {'AMQP_URI': os.environ.get('QUEUE_HOST')}
@news.route('/<string:news_type>/<int:news_id>', methods=['GET'])
def get_single_news(news_type, news_id):
"""Get single user details"""
try:
response_object = rpc_get_news(news_type, news_id)
dispatcher = event_dispatcher(BROKER_CONFIG)
dispatcher('recommendation_sender', 'receiver', {
'user_id': request.cookies.get('user_id'),
'news': response_object['news'],
})
return jsonify(response_object), 200
except Exception as e:
return error_response(e, 500)
@news.route(
'/all/<int:num_page>/<int:limit>',
methods=['GET'])
def get_all_news(num_page, limit):
try:
response_famous = rpc_get_all_news(
'famous',
num_page,
limit
)
response_politics = rpc_get_all_news(
'politics',
num_page,
limit
)
response_sports = rpc_get_all_news(
'sports',
num_page,
limit
)
all_news = itertools.chain(
response_famous.get('news', []),
response_politics.get('news', []),
response_sports.get('news', []),
)
response_object = {
'status': 'success',
'news': list(all_news),
}
return jsonify(response_object), 200
except Exception as e:
return error_response(e, 500)
@news.route(
'/<string:news_type>/<int:num_page>/<int:limit>',
methods=['GET'])
def get_all_news_by_type(news_type, num_page, limit):
"""Get all new by type"""
try:
response_object = rpc_get_all_news(
news_type,
num_page,
limit
)
return jsonify(response_object), 200
except Exception as e:
return error_response(e, 500)
@news.route('/<string:news_type>', methods=['POST', 'PUT'])
def add_news(news_type):
post_data = request.get_json()
if not post_data:
return error_response('Invalid payload', 400)
try:
response_object = rpc_command(news_type, post_data)
return jsonify(response_object), 201
except Exception as e:
return error_response(e, 500)
def error_response(e, code):
response_object = {
'status': 'fail',
'message': str(e),
}
return jsonify(response_object), code
def rpc_get_news(news_type, news_id):
with ClusterRpcProxy(BROKER_CONFIG) as rpc:
if news_type == 'famous':
news = rpc.query_famous.get_news(news_id)
elif news_type == 'sports':
news = rpc.query_sports.get_news(news_id)
elif news_type == 'politics':
news = rpc.query_politics.get_news(news_id)
else:
return error_response('Invalid News type', 400)
return {
'status': 'success',
'news': json.loads(news)
}
def rpc_get_all_news(news_type, num_page, limit):
with ClusterRpcProxy(BROKER_CONFIG) as rpc:
if news_type == 'famous':
news = rpc.query_famous.get_all_news(num_page, limit)
elif news_type == 'sports':
news = rpc.query_sports.get_all_news(num_page, limit)
elif news_type == 'politics':
news = rpc.query_politics.get_all_news(num_page, limit)
else:
return error_response('Invalid News type', 400)
return {
'status': 'success',
'news': json.loads(news)
}
def rpc_command(news_type, data):
with ClusterRpcProxy(BROKER_CONFIG) as rpc:
if news_type == 'famous':
news = rpc.command_famous.add_news(data)
elif news_type == 'sports':
news = rpc.command_sports.add_news(data)
elif news_type == 'politics':
news = rpc.command_politics.add_news(data)
else:
return error_response('Invalid News type', 400)
return {
'status': 'success',
'news': news,
}
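# A minimal wiring sketch (not part of the original service): how this blueprint
# might be mounted in a Flask application. The url_prefix, host and port here are
# illustrative assumptions.
if __name__ == "__main__":
    from flask import Flask

    app = Flask(__name__)
    app.register_blueprint(news, url_prefix="/news")
    app.run(host="0.0.0.0", port=5000)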
# ----------------------------------------------------------------------------
# next file
# ----------------------------------------------------------------------------
import rinobot_plugin as bot
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
matplotlib.rcParams['savefig.dpi'] = 2 * matplotlib.rcParams['savefig.dpi']
def main():
filepath = bot.filepath()
data = bot.loadfile(filepath)
x = data[:, 0]
y = data[:, 1:]
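    # bot.loadfile is assumed to return a 2-D numeric array: column 0 is x and every
    # remaining column is plotted as a separate y series.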
plt.plot(x, y)
xmin = bot.get_arg('xmin', type=float)
xmax = bot.get_arg('xmax', type=float)
ymin = bot.get_arg('ymin', type=float)
ymax = bot.get_arg('ymax', type=float)
xlabel = bot.get_arg('xlabel', type=str)
ylabel = bot.get_arg('ylabel', type=str)
plt.xlim([xmin, xmax])
plt.ylim([ymin, ymax])
if xlabel:
plt.xlabel(xlabel)
if ylabel:
plt.ylabel(ylabel)
outname = bot.no_extension() + '-line-plot.png'
outpath = bot.output_filepath(outname)
plt.savefig(outpath)
if __name__ == "__main__":
main()
# ----------------------------------------------------------------------------
# next file
# ----------------------------------------------------------------------------
from dagster_aws.emr import emr_pyspark_step_launcher
from dagster_aws.s3 import s3_plus_default_storage_defs, s3_resource
from dagster_pyspark import DataFrame as DagsterPySparkDataFrame
from dagster_pyspark import pyspark_resource
from pyspark.sql import DataFrame, Row
from pyspark.sql.types import IntegerType, StringType, StructField, StructType
from dagster import (
ModeDefinition,
PresetDefinition,
make_python_type_usable_as_dagster_type,
pipeline,
repository,
solid,
)
from dagster.core.definitions.no_step_launcher import no_step_launcher
# Make pyspark.sql.DataFrame map to dagster_pyspark.DataFrame
make_python_type_usable_as_dagster_type(python_type=DataFrame, dagster_type=DagsterPySparkDataFrame)
@solid(required_resource_keys={'pyspark', 'pyspark_step_launcher'})
def make_people(context) -> DataFrame:
schema = StructType([StructField('name', StringType()), StructField('age', IntegerType())])
rows = [Row(name='Thom', age=51), Row(name='Jonny', age=48), Row(name='Nigel', age=49)]
return context.resources.pyspark.spark_session.createDataFrame(rows, schema)
@solid(required_resource_keys={'pyspark_step_launcher'})
def filter_over_50(_, people: DataFrame) -> DataFrame:
return people.filter(people['age'] > 50)
@solid(required_resource_keys={'pyspark_step_launcher'})
def count_people(_, people: DataFrame) -> int:
return people.count()
emr_mode = ModeDefinition(
name='emr',
resource_defs={
'pyspark_step_launcher': emr_pyspark_step_launcher,
'pyspark': pyspark_resource,
's3': s3_resource,
},
system_storage_defs=s3_plus_default_storage_defs,
)
emr_preset = PresetDefinition.from_pkg_resources(
name='emr',
mode='emr',
pkg_resource_defs=[('emr_pyspark', 'prod_resources.yaml'), ('emr_pyspark', 's3_storage.yaml')],
)
local_mode = ModeDefinition(
name='local',
resource_defs={'pyspark_step_launcher': no_step_launcher, 'pyspark': pyspark_resource},
)
@pipeline(
mode_defs=[emr_mode, local_mode], preset_defs=[emr_preset],
)
def my_pipeline():
count_people(filter_over_50(make_people()))
@repository
def emr_pyspark_example():
return [my_pipeline]
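# A minimal sketch of a local run, assuming the legacy `execute_pipeline` API that
# matches the `@pipeline`/`@solid` constructs used above:
if __name__ == "__main__":
    from dagster import execute_pipeline

    execute_pipeline(my_pipeline, mode="local")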
# ----------------------------------------------------------------------------
# next file
# ----------------------------------------------------------------------------
# src/sentry/api/endpoints/relay_projectconfigs.py
from __future__ import absolute_import
import six
from rest_framework.response import Response
from sentry_sdk import Hub
from sentry_sdk.tracing import Span
from sentry.api.base import Endpoint
from sentry.api.permissions import RelayPermission
from sentry.api.authentication import RelayAuthentication
from sentry.relay import config
from sentry.models import Project, Organization, OrganizationOption
from sentry.utils import metrics
class RelayProjectConfigsEndpoint(Endpoint):
authentication_classes = (RelayAuthentication,)
permission_classes = (RelayPermission,)
def post(self, request):
with Hub.current.start_span(
Span(op="http.server", transaction="RelayProjectConfigsEndpoint", sampled=True)
):
return self._post(request)
def _post(self, request):
relay = request.relay
assert relay is not None # should be provided during Authentication
full_config_requested = request.relay_request_data.get("fullConfig")
if full_config_requested and not relay.is_internal:
return Response("Relay unauthorized for full config information", 403)
with Hub.current.start_span(op="relay_fetch_projects"):
project_ids = set(request.relay_request_data.get("projects") or ())
if project_ids:
with metrics.timer("relay_project_configs.fetching_projects.duration"):
projects = {p.id: p for p in Project.objects.filter(pk__in=project_ids)}
else:
projects = {}
with Hub.current.start_span(op="relay_fetch_orgs"):
# Preload all organizations and their options to prevent repeated
# database access when computing the project configuration.
org_ids = set(project.organization_id for project in six.itervalues(projects))
if org_ids:
with metrics.timer("relay_project_configs.fetching_orgs.duration"):
orgs = {
o.id: o
for o in Organization.objects.filter(pk__in=org_ids)
if request.relay.has_org_access(o)
}
else:
orgs = {}
org_options = {
i: OrganizationOption.objects.get_all_values(i) for i in six.iterkeys(orgs)
}
metrics.timing("relay_project_configs.projects_requested", len(project_ids))
metrics.timing("relay_project_configs.projects_fetched", len(projects))
metrics.timing("relay_project_configs.orgs_fetched", len(orgs))
configs = {}
for project_id in project_ids:
configs[six.text_type(project_id)] = None
project = projects.get(int(project_id))
if project is None:
continue
organization = orgs.get(project.organization_id)
if organization is None:
continue
project.organization = organization
org_opts = org_options.get(organization.id) or {}
with Hub.current.start_span(op="get_config"):
with metrics.timer("relay_project_configs.get_config.duration"):
project_config = config.get_project_config(
project, org_options=org_opts, full_config=full_config_requested
)
configs[six.text_type(project_id)] = project_config.to_camel_case_dict()
return Response({"configs": configs}, status=200)
# ----------------------------------------------------------------------------
# next file
# ----------------------------------------------------------------------------
from unittest.mock import MagicMock
from kleat.evidence.do_bridge import do_fwd_ctg_lt_bdg, do_fwd_ctg_rt_bdg
import kleat.misc.settings as S
###################################################
# test different situations for do_fwd_ctg_lt_bdg #
###################################################
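# Diagram conventions for the docstrings below: "\\" marks contig bases hard-clipped
# on the left end and "//" marks bases hard-clipped on the right end; each
# do_fwd_ctg_*_bdg call is expected to return a (strand, ctg_offset, tail_len) tuple,
# or None when the hard clipping prevents this bridge read from capturing the
# cleavage site (clv).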
def test_do_fwd_ctg_lt_bdg_with_left_hard_clipping():
"""
TTT
└ACG <-left-tail read
\\XXACGX <-contig
01234567 <-contig coord
^ctg_offset
...XXACGX... <-reference genome
456789 <-genome coord
| ^ref_clv
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 4
read.reference_end = 7
read.cigartuples = ((S.BAM_CSOFT_CLIP, 3), (S.BAM_CMATCH, 3))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CHARD_CLIP, 2), (S.BAM_CMATCH, 6))
contig.infer_query_length.return_value = 8 # including hardclip
ctg_offset = 2 # 4 -2
tail_len = 3
assert do_fwd_ctg_lt_bdg(read, contig) == ('-', ctg_offset, tail_len)
def test_do_fwd_ctg_lt_bdg_with_left_hard_clipping_right_after_ctg_clv():
"""
TTT
└ACG <-left-tail read
\\\\\CGX <-contig, the "\\\\\" part would appear in another position in genome, although the read is well aligned to it
012345678 <-contig coord
^ctg_offset(won't be captured by this read)
...CGX... <-reference genome
78901 <-genome coord
^starting the contig2genome alignment
ctg_offset would be 4 (ctg_clv) - 5 (hardclip) = -1 < 0, so this read
won't capture the genome offset of the clv, but its mate potentially will.
"""
read = MagicMock()
read.reference_start = 4
read.reference_end = 7
read.cigartuples = ((S.BAM_CSOFT_CLIP, 3), (S.BAM_CMATCH, 3))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CHARD_CLIP, 5), (S.BAM_CMATCH, 3))
contig.infer_query_length.return_value = 8 # including hardclip
assert do_fwd_ctg_lt_bdg(read, contig) is None
def test_do_fwd_ctg_lt_bdg_with_left_hard_clipping_right_before_ctg_clv():
"""
TTT
└ACG <-left-tail read
\\\\ACGX <-contig
012345678 <-contig coord
^ctg_offset
...ACGX... <-reference genome
78901 <-genome coord
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 4
read.reference_end = 7
read.cigartuples = ((S.BAM_CSOFT_CLIP, 3), (S.BAM_CMATCH, 3))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CHARD_CLIP, 4), (S.BAM_CMATCH, 4))
contig.infer_query_length.return_value = 8 # including hardclip
ctg_offset = 0 # due to hardclipping
tail_len = 3
assert do_fwd_ctg_lt_bdg(read, contig) == ('-', ctg_offset, tail_len)
def test_do_fwd_ctg_lt_bdg_with_left_hard_clipping_1bp_before_ctg_clv():
"""
TTT
└ACG <-left-tail read
\\\GACGX <-contig
01234567 <-contig coord
^ctg_offset
...GACGX... <-reference genome
678901 <-genome coord
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 4
read.reference_end = 7
read.cigartuples = ((S.BAM_CSOFT_CLIP, 3), (S.BAM_CMATCH, 3))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CHARD_CLIP, 3), (S.BAM_CMATCH, 5))
contig.infer_query_length.return_value = 8 # including hardclip
ctg_offset = 1 # due to hardclipping
tail_len = 3
assert do_fwd_ctg_lt_bdg(read, contig) == ('-', ctg_offset, tail_len)
def test_do_fwd_ctg_lt_bdg_with_right_hard_clipping():
"""
such right hardclipping (not passing the ctg_clv) won't have an effect in such case
TT
└AC <-left-tail read
XXXACGXX// <-contig
0123456789 <-contig coord
^ctg_offset
..XXXACGXX... <-reference genome
34567890 <-genome coord
| ^ref_clv
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 3
read.reference_end = 5
read.cigartuples = ((S.BAM_CSOFT_CLIP, 2), (S.BAM_CMATCH, 2))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CMATCH, 8), (S.BAM_CHARD_CLIP, 2))
contig.infer_query_length.return_value = 10 # including hardclip
ctg_offset = 3
tail_len = 2
assert do_fwd_ctg_lt_bdg(read, contig) == ('-', ctg_offset, tail_len)
def test_do_fwd_ctg_lt_bdg_with_right_hard_clipping_passing_ctg_clv():
"""
TT
└AC <-left-tail read
XX//// <-contig
012345 <-contig coord
^ctg_offset
..XX... <-reference genome
34 <-genome coord
| ^ref_clv
^starting the contig2genome alignment
    ctg_offset would be 3 (ctg_clv) - 5 (hardclip) = -1 < 0, so this read
    won't capture the genome offset of the clv.
"""
read = MagicMock()
read.reference_start = 3
read.reference_end = 5
read.cigartuples = ((S.BAM_CSOFT_CLIP, 2), (S.BAM_CMATCH, 2))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CMATCH, 2), (S.BAM_CHARD_CLIP, 4))
contig.infer_query_length.return_value = 6 # including hardclip
assert do_fwd_ctg_lt_bdg(read, contig) is None
def test_do_fwd_ctg_lt_bdg_with_right_hard_clipping_right_on_ctg_clv_edgecase():
"""
TT
└AC <-left-tail read
XX/// <-contig
012345 <-contig coord
^ctg_offset
...XX.. <-reference genome
34 <-genome coord
| ^ref_clv
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 2
read.reference_end = 4
read.cigartuples = ((S.BAM_CSOFT_CLIP, 2), (S.BAM_CMATCH, 2))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CMATCH, 2), (S.BAM_CHARD_CLIP, 3))
contig.infer_query_length.return_value = 5 # including hardclip
assert do_fwd_ctg_lt_bdg(read, contig) is None
def test_do_fwd_ctg_lt_bdg_with_right_hard_clipping_right_after_ctg_clv_edgecase():
"""
TT
└AC <-left-tail read
XXXA// <-contig
012345 <-contig coord
^ctg_offset
..XX... <-reference genome
34 <-genome coord
| ^ref_clv
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 3
read.reference_end = 5
read.cigartuples = ((S.BAM_CSOFT_CLIP, 2), (S.BAM_CMATCH, 2))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CMATCH, 4), (S.BAM_CHARD_CLIP, 2))
contig.infer_query_length.return_value = 6 # including hardclip
ctg_offset = 3
tail_len = 2
assert do_fwd_ctg_lt_bdg(read, contig) == ('-', ctg_offset, tail_len)
###################################################
# test different situations for do_fwd_ctg_rt_bdg #
###################################################
def test_do_fwd_ctg_rt_bdg_with_left_hardclipping():
"""
AA
CCG┘ <-right-tail read
\\\XCCGXX <-contig
0123456789 <-contig coord
| ^ctg_offset
...XXXXCCGXX... <-reference genome
4567890 <-genome coord
| ^ref_clv
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 4
read.reference_end = 7
read.cigartuples = ((S.BAM_CMATCH, 3), (S.BAM_CSOFT_CLIP, 2))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CHARD_CLIP, 3), (S.BAM_CMATCH, 7))
contig.infer_query_length.return_value = 9
ctg_offset = 3
tail_len = 2
assert do_fwd_ctg_rt_bdg(read, contig) == ('+', ctg_offset, tail_len)
def test_do_fwd_ctg_rt_bdg_with_left_hardclipping_passing_ctg_clv():
"""
AA
CCG┘ <-right-tail read
\\\\\X <-contig
0123456 <-contig coord
^ctg_offset
...X... <-reference genome
45678 <-genome coord
ref_clv^ ^starting the contig2genome alignment
ref_clv won't be captured by this bridge read
"""
read = MagicMock()
read.reference_start = 1
read.reference_end = 4
read.cigartuples = ((S.BAM_CMATCH, 3), (S.BAM_CSOFT_CLIP, 2))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CHARD_CLIP, 5), (S.BAM_CMATCH, 1))
contig.infer_query_length.return_value = 6
assert do_fwd_ctg_rt_bdg(read, contig) is None
def test_do_fwd_ctg_rt_bdg_with_left_hardclipping_right_on_ctg_clv():
"""
AA
CCG┘ <-right-tail read
\\\\XX <-contig
0123456 <-contig coord
^ctg_offset
...XX... <-reference genome
45678 <-genome coord
ref_clv^ ^starting the contig2genome alignment
ref_clv won't be captured by this bridge read
"""
read = MagicMock()
read.reference_start = 1
read.reference_end = 4
read.cigartuples = ((S.BAM_CMATCH, 3), (S.BAM_CSOFT_CLIP, 2))
contig = MagicMock()
contig.cigartuples = ((S.BAM_CHARD_CLIP, 4), (S.BAM_CMATCH, 2))
contig.infer_query_length.return_value = 6
assert do_fwd_ctg_rt_bdg(read, contig) is None
def test_do_fwd_ctg_rt_bdg_with_right_hardclipping():
"""
right hardclipping won't have an effect in such case
AAAAA
CG┘ <-right-tail read
XCCGXX// <-contig
012345678 <-contig coord
| ^ctg_offset
...XCCGXX... <-reference genome
345678901 <-genome coord
| ^ref_clv
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 2
read.reference_end = 4
read.cigartuples = ((S.BAM_CMATCH, 2), (S.BAM_CSOFT_CLIP, 5))
contig = MagicMock()
contig.infer_query_length.return_value = 8
contig.cigartuples = (
(S.BAM_CMATCH, 6),
(S.BAM_CHARD_CLIP, 2),
)
ctg_offset = 3
tail_len = 5
assert do_fwd_ctg_rt_bdg(read, contig) == ('+', ctg_offset, tail_len)
def test_do_fwd_ctg_rt_bdg_with_right_hardclipping_right_before_ctg_clv():
"""
right hardclipping won't have an effect in such case
AAAAA
CG┘ <-right-tail read
XCCG// <-contig
0123456 <-contig coord
| ^ctg_offset
...XCCG... <-reference genome
34567 <-genome coord
| ^ref_clv
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 2
read.reference_end = 4
read.cigartuples = ((S.BAM_CMATCH, 2), (S.BAM_CSOFT_CLIP, 5))
contig = MagicMock()
contig.infer_query_length.return_value = 6
contig.cigartuples = (
(S.BAM_CMATCH, 4),
(S.BAM_CHARD_CLIP, 2),
)
ctg_offset = 3
tail_len = 5
assert do_fwd_ctg_rt_bdg(read, contig) == ('+', ctg_offset, tail_len)
def test_do_fwd_ctg_rt_bdg_with_right_hardclipping_passing_ctg_clv():
"""
AAAAA
CG┘ <-right-tail read
XC/// <-contig
012345678 <-contig coord
| ^ctg_offset
...XC... <-reference genome
345678901 <-genome coord
| ^ref_clv (won't be captured by this bridge read)
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 2
read.reference_end = 4
read.cigartuples = ((S.BAM_CMATCH, 2), (S.BAM_CSOFT_CLIP, 5))
contig = MagicMock()
contig.infer_query_length.return_value = 5
contig.cigartuples = (
(S.BAM_CMATCH, 2),
(S.BAM_CHARD_CLIP, 3),
)
assert do_fwd_ctg_rt_bdg(read, contig) is None
def test_do_fwd_ctg_rt_bdg_with_right_hardclipping_right_on_ctg_clv():
"""
AAAAA
CG┘ <-right-tail read
XCC/// <-contig
0123456 <-contig coord
| ^ctg_offset
...XCC... <-reference genome
3456789 <-genome coord
| ^ref_clv
^starting the contig2genome alignment
"""
read = MagicMock()
read.reference_start = 2
read.reference_end = 4
read.cigartuples = ((S.BAM_CMATCH, 2), (S.BAM_CSOFT_CLIP, 5))
contig = MagicMock()
contig.infer_query_length.return_value = 6
contig.cigartuples = (
(S.BAM_CMATCH, 3),
(S.BAM_CHARD_CLIP, 3),
)
assert do_fwd_ctg_rt_bdg(read, contig) is None
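# Note: across these hard-clipping scenarios, do_fwd_ctg_rt_bdg is expected to
# return a (strand, ctg_offset, tail_len) tuple such as ('+', 3, 2), or None
# when the cleavage site falls inside the clipped-away region and therefore
# cannot be captured by the bridge read (as asserted in the tests above).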
|
StarcoderdataPython
|
6605272
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import numpy as np
from .Util import *
import torch.distributed as dist
class ConvBlock_ablation(nn.Module):
def __init__(
self,
inputSize,
outputSize,
hiddenSize,
kernelSize,
stride = 1,
dropoutProb = 0.0
):
super().__init__()
if dist.is_available() and dist.is_initialized():
print("syncBatchnorm enabled")
from .SyncBN import SynchronizedBatchNorm2d
BatchNorm2d = SynchronizedBatchNorm2d
else:
BatchNorm2d = nn.BatchNorm2d
if isinstance(stride,int):
stride = (stride, stride)
        nPad = (kernelSize-1, kernelSize-1)  # note: currently unused; padding is set via kernelSize//2 in the convs below
self.conv1 = nn.Conv2d(inputSize, hiddenSize, kernelSize, padding = kernelSize//2)
self.bn1 = BatchNorm2d(hiddenSize, momentum = 0.01 )
self.conv2 = nn.Conv2d(hiddenSize, outputSize, kernelSize, padding = kernelSize//2)
self.bn2 = BatchNorm2d(outputSize, momentum = 0.01)
self.downSampler = nn.Identity()
if stride != (1,1):
self.downSampler = nn.AvgPool2d(stride, stride=stride)
self.dropout = nn.Dropout(dropoutProb)
def forward(self, x):
z = self.conv1(x.view_as(x))
z = self.bn1(z)
z = F.gelu(z)
z = self.conv2(z.view_as(z))
z = self.bn2(z)
z = F.gelu(z)
z = self.downSampler(z)
return z
class SimpleRNN(nn.Module):
def __init__(self, inputSize, hiddenSize, outputSize, nLayers, dropoutProb):
super().__init__()
self.grus = nn.GRU(inputSize, hiddenSize, num_layers = nLayers, bidirectional=True, dropout=dropoutProb)
self.outProj = nn.Linear(hiddenSize*2, outputSize)
def forward(self,x ):
if self.training:
checkpoint = torch.utils.checkpoint.checkpoint
else:
checkpoint = checkpointByPass
y, _ = checkpoint(self.grus,x)
y = self.outProj(y)
return y
class ScoreMatrixPostProcessor(nn.Module):
def __init__(self, nTarget, nHidden, dropoutProb):
super().__init__()
self.map = nn.Sequential(
nn.Conv2d(nTarget, nHidden, 3, padding= 2),
nn.GELU(),
nn.Dropout(dropoutProb),
nn.Conv2d(nHidden, nTarget, 3)
)
def forward(self, S):
if self.training:
checkpointSequential = torch.utils.checkpoint.checkpoint_sequential
else:
checkpointSequential = checkpointSequentialByPass
S = S.permute(2, 3, 0,1)
S = checkpointSequential(self.map,2, S)
S = S.permute(2,3, 0, 1).contiguous()
return S
class PairwiseFeatureBatch(nn.Module):
def __init__(self,
inputSize,
outputSize,
dropoutProb = 0.0,
lengthScaling=True,
postConv=True,
disableUnitary=False,
hiddenSize = None
):
super().__init__()
if hiddenSize is None:
hiddenSize = outputSize*4
self.scoreMap = nn.Sequential(
nn.Linear(inputSize*6, hiddenSize),
nn.GELU(),
nn.Dropout(dropoutProb),
nn.Linear(hiddenSize, hiddenSize),
nn.GELU(),
nn.Dropout(dropoutProb),
nn.Linear(hiddenSize, outputSize),
)
self.scoreMapSkip = nn.Sequential(
nn.Linear(inputSize*3, hiddenSize),
nn.GELU(),
nn.Dropout(dropoutProb),
nn.Linear(hiddenSize, hiddenSize),
nn.GELU(),
nn.Dropout(dropoutProb),
nn.Linear(hiddenSize, outputSize)
)
self.lengthScaling = lengthScaling
self.disableUnitary=disableUnitary
self.post = nn.Identity()
if postConv:
self.post = ScoreMatrixPostProcessor(outputSize, outputSize*3, dropoutProb)
def computeChunk(self, x, x_cum, x_sqr_cum,x_cube_cum, idxA, idxB):
# A: end
# B: begin
curA = x[idxA]
curB = x[idxB]
lengthBA = (idxA-idxB)+1
lengthBA = lengthBA.view(-1,1, 1)
moment1 = (x_cum[idxA+1]- x_cum[idxB])/lengthBA
moment2 = (x_sqr_cum[idxA+1]- x_sqr_cum[idxB])/lengthBA
moment3 = (x_cube_cum[idxA+1]- x_cube_cum[idxB])/lengthBA
curInput = torch.cat([curA, curB, curA*curB, moment1, moment2, moment3], dim = -1)
curScore = self.scoreMap(curInput)
return curScore
def computeSkipScore(self, x):
curA = x[:-1]
curB = x[1:]
curInput = torch.cat([curA, curB, curA*curB], dim = -1)
curScore = self.scoreMapSkip(curInput)
return curScore
def forward(self, x, nBlock = 4000):
if self.training:
checkpoint = torch.utils.checkpoint.checkpoint
else:
checkpoint = checkpointByPass
# input shape: [T, nBatch, .]
assert(len(x.shape)==3)
nEntry = x.shape[0]
indices = torch.tril_indices(nEntry,nEntry, device = x.device)
nTotal = indices.shape[1]
S_all = []
x_cum = torch.cumsum(F.pad(x, (0,0,0,0,1,0)), dim =0)
x_sqr_cum = torch.cumsum(F.pad(x.pow(2), (0,0,0,0,1,0)), dim =0)
x_cube_cum = torch.cumsum(F.pad(x.pow(3), (0,0,0,0,1,0)), dim =0)
for lIdx in range(0, nTotal, nBlock):
if lIdx+nBlock< nTotal:
idxA = indices[0, lIdx:lIdx+nBlock]
idxB = indices[1, lIdx:lIdx+nBlock]
else:
idxA = indices[0, lIdx:]
idxB = indices[1, lIdx:]
# curScore = self.computeChunk(x, idxA, idxB)
curScore = checkpoint(self.computeChunk, x, x_cum, x_sqr_cum, x_cube_cum, idxA, idxB)
S_all.append(curScore)
s_val = torch.cat(S_all, dim = 0)
S_coo = torch.sparse_coo_tensor(indices, s_val, (nEntry, nEntry, s_val.shape[-2], s_val.shape[-1]))
S = S_coo.to_dense()
# print(S.std(), S.max(), S.min())
S = self.post(S)
# print(S.std(), S.max(), S.min())
if self.lengthScaling:
tmpIdx = torch.arange(nEntry, device = S.device)
lenBA = (tmpIdx.unsqueeze(-1)- tmpIdx.unsqueeze(0)).abs().clamp(1)
S = lenBA.unsqueeze(-1).unsqueeze(-1)*S
# curScore = lengthBA*curScore
S_skip = self.computeSkipScore(x)
if self.disableUnitary:
S_skip =S_skip*0
return S, S_skip
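# A minimal usage sketch (not part of the original module): it assumes that
# checkpointByPass from .Util simply calls the wrapped function, and it runs in
# eval mode so torch.utils.checkpoint is bypassed. The input is
# [T, nBatch, inputSize]; the dense pairwise score S comes back as
# [T, T, nBatch, outputSize] and the unary skip scores as [T-1, nBatch, outputSize].
if __name__ == '__main__':
    model = PairwiseFeatureBatch(inputSize=8, outputSize=4, postConv=False)
    model.eval()
    x = torch.randn(16, 2, 8)
    with torch.no_grad():
        S, S_skip = model(x)
    print(S.shape, S_skip.shape)  # expected: [16, 16, 2, 4] and [15, 2, 4]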
|
StarcoderdataPython
|
12852883
|
<reponame>0lru/p3ui
from p3ui import *
import matplotlib.pyplot as plt
import numpy as np
def gradient_image(ax, extent, direction=0.3, cmap_range=(0, 1), **kwargs):
phi = direction * np.pi / 2
v = np.array([np.cos(phi), np.sin(phi)])
X = np.array([[v @ [1, 0], v @ [1, 1]],
[v @ [0, 0], v @ [0, 1]]])
a, b = cmap_range
X = a + (b - a) / X.max() * X
im = ax.imshow(X, extent=extent, interpolation='bicubic',
vmin=0, vmax=1, **kwargs)
return im
def gradient_bar(ax, x, y, width=0.5, bottom=0):
for left, top in zip(x, y):
right = left + width
gradient_image(ax, extent=(left, right, bottom, top),
cmap=plt.cm.Blues_r, cmap_range=(0, 0.8))
class GradientChart(MatplotlibSurface):
# https://matplotlib.org/stable/gallery/lines_bars_and_markers/bar_stacked.html#sphx-glr-gallery-lines-bars-and-markers-bar-stacked-py
def __init__(self, **kwargs):
width = kwargs.pop('width', (auto, 1, 1))
height = kwargs.pop('height', (auto, 1, 1))
super().__init__(width=width, height=height, **kwargs)
self._update()
def _update(self):
with self as figure:
np.random.seed(19680801)
figure.clear()
ax = figure.add_subplot()
ax.set(xlim=(0, 10), ylim=(0, 1), autoscale_on=False)
gradient_image(ax, direction=1, extent=(0, 1, 0, 1), transform=ax.transAxes,
cmap=plt.cm.RdYlGn, cmap_range=(0.2, 0.8), alpha=0.5)
N = 10
x = np.arange(N) + 0.15
y = np.random.rand(N)
gradient_bar(ax, x, y, width=0.7)
ax.set_aspect('auto')
async def update(self):
self._update()
|
StarcoderdataPython
|
1699023
|
<filename>Medium/1079.LetterTilePossibilities.py
'''
You have n tiles, where each tile has one letter tiles[i]
printed on it.
Return the number of possible non-empty sequences of
letters you can make using the letters printed on those
tiles.
Example:
Input: tiles = "AAB"
Output: 8
Explanation: The possible sequences are "A", "B", "AA",
"AB", "BA", "AAB", "ABA", "BAA".
Example:
Input: tiles = "AAABBC"
Output: 188
Example:
Input: tiles = "V"
Output: 1
Constraints:
- 1 <= tiles.length <= 7
- tiles consists of uppercase English letters.
'''
#Difficulty: Medium
#86 / 86 test cases passed.
#Runtime: 80 ms
#Memory Usage: 15.3 MB
#Runtime: 80 ms, faster than 63.69% of Python3 online submissions for Letter Tile Possibilities.
#Memory Usage: 15.3 MB, less than 57.87% of Python3 online submissions for Letter Tile Possibilities.
from itertools import permutations
class Solution:
def numTilePossibilities(self, tiles: str) -> int:
n = 0
for i in range(1, len(tiles)+1):
n += len(set(permutations(tiles, i)))
return n
|
StarcoderdataPython
|
6688274
|
#! /usr/bin/env python3
import os
ANGLER_API_BASE_URL = 'https://angler.heliohost.org/'
ANGLER_BASE_URL = 'https://en.ff14angler.com'
# Integer number of seconds
ANGLER_DELAY_BETWEEN_REQUESTS_DURATION = 3
# Integer number of seconds
ANGLER_PAGE_LOAD_WAIT_DURATION = 180
ANGLER_SPEARFISHING_BAIT_ITEM_ID = 17726 # Spearfishing Gig Offhand Item
ANGLER_SPEARFISHING_BAIT_ITEM_LEVEL = 61
DEBUG_SERVER = False
MODULE_DIRECTORY = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
SQLITE_DIRECTORY = os.path.join(MODULE_DIRECTORY, 'sqlite_db')
SQLITE_DATABASE = os.path.join(SQLITE_DIRECTORY, 'angler_api.db')
XIVAPI_BASE_URL = 'https://xivapi.com'
config_settings = {
'EXPORT_DIRECTORY': os.path.join(os.path.abspath(os.path.join(MODULE_DIRECTORY, '..')), 'static')
}
|
StarcoderdataPython
|
4810540
|
# Evaluate the expression that is guaranteed to have no parentheses
def aoc_raw_eval(expression: str) -> int:
S = expression.split('*')
result = 1
for s in S:
sm = map(int, s.split('+'))
ss = sum(sm)
result *= ss
return result
def aoc_eval(expression: str) -> int:
new_expression = ""
i = 0
while i < len(expression):
ch = expression[i]
if ch == '(':
lp = 1
i += 1
ri = i
while lp > 0:
if expression[ri] == '(':
lp += 1
elif expression[ri] == ')':
lp -= 1
ri += 1
new_expression += str(aoc_eval(expression[i:ri-1]))
            i = ri  # resume scanning just after the matching ')' (ri already points one past it)
else:
new_expression += ch
i += 1
return aoc_raw_eval(new_expression)
def run_solution(expressions: list[str]):
results_sum = 0
stack = []
for expression in expressions:
results_sum += aoc_eval(expression)
print(f"The sum of results is {results_sum}")
|
StarcoderdataPython
|
1830525
|
"""
The MIT License (MIT)
Copyright (c) Serenity Software, LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# pylint: disable=invalid-name,too-many-public-methods,missing-docstring
from cahoots.confidence.normalizers.character import CharacterWithoutBoolean
from cahoots.result import ParseResult
import unittest
class CharacterWithoutBooleanTests(unittest.TestCase):
def test_test(self):
self.assertFalse(CharacterWithoutBoolean.test(
['Character', 'Boolean'], []
))
self.assertTrue(CharacterWithoutBoolean.test(
['Character', 'Postal Code'], []
))
def test_normalizer(self):
char_result = ParseResult('Character', None, 25)
pc_result = ParseResult('Postal Code', None, 80)
results = CharacterWithoutBoolean.normalize([char_result, pc_result])
count = 0
for res in results:
if res.type == 'Character':
count += 1
self.assertEqual(res.confidence, 100)
elif res.type == "Postal Code":
count += 1
self.assertEqual(res.confidence, 80)
self.assertEqual(count, len(results))
|
StarcoderdataPython
|
222136
|
<gh_stars>0
from typing import Any, Dict, List, Optional, Tuple, Union, cast
# NB: we cannot use the standard Enum, because after "class Color(Enum): RED = 1"
# the value of Color.RED is like {'_value_': 1, '_name_': 'RED', '__objclass__': etc}
# and we need it to be 1, literally (that's what we'll get from the client)
class Enum:
pass
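# e.g. with this plain base class, "class Color(Enum): RED = 1" leaves
# Color.RED equal to the literal int 1, which is exactly what the client sends back.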
#:alias An array of 4 elements representing extent coordinates [minx, miny, maxx, maxy]
Extent = Tuple[float, float, float, float]
#:alias Point coordinates [x, y]
Point = Tuple[float, float]
#:alias Size [width, height]
Size = Tuple[float, float]
#:alias A value with a unit
Measurement = Tuple[float, str]
#:alias An XML generator tag
Tag = tuple
class Axis(Enum):
"""Axis orientation."""
xy = 'xy'
yx = 'yx'
#:alias Verbatim literal type
Literal = str
#:alias Valid readable file path on the server
FilePath = str
#:alias Valid readable directory path on the server
DirPath = str
#:alias String like "1w 2d 3h 4m 5s" or a number of seconds
Duration = str
#:alias CSS color name
Color = str
#:alias Regular expression, as used in Python
Regex = str
#:alias String with {attribute} placeholders
FormatStr = str
#:alias CRS code like "EPSG:3857"
Crs = str
#:alias ISO date like "2019-01-30"
Date = str
#:alias ISO date/time like "2019-01-30 01:02:03"
DateTime = str
#:alias Http or https URL
Url = str
# dummy classes to support extension typing
class ext:
class action:
class Config:
pass
class Props:
pass
class auth:
class method:
class Config:
pass
class provider:
class Config:
pass
class template:
class Config:
pass
class Props:
pass
class db:
class provider:
class Config:
pass
class layer:
class Config:
pass
class Props:
pass
class search:
class provider:
class Config:
pass
class storage:
class Config:
pass
class helper:
class Config:
pass
class ows:
class provider:
class Config:
pass
class service:
class Config:
pass
# basic data type
class Data:
"""Basic data object."""
def __init__(self, *args, **kwargs):
self._extend(args, kwargs)
def __repr__(self):
return repr(vars(self))
def __getattr__(self, item):
if item.startswith('_'):
# do not use None fallback for special props
raise AttributeError()
return None
def get(self, k, default=None):
return vars(self).get(k, default)
def _extend(self, args, kwargs):
d = {}
for a in args:
if isinstance(a, dict):
d.update(a)
elif isinstance(a, Data):
d.update(vars(a))
d.update(kwargs)
vars(self).update(d)
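# e.g. (illustrative): d = Data({'a': 1}, b=2) yields d.a == 1, d.b == 2, and
# d.missing is None, thanks to the __getattr__ fallback above.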
# configuration primitives
class Config(Data):
"""Configuration base type"""
uid: str = '' #: unique ID
class WithType(Config):
type: str #: object type
class AccessType(Enum):
allow = 'allow'
deny = 'deny'
class Access(Config):
"""Access rights definition for authorization roles"""
type: AccessType #: access type (deny or allow)
role: str #: a role to which this rule applies
class WithAccess(Config):
access: Optional[List[Access]] #: access rights
class WithTypeAndAccess(Config):
type: str #: object type
access: Optional[List[Access]] #: access rights
# attributes
class AttributeType(Enum):
bool = 'bool'
bytes = 'bytes'
date = 'date'
datetime = 'datetime'
float = 'float'
floatlist = 'floatlist'
geometry = 'geometry'
int = 'int'
intlist = 'intlist'
str = 'str'
strlist = 'strlist'
text = 'text'
time = 'time'
class GeometryType(Enum):
curve = 'CURVE'
geomcollection = 'GEOMCOLLECTION'
geometry = 'GEOMETRY'
linestring = 'LINESTRING'
multicurve = 'MULTICURVE'
multilinestring = 'MULTILINESTRING'
multipoint = 'MULTIPOINT'
multipolygon = 'MULTIPOLYGON'
multisurface = 'MULTISURFACE'
point = 'POINT'
polygon = 'POLYGON'
polyhedralsurface = 'POLYHEDRALSURFACE'
surface = 'SURFACE'
class Attribute(Data):
name: str
title: str = ''
type: AttributeType = 'str'
value: Optional[Any]
editable: bool = True
# request params and responses
class Params(Data):
projectUid: Optional[str] #: project uid
localeUid: Optional[str] #: locale for this request
class NoParams(Data):
pass
class ResponseError(Data):
status: int
info: str
class Response(Data):
error: Optional[ResponseError]
class HttpResponse(Response):
mime: str
content: str
status: int
class FileResponse(Response):
mime: str
path: str
status: int
attachment_name: str
# props baseclass
class Props(Data):
"""Properties base type"""
pass
class Bounds(Data):
crs: 'Crs'
extent: 'Extent'
class CorsOptions(Data):
allow_credentials: bool
allow_headers: Optional[List[str]]
allow_origin: str
class DocumentRoot(Data):
allow_mime: Optional[List[str]]
deny_mime: Optional[List[str]]
dir: 'DirPath'
class FeatureProps(Data):
attributes: Optional[List[Attribute]]
elements: Optional[dict]
layerUid: Optional[str]
shape: Optional['ShapeProps']
style: Optional['StyleProps']
uid: Optional[str]
class IBaseRequest:
data: Optional[bytes]
environ: dict
input_struct_type: int
is_secure: bool
method: str
output_struct_type: int
params: dict
root: 'IRootObject'
site: 'IWebSite'
text: Optional[str]
def cookie(self, key: str, default: str = None) -> str: pass
def env(self, key: str, default: str = None) -> str: pass
def error_response(self, err) -> 'IResponse': pass
def file_response(self, path: str, mimetype: str, status: int = 200, attachment_name: str = None) -> 'IResponse': pass
def has_param(self, key: str) -> bool: pass
def header(self, key: str, default: str = None) -> str: pass
def init(self): pass
def param(self, key: str, default: str = None) -> str: pass
def redirect_response(self, location, status=302): pass
def response(self, content: str, mimetype: str, status: int = 200) -> 'IResponse': pass
def struct_response(self, data: 'Response', status: int = 200) -> 'IResponse': pass
def url_for(self, url: 'Url') -> 'Url': pass
class IFeature:
attr_dict: dict
attributes: List[Attribute]
category: str
data_model: Optional['IModel']
elements: dict
full_uid: str
layer: Optional['ILayer']
props: 'FeatureProps'
props_for_render: 'FeatureProps'
shape: Optional['IShape']
style: Optional['IStyle']
template_context: dict
templates: Optional[List['ITemplate']]
uid: str
def apply_data_model(self, model: 'IModel' = None) -> 'IFeature': pass
def apply_templates(self, templates: List['ITemplate'] = None, extra_context: dict = None, keys: List[str] = None) -> 'IFeature': pass
def attr(self, name: str): pass
def to_geojson(self) -> dict: pass
def to_svg(self, rv: 'MapRenderView', style: 'IStyle' = None) -> str: pass
def to_svg_tags(self, rv: 'MapRenderView', style: 'IStyle' = None) -> List['Tag']: pass
def transform_to(self, crs) -> 'IFeature': pass
class IObject:
access: 'Access'
children: List['IObject']
config: Config
parent: 'IObject'
props: Props
root: 'IRootObject'
uid: str
def append_child(self, obj: 'IObject') -> 'IObject': pass
def create_child(self, klass, cfg) -> 'IObject': pass
def get_children(self, klass) -> List['IObject']: pass
def get_closest(self, klass) -> 'IObject': pass
def initialize(self, cfg): pass
def is_a(self, klass): pass
def post_configure(self): pass
def post_initialize(self): pass
def props_for(self, user) -> Optional[dict]: pass
def set_uid(self, uid): pass
def var(self, key, default=None, parent=False): pass
class IResponse:
def add_header(self, key, value): pass
def delete_cookie(self, key, **kwargs): pass
def set_cookie(self, key, **kwargs): pass
class IRole:
def can_use(self, obj, parent=None): pass
class ISession:
changed: bool
data: dict
method: 'IAuthMethod'
type: str
uid: str
user: 'IUser'
def get(self, key, default=None): pass
def set(self, key, val): pass
class IShape:
area: float
bounds: 'Bounds'
centroid: 'IShape'
crs: str
ewkb: bytes
ewkb_hex: str
ewkt: str
extent: 'Extent'
props: 'ShapeProps'
srid: int
type: 'GeometryType'
wkb: bytes
wkb_hex: str
wkt: str
x: float
y: float
def intersects(self, shape: 'IShape') -> bool: pass
def to_multi(self) -> 'IShape': pass
def to_type(self, new_type: 'GeometryType') -> 'IShape': pass
def tolerance_polygon(self, tolerance, resolution=None) -> 'IShape': pass
def transformed_to(self, to_crs, **kwargs) -> 'IShape': pass
class IStyle:
name: str
props: 'StyleProps'
text: str
type: 'StyleType'
values: 'StyleValues'
class IUser:
attributes: dict
display_name: str
fid: str
is_guest: bool
props: 'UserProps'
provider: 'IAuthProvider'
roles: List[str]
uid: str
def attribute(self, key: str, default: str = '') -> str: pass
def can_use(self, obj, parent=None) -> bool: pass
def has_role(self, role: str) -> bool: pass
def init_from_data(self, provider, uid, roles, attributes) -> 'IUser': pass
def init_from_source(self, provider, uid, roles=None, attributes=None) -> 'IUser': pass
class LayerLegend(Data):
enabled: bool
path: str
template: 'ITemplate'
url: str
class MapRenderInput(Data):
background_color: int
items: List['MapRenderInputItem']
view: 'MapRenderView'
class MapRenderInputItem(Data):
dpi: int
features: List['IFeature']
fragment: 'SvgFragment'
layer: 'ILayer'
opacity: float
print_as_vector: bool
style: 'IStyle'
sub_layers: List[str]
type: str
class MapRenderInputItemType(Enum):
features = 'features'
fragment = 'fragment'
image = 'image'
image_layer = 'image_layer'
svg_layer = 'svg_layer'
class MapRenderOutput(Data):
base_dir: str
items: List['MapRenderOutputItem']
view: 'MapRenderView'
class MapRenderOutputItem(Data):
path: str
tags: List['Tag']
type: str
class MapRenderView(Data):
bounds: 'Bounds'
center: 'Point'
dpi: int
rotation: int
scale: int
size_mm: 'Size'
size_px: 'Size'
class MetaContact(Data):
address: str
area: str
city: str
country: str
email: str
fax: str
organization: str
person: str
phone: str
position: str
role: str
url: str
zip: str
class MetaData(Data):
abstract: str
accessConstraints: str
attribution: str
authorityIdentifier: str
authorityName: str
authorityUrl: 'Url'
catalogCitationUid: str
catalogUid: str
contact: 'MetaContact'
dateBegin: 'DateTime'
dateCreated: 'DateTime'
dateEnd: 'DateTime'
dateUpdated: 'DateTime'
fees: str
image: 'Url'
insipreKeywords: List['MetaInspireMandatoryKeyword']
insipreMandatoryKeyword: 'MetaInspireMandatoryKeyword'
inspireDegreeOfConformity: 'MetaInspireDegreeOfConformity'
inspireResourceType: 'MetaInspireResourceType'
inspireSpatialDataServiceType: 'MetaInspireSpatialDataServiceType'
inspireSpatialScope: 'MetaInspireSpatialScope'
inspireTheme: 'MetaInspireTheme'
inspireThemeName: str
inspireThemeNameEn: str
isoMaintenanceFrequencyCode: 'MetaIsoMaintenanceFrequencyCode'
isoQualityConformance: 'MetaIsoQualityConformance'
isoQualityLineage: 'MetaIsoQualityLineage'
isoRestrictionCode: str
isoScope: 'MetaIsoScope'
isoScopeName: str
isoSpatialRepresentationType: 'MetaIsoSpatialRepresentationType'
isoTopicCategory: 'MetaIsoTopicCategory'
keywords: List[str]
language: str
license: str
links: List['MetaLink']
name: str
serviceUrl: 'Url'
title: str
url: 'Url'
urlFormat: str
urlType: str
class MetaInspireDegreeOfConformity(Enum):
conformant = 'conformant'
notConformant = 'notConformant'
notEvaluated = 'notEvaluated'
class MetaInspireMandatoryKeyword(Enum):
chainDefinitionService = 'chainDefinitionService'
comEncodingService = 'comEncodingService'
comGeographicCompressionService = 'comGeographicCompressionService'
comGeographicFormatConversionService = 'comGeographicFormatConversionService'
comMessagingService = 'comMessagingService'
comRemoteFileAndExecutableManagement = 'comRemoteFileAndExecutableManagement'
comService = 'comService'
comTransferService = 'comTransferService'
humanCatalogueViewer = 'humanCatalogueViewer'
humanChainDefinitionEditor = 'humanChainDefinitionEditor'
humanFeatureGeneralizationEditor = 'humanFeatureGeneralizationEditor'
humanGeographicDataStructureViewer = 'humanGeographicDataStructureViewer'
humanGeographicFeatureEditor = 'humanGeographicFeatureEditor'
humanGeographicSpreadsheetViewer = 'humanGeographicSpreadsheetViewer'
humanGeographicSymbolEditor = 'humanGeographicSymbolEditor'
humanGeographicViewer = 'humanGeographicViewer'
humanInteractionService = 'humanInteractionService'
humanServiceEditor = 'humanServiceEditor'
humanWorkflowEnactmentManager = 'humanWorkflowEnactmentManager'
infoCatalogueService = 'infoCatalogueService'
infoCoverageAccessService = 'infoCoverageAccessService'
infoFeatureAccessService = 'infoFeatureAccessService'
infoFeatureTypeService = 'infoFeatureTypeService'
infoGazetteerService = 'infoGazetteerService'
infoManagementService = 'infoManagementService'
infoMapAccessService = 'infoMapAccessService'
infoOrderHandlingService = 'infoOrderHandlingService'
infoProductAccessService = 'infoProductAccessService'
infoRegistryService = 'infoRegistryService'
infoSensorDescriptionService = 'infoSensorDescriptionService'
infoStandingOrderService = 'infoStandingOrderService'
metadataGeographicAnnotationService = 'metadataGeographicAnnotationService'
metadataProcessingService = 'metadataProcessingService'
metadataStatisticalCalculationService = 'metadataStatisticalCalculationService'
spatialCoordinateConversionService = 'spatialCoordinateConversionService'
spatialCoordinateTransformationService = 'spatialCoordinateTransformationService'
spatialCoverageVectorConversionService = 'spatialCoverageVectorConversionService'
spatialDimensionMeasurementService = 'spatialDimensionMeasurementService'
spatialFeatureGeneralizationService = 'spatialFeatureGeneralizationService'
spatialFeatureManipulationService = 'spatialFeatureManipulationService'
spatialFeatureMatchingService = 'spatialFeatureMatchingService'
spatialImageCoordinateConversionService = 'spatialImageCoordinateConversionService'
spatialImageGeometryModelConversionService = 'spatialImageGeometryModelConversionService'
spatialOrthorectificationService = 'spatialOrthorectificationService'
spatialPositioningService = 'spatialPositioningService'
spatialProcessingService = 'spatialProcessingService'
spatialProximityAnalysisService = 'spatialProximityAnalysisService'
spatialRectificationService = 'spatialRectificationService'
spatialRouteDeterminationService = 'spatialRouteDeterminationService'
spatialSamplingService = 'spatialSamplingService'
spatialSensorGeometryModelAdjustmentService = 'spatialSensorGeometryModelAdjustmentService'
spatialSubsettingService = 'spatialSubsettingService'
spatialTilingChangeService = 'spatialTilingChangeService'
subscriptionService = 'subscriptionService'
taskManagementService = 'taskManagementService'
temporalProcessingService = 'temporalProcessingService'
temporalProximityAnalysisService = 'temporalProximityAnalysisService'
temporalReferenceSystemTransformationService = 'temporalReferenceSystemTransformationService'
temporalSamplingService = 'temporalSamplingService'
temporalSubsettingService = 'temporalSubsettingService'
thematicChangeDetectionService = 'thematicChangeDetectionService'
thematicClassificationService = 'thematicClassificationService'
thematicFeatureGeneralizationService = 'thematicFeatureGeneralizationService'
thematicGeocodingService = 'thematicGeocodingService'
thematicGeographicInformationExtractionService = 'thematicGeographicInformationExtractionService'
thematicGeoparsingService = 'thematicGeoparsingService'
thematicGoparameterCalculationService = 'thematicGoparameterCalculationService'
thematicImageManipulationService = 'thematicImageManipulationService'
thematicImageProcessingService = 'thematicImageProcessingService'
thematicImageSynthesisService = 'thematicImageSynthesisService'
thematicImageUnderstandingService = 'thematicImageUnderstandingService'
thematicMultibandImageManipulationService = 'thematicMultibandImageManipulationService'
thematicObjectDetectionService = 'thematicObjectDetectionService'
thematicProcessingService = 'thematicProcessingService'
thematicReducedResolutionGenerationService = 'thematicReducedResolutionGenerationService'
thematicSpatialCountingService = 'thematicSpatialCountingService'
thematicSubsettingService = 'thematicSubsettingService'
workflowEnactmentService = 'workflowEnactmentService'
class MetaInspireResourceType(Enum):
dataset = 'dataset'
series = 'series'
service = 'service'
class MetaInspireSpatialDataServiceType(Enum):
discovery = 'discovery'
download = 'download'
invoke = 'invoke'
other = 'other'
transformation = 'transformation'
view = 'view'
class MetaInspireSpatialScope(Enum):
european = 'european'
global_ = 'global'
local = 'local'
national = 'national'
regional = 'regional'
class MetaInspireTheme(Enum):
ac = 'ac'
ad = 'ad'
af = 'af'
am = 'am'
au = 'au'
br = 'br'
bu = 'bu'
cp = 'cp'
ef = 'ef'
el = 'el'
er = 'er'
ge = 'ge'
gg = 'gg'
gn = 'gn'
hb = 'hb'
hh = 'hh'
hy = 'hy'
lc = 'lc'
lu = 'lu'
mf = 'mf'
mr = 'mr'
nz = 'nz'
of = 'of'
oi = 'oi'
pd = 'pd'
pf = 'pf'
ps = 'ps'
rs = 'rs'
sd = 'sd'
so = 'so'
sr = 'sr'
su = 'su'
tn = 'tn'
us = 'us'
class MetaIsoMaintenanceFrequencyCode(Enum):
annually = 'annually'
asNeeded = 'asNeeded'
biannually = 'biannually'
continual = 'continual'
daily = 'daily'
fortnightly = 'fortnightly'
irregular = 'irregular'
monthly = 'monthly'
notPlanned = 'notPlanned'
quarterly = 'quarterly'
unknown = 'unknown'
weekly = 'weekly'
class MetaIsoOnLineFunction(Enum):
download = 'download'
information = 'information'
offlineAccess = 'offlineAccess'
order = 'order'
search = 'search'
class MetaIsoQualityConformance(Data):
explanation: str
qualityPass: bool
specificationDate: str
specificationTitle: str
class MetaIsoQualityLineage(Data):
source: str
sourceScale: int
statement: str
class MetaIsoRestrictionCode(Enum):
copyright = 'copyright'
intellectualPropertyRights = 'intellectualPropertyRights'
license = 'license'
otherRestrictions = 'otherRestrictions'
patent = 'patent'
patentPending = 'patentPending'
restricted = 'restricted'
trademark = 'trademark'
class MetaIsoScope(Enum):
attribute = 'attribute'
attributeType = 'attributeType'
collectionHardware = 'collectionHardware'
collectionSession = 'collectionSession'
dataset = 'dataset'
dimensionGroup = 'dimensionGroup'
feature = 'feature'
featureType = 'featureType'
fieldSession = 'fieldSession'
initiative = 'initiative'
model = 'model'
nonGeographicDataset = 'nonGeographicDataset'
otherAggregate = 'otherAggregate'
platformSeries = 'platformSeries'
productionSeries = 'productionSeries'
propertyType = 'propertyType'
sensor = 'sensor'
sensorSeries = 'sensorSeries'
series = 'series'
service = 'service'
software = 'software'
stereomate = 'stereomate'
tile = 'tile'
transferAggregate = 'transferAggregate'
class MetaIsoSpatialRepresentationType(Enum):
grid = 'grid'
stereoModel = 'stereoModel'
textTable = 'textTable'
tin = 'tin'
vector = 'vector'
video = 'video'
class MetaIsoTopicCategory(Enum):
biota = 'biota'
boundaries = 'boundaries'
climatologyMeteorologyAtmosphere = 'climatologyMeteorologyAtmosphere'
economy = 'economy'
elevation = 'elevation'
environment = 'environment'
farming = 'farming'
geoscientificInformation = 'geoscientificInformation'
health = 'health'
imageryBaseMapsEarthCover = 'imageryBaseMapsEarthCover'
inlandWaters = 'inlandWaters'
intelligenceMilitary = 'intelligenceMilitary'
location = 'location'
oceans = 'oceans'
planningCadastre = 'planningCadastre'
society = 'society'
structure = 'structure'
transportation = 'transportation'
utilitiesCommunication = 'utilitiesCommunication'
class MetaLink(Data):
formatName: str
formatValue: str
function: 'MetaIsoOnLineFunction'
scheme: str
url: 'Url'
class ModelEditor(Data):
accept: Optional[str]
items: Optional[Any]
max: Optional[float]
min: Optional[float]
multiple: Optional[bool]
pattern: Optional[str]
type: str
class ModelRule(Data):
editable: bool
editor: Optional['ModelEditor']
expression: str
format: 'FormatStr'
name: str
source: str
title: str
type: 'AttributeType'
value: Optional[str]
class OwsOperation:
formats: List[str]
get_url: 'Url'
name: str
parameters: dict
post_url: 'Url'
class Projection(Data):
epsg: str
is_geographic: bool
proj4text: str
srid: int
units: str
uri: str
url: str
urn: str
urnx: str
class RewriteRule(Data):
match: 'Regex'
options: Optional[dict]
target: str
class SearchArgs(Data):
axis: str
bounds: 'Bounds'
filter: Optional['SearchFilter']
keyword: Optional[str]
layers: List['ILayer']
limit: int
params: dict
project: 'IProject'
resolution: float
shapes: List['IShape']
source_layer_names: List[str]
tolerance: 'Measurement'
class SearchFilter(Data):
name: str
operator: str
shape: 'IShape'
sub: List['SearchFilter']
value: str
class SearchSpatialContext(Enum):
map = 'map'
view = 'view'
class SelectArgs(Data):
extra_where: Optional[list]
keyword: Optional[str]
limit: Optional[int]
map_tolerance: Optional[float]
shape: Optional['IShape']
sort: Optional[str]
table: 'SqlTable'
uids: Optional[List[str]]
class ShapeProps(Props):
crs: str
geometry: dict
class SourceLayer(Data):
a_level: int
a_path: str
a_uid: str
data_source: dict
is_expanded: bool
is_group: bool
is_image: bool
is_queryable: bool
is_visible: bool
layers: List['SourceLayer']
legend: str
meta: 'MetaData'
name: str
opacity: int
resource_urls: dict
scale_range: List[float]
styles: List['SourceStyle']
supported_bounds: List['Bounds']
supported_crs: List['Crs']
title: str
class SourceStyle(Data):
is_default: bool
legend: 'Url'
meta: 'MetaData'
name: str
class SpecValidator:
def method_spec(self, name): pass
def read_value(self, val, type_name, path='', strict=True): pass
class SqlTable(Data):
geometry_column: str
geometry_crs: 'Crs'
geometry_type: 'GeometryType'
key_column: str
name: str
search_column: str
class SqlTableColumn(Data):
crs: 'Crs'
geom_type: 'GeometryType'
is_geometry: bool
is_key: bool
name: str
native_type: str
type: 'AttributeType'
class StorageDirectory(Data):
category: str
entries: List['StorageEntry']
readable: bool
writable: bool
class StorageElement(Data):
data: dict
entry: 'StorageEntry'
class StorageEntry(Data):
category: str
name: str
class StorageRecord(Data):
category: str
created: int
data: str
name: str
updated: int
user_fid: str
class StyleGeometryOption(Enum):
all = 'all'
none = 'none'
class StyleLabelAlign(Enum):
center = 'center'
left = 'left'
right = 'right'
class StyleLabelFontStyle(Enum):
italic = 'italic'
normal = 'normal'
class StyleLabelFontWeight(Enum):
bold = 'bold'
normal = 'normal'
class StyleLabelOption(Enum):
all = 'all'
none = 'none'
class StyleLabelPlacement(Enum):
end = 'end'
middle = 'middle'
start = 'start'
class StyleMarker(Enum):
arrow = 'arrow'
circle = 'circle'
cross = 'cross'
square = 'square'
class StyleProps(Props):
name: Optional[str]
text: Optional[str]
type: 'StyleType'
values: Optional['StyleValues']
class StyleStrokeLineCap(Enum):
butt = 'butt'
round = 'round'
square = 'square'
class StyleStrokeLineJoin(Enum):
bevel = 'bevel'
miter = 'miter'
round = 'round'
class StyleType(Enum):
css = 'css'
cssSelector = 'cssSelector'
class StyleValues(Data):
fill: Optional['Color']
icon: Optional[str]
label_align: Optional['StyleLabelAlign']
label_background: Optional['Color']
label_fill: Optional['Color']
label_font_family: Optional[str]
label_font_size: Optional[int]
label_font_style: Optional['StyleLabelFontStyle']
label_font_weight: Optional['StyleLabelFontWeight']
label_line_height: Optional[int]
label_max_scale: Optional[int]
label_min_scale: Optional[int]
label_offset_x: Optional[int]
label_offset_y: Optional[int]
label_padding: Optional[List[int]]
label_placement: Optional['StyleLabelPlacement']
label_stroke: Optional['Color']
label_stroke_dasharray: Optional[List[int]]
label_stroke_dashoffset: Optional[int]
label_stroke_linecap: Optional['StyleStrokeLineCap']
label_stroke_linejoin: Optional['StyleStrokeLineJoin']
label_stroke_miterlimit: Optional[int]
label_stroke_width: Optional[int]
marker: Optional['StyleMarker']
marker_fill: Optional['Color']
marker_size: Optional[int]
marker_stroke: Optional['Color']
marker_stroke_dasharray: Optional[List[int]]
marker_stroke_dashoffset: Optional[int]
marker_stroke_linecap: Optional['StyleStrokeLineCap']
marker_stroke_linejoin: Optional['StyleStrokeLineJoin']
marker_stroke_miterlimit: Optional[int]
marker_stroke_width: Optional[int]
offset_x: Optional[int]
offset_y: Optional[int]
point_size: Optional[int]
stroke: Optional['Color']
stroke_dasharray: Optional[List[int]]
stroke_dashoffset: Optional[int]
stroke_linecap: Optional['StyleStrokeLineCap']
stroke_linejoin: Optional['StyleStrokeLineJoin']
stroke_miterlimit: Optional[int]
stroke_width: Optional[int]
with_geometry: Optional['StyleGeometryOption']
with_label: Optional['StyleLabelOption']
class SvgFragment(Data):
points: List['Point']
styles: Optional[List['IStyle']]
tags: List['Tag']
class TemplateLegendMode(Enum):
html = 'html'
image = 'image'
class TemplateOutput(Data):
content: str
mime: str
path: str
class TemplateQualityLevel(Data):
dpi: int
name: str
class UserProps(Data):
displayName: str
class IApi(IObject):
actions: dict
class IApplication(IObject):
api: 'IApi'
auth: 'IAuthManager'
client: Optional['IClient']
meta: 'MetaData'
monitor: 'IMonitor'
qgis_version: str
version: str
web_sites: List['IWebSite']
def developer_option(self, name): pass
def find_action(self, action_type, project_uid=None): pass
def require_helper(self, key): pass
class IAuthManager(IObject):
guest_user: 'IUser'
methods: List['IAuthMethod']
providers: List['IAuthProvider']
sys: 'IAuthProvider'
def authenticate(self, method: 'IAuthMethod', login, password, **kw) -> Optional['IUser']: pass
def close_session(self, sess: 'ISession', req: 'IRequest', res: 'IResponse') -> 'ISession': pass
def create_stored_session(self, type: str, method: 'IAuthMethod', user: 'IUser') -> 'ISession': pass
def delete_stored_sessions(self): pass
def destroy_stored_session(self, sess: 'ISession'): pass
def find_stored_session(self, uid): pass
def get_method(self, type: str) -> Optional['IAuthMethod']: pass
def get_provider(self, uid: str) -> Optional['IAuthProvider']: pass
def get_role(self, name: str) -> 'IRole': pass
def get_user(self, user_fid: str) -> Optional['IUser']: pass
def login(self, method: 'IAuthMethod', login: str, password: str, req: 'IRequest') -> 'ISession': pass
def logout(self, sess: 'ISession', req: 'IRequest') -> 'ISession': pass
def new_session(self, **kwargs): pass
def open_session(self, req: 'IRequest') -> 'ISession': pass
def save_stored_session(self, sess: 'ISession'): pass
def serialize_user(self, user: 'IUser') -> str: pass
def stored_session_records(self) -> List[dict]: pass
def unserialize_user(self, s: str) -> 'IUser': pass
class IAuthMethod(IObject):
type: str
def close_session(self, auth: 'IAuthManager', sess: 'ISession', req: 'IRequest', res: 'IResponse'): pass
def login(self, auth: 'IAuthManager', login: str, password: str, req: 'IRequest') -> Optional['ISession']: pass
def logout(self, auth: 'IAuthManager', sess: 'ISession', req: 'IRequest') -> 'ISession': pass
def open_session(self, auth: 'IAuthManager', req: 'IRequest') -> Optional['ISession']: pass
class IAuthProvider(IObject):
allowed_methods: List[str]
def authenticate(self, method: 'IAuthMethod', login: str, password: str, **kwargs) -> Optional['IUser']: pass
def get_user(self, user_uid: str) -> Optional['IUser']: pass
def user_from_dict(self, d: dict) -> 'IUser': pass
def user_to_dict(self, u: 'IUser') -> dict: pass
class IClient(IObject):
pass
class IDbProvider(IObject):
pass
class ILayer(IObject):
cache_uid: str
can_render_box: bool
can_render_svg: bool
can_render_xyz: bool
crs: str
data_model: Optional['IModel']
default_search_provider: Optional['ISearchProvider']
description: str
description_template: 'ITemplate'
display: str
edit_data_model: Optional['IModel']
edit_options: Data
edit_style: Optional['IStyle']
extent: Optional['Extent']
geometry_type: Optional['GeometryType']
grid_uid: str
has_cache: bool
has_legend: bool
has_search: bool
image_format: str
is_editable: bool
is_group: bool
is_public: bool
layers: List['ILayer']
legend: 'LayerLegend'
map: 'IMap'
meta: 'MetaData'
opacity: float
own_bounds: Optional['Bounds']
ows_feature_name: str
ows_name: str
resolutions: List[float]
style: 'IStyle'
supports_wfs: bool
supports_wms: bool
templates: List['ITemplate']
title: str
def configure_legend(self) -> 'LayerLegend': pass
def configure_metadata(self, provider_meta=None) -> 'MetaData': pass
def configure_search(self): pass
def edit_access(self, user): pass
def edit_operation(self, operation: str, feature_props: List['FeatureProps']) -> List['IFeature']: pass
def get_features(self, bounds: 'Bounds', limit: int = 0) -> List['IFeature']: pass
def mapproxy_config(self, mc): pass
def ows_enabled(self, service: 'IOwsService') -> bool: pass
def render_box(self, rv: 'MapRenderView', extra_params=None): pass
def render_html_legend(self, context=None) -> str: pass
def render_legend(self, context=None) -> Optional[str]: pass
def render_legend_image(self, context=None) -> bytes: pass
def render_svg(self, rv: 'MapRenderView', style: 'IStyle' = None) -> str: pass
def render_svg_tags(self, rv: 'MapRenderView', style: 'IStyle' = None) -> List['Tag']: pass
def render_xyz(self, x, y, z): pass
class IMap(IObject):
bounds: 'Bounds'
center: 'Point'
coordinate_precision: float
crs: 'Crs'
extent: 'Extent'
init_resolution: float
layers: List['ILayer']
resolutions: List[float]
class IModel(IObject):
attribute_names: List[str]
geometry_crs: 'Crs'
geometry_type: 'GeometryType'
rules: List['ModelRule']
def apply(self, atts: List[Attribute]) -> List[Attribute]: pass
def apply_to_dict(self, d: dict) -> List[Attribute]: pass
class IMonitor(IObject):
path_stats: dict
watch_dirs: dict
watch_files: dict
def add_directory(self, path, pattern): pass
def add_path(self, path): pass
def start(self): pass
class IOwsProvider(IObject):
invert_axis_crs: List[str]
meta: 'MetaData'
operations: List['OwsOperation']
source_layers: List['SourceLayer']
supported_crs: List['Crs']
type: str
url: 'Url'
version: str
def find_features(self, args: 'SearchArgs') -> List['IFeature']: pass
def operation(self, name: str) -> 'OwsOperation': pass
class IOwsService(IObject):
meta: 'MetaData'
type: str
version: str
def error_response(self, err: 'Exception') -> 'HttpResponse': pass
def handle(self, req: 'IRequest') -> 'HttpResponse': pass
class IPrinter(IObject):
templates: List['ITemplate']
class IProject(IObject):
api: Optional['IApi']
assets_root: Optional['DocumentRoot']
client: Optional['IClient']
locale_uids: List[str]
map: Optional['IMap']
meta: 'MetaData'
overview_map: Optional['IMap']
printer: Optional['IPrinter']
templates: List['ITemplate']
title: str
class IRequest(IBaseRequest):
auth: 'IAuthManager'
session: 'ISession'
user: 'IUser'
def acquire(self, klass: str, uid: str) -> Optional['IObject']: pass
def auth_close(self, res: 'IResponse'): pass
def auth_open(self): pass
def login(self, login: str, password: str): pass
def logout(self): pass
def require(self, klass: str, uid: str) -> 'IObject': pass
def require_layer(self, uid: str) -> 'ILayer': pass
def require_project(self, uid: str) -> 'IProject': pass
class IRootObject(IObject):
all_objects: list
all_types: dict
application: 'IApplication'
shared_objects: dict
validator: 'SpecValidator'
def create(self, klass, cfg=None): pass
def create_object(self, klass, cfg, parent=None): pass
def create_shared_object(self, klass, uid, cfg): pass
def create_unbound_object(self, klass, cfg): pass
def find(self, klass, uid=None) -> 'IObject': pass
def find_all(self, klass=None) -> List['IObject']: pass
def find_by_uid(self, uid) -> 'IObject': pass
def find_first(self, klass) -> 'IObject': pass
class ISearchProvider(IObject):
active: bool
capabilties: int
data_model: Optional['IModel']
spatial_context: 'SearchSpatialContext'
templates: List['ITemplate']
title: str
tolerance: 'Measurement'
with_geometry: bool
with_keyword: bool
def can_run(self, args: 'SearchArgs'): pass
def context_shape(self, args: 'SearchArgs') -> 'IShape': pass
def run(self, layer: 'ILayer', args: 'SearchArgs') -> List['IFeature']: pass
class ITemplate(IObject):
category: str
data_model: Optional['IModel']
key: str
legend_layer_uids: List[str]
legend_mode: Optional['TemplateLegendMode']
map_size: 'Size'
mime_types: List[str]
page_size: 'Size'
path: str
subject: str
text: str
title: str
def add_headers_and_footers(self, context: dict, in_path: str, out_path: str, format: str) -> str: pass
def dpi_for_quality(self, quality): pass
def prepare_context(self, context: dict) -> dict: pass
def render(self, context: dict, mro: 'MapRenderOutput' = None, out_path: str = None, legends: dict = None, format: str = None) -> 'TemplateOutput': pass
class IWebSite(IObject):
assets_root: 'DocumentRoot'
cors: 'CorsOptions'
error_page: Optional['ITemplate']
host: str
reversed_host: str
reversed_rewrite_rules: List['RewriteRule']
rewrite_rules: List['RewriteRule']
ssl: bool
static_root: 'DocumentRoot'
def url_for(self, req, url): pass
class ISqlProvider(IDbProvider):
def describe(self, table: 'SqlTable') -> Dict[str, 'SqlTableColumn']: pass
def edit_operation(self, operation: str, table: 'SqlTable', features: List['IFeature']) -> List['IFeature']: pass
def select(self, args: 'SelectArgs', extra_connect_params: dict = None) -> List['IFeature']: pass
class IVectorLayer(ILayer):
def connect_feature(self, feature: 'IFeature') -> 'IFeature': pass
|
StarcoderdataPython
|
6672548
|
import json
import os
from django.utils.translation import gettext_lazy as _
from . import BASE_DIR
# Secret settings
with open(os.path.join(BASE_DIR, 'secret.json')) as secret_file:
    secret = json.loads(secret_file.read())
SECRET_KEY = secret['SECRET_KEY']
ALLOWED_HOSTS = secret['ALLOWED_HOSTS']
DATABASES = secret['DATABASES']
DEBUG = secret['DEBUG']
CELERY_BROKER_URL = secret['CELERY_BROKER_URL']
EMAIL_HOST = secret['EMAIL_HOST']
EMAIL_HOST_USER = secret['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = secret['EMAIL_HOST_PASSWORD']
EMAIL_PORT = secret['EMAIL_PORT']
EMAIL_USE_TLS = secret['EMAIL_USE_TLS']
EMAIL_NO_REPLY = secret['EMAIL_NO_REPLY']
EMAIL_CUSTOMER_SERVICE = secret['EMAIL_CUSTOMER_SERVICE']
LINE_NOTIFY_ACCESS_TOKEN = secret['LINE_NOTIFY_ACCESS_TOKEN']
LINE_CHANNEL_ACCESS_TOKEN = secret['LINE_CHANNEL_ACCESS_TOKEN']
LINE_CHANNEL_SECRET = secret['LINE_CHANNEL_SECRET']
GOOGLE_MAPS_API_KEY = secret['GOOGLE_MAPS_API_KEY']
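# A sketch of the expected secret.json layout (the keys are taken from the reads
# above; the values shown here are placeholders, not real settings):
# {
#   "SECRET_KEY": "change-me",
#   "ALLOWED_HOSTS": ["localhost"],
#   "DATABASES": {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": "db.sqlite3"}},
#   "DEBUG": true,
#   "CELERY_BROKER_URL": "redis://localhost:6379/0",
#   "EMAIL_HOST": "...", "EMAIL_HOST_USER": "...", "EMAIL_HOST_PASSWORD": "...",
#   "EMAIL_PORT": 587, "EMAIL_USE_TLS": true,
#   "EMAIL_NO_REPLY": "...", "EMAIL_CUSTOMER_SERVICE": "...",
#   "LINE_NOTIFY_ACCESS_TOKEN": "...", "LINE_CHANNEL_ACCESS_TOKEN": "...",
#   "LINE_CHANNEL_SECRET": "...", "GOOGLE_MAPS_API_KEY": "..."
# }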
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.humanize',
]
INSTALLED_APPS += [
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.facebook',
'allauth.socialaccount.providers.google',
'allauth.socialaccount.providers.kakao',
'allauth.socialaccount.providers.naver',
'member.socialaccount.providers.line',
'easy_thumbnails',
]
INSTALLED_APPS += [
'member',
'magazine',
'board',
'help',
'booking',
'event',
'chatbot',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'conf.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'conf', 'templates'),
os.path.join(BASE_DIR, 'allauth', 'templates'),
os.path.join(BASE_DIR, 'golf', 'templates'),
os.path.join(BASE_DIR, 'magazine', 'templates'),
os.path.join(BASE_DIR, 'help', 'templates'),
os.path.join(BASE_DIR, 'board', 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'conf.wsgi.application'
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', },
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', },
{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', },
{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', },
]
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
SITE_ID = 1
# django.contrib.auth settings for allauth
PASSWORD_RESET_TIMEOUT_DAYS = 1 # default=3
LOGIN_URL = '/accounts/login/' # default=/accounts/login/
LOGOUT_URL = '/accounts/logout/' # default=/accounts/logout/
LOGIN_REDIRECT_URL = '/' # default=/accounts/profile/
# LOGOUT_REDIRECT_URL = '/'
# django-allauth
DEFAULT_FROM_EMAIL = secret['EMAIL_NO_REPLY']
ACCOUNT_ADAPTER = 'member.adapters.MyAccountAdapter'
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
ACCOUNT_LOGIN_ATTEMPTS_LIMIT = 5
ACCOUNT_LOGIN_ATTEMPTS_TIMEOUT = 300
ACCOUNT_FORMS = {
'add_email': 'member.forms.MemberAddEmailForm',
'change_password': '<PASSWORD>',
'set_password': '<PASSWORD>',
'reset_password': '<PASSWORD>',
}
ACCOUNT_LOGOUT_ON_PASSWORD_CHANGE = True # default=False
ACCOUNT_EMAIL_SUBJECT_PREFIX = _('[WITH THAI] ')
ACCOUNT_AUTHENTICATED_LOGIN_REDIRECTS = False
SOCIALACCOUNT_AUTO_SIGNUP = False
SOCIALACCOUNT_ADAPTER = 'member.adapters.MySocialAccountAdapter'
SOCIALACCOUNT_FORMS = {
'signup': 'member.forms.MemberSignupForm',
}
SOCIALACCOUNT_PROVIDERS = {
'facebook': {},
'google': {},
'kakao': {},
'naver': {},
'line': {
'SCOPE': [
'profile',
'openid',
'email',
],
},
}
|
StarcoderdataPython
|
6572012
|
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='PyGB',
version='0.0.1',
author= '<NAME> & <NAME>',
author_email= '<EMAIL>',
description= 'A command-line application to prepare GenBank sequence submission',
long_description= long_description,
long_description_content_type='text/markdown',
# url='https://github.com/hhandika/',
packages=find_packages(),
include_package_data=True,
classifiers=[
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
],
python_requires='>=3.6',
install_requires=[
'Click',
],
entry_points='''
[console_scripts]
pygb=pgb.py_gb_preparator:main
''',
)
|
StarcoderdataPython
|
1698957
|
from torch.nn import functional as F
from torch import nn
import torch
class _Scorer(nn.Module):
def __init__(self, n_classes, soft=False, apply_softmax=True, skip_first_class=True, smooth=1e-7):
super(_Scorer, self).__init__()
self.register_buffer('eye', torch.eye(n_classes))
self.soft = soft
self.n_classes = n_classes
self.apply_softmax = apply_softmax
self.skip_first_class = skip_first_class
self.smooth = smooth
def one_hot(self, x):
# squeeze channels and convert to one hot, then move classes to second dimension
x = self.eye[x.long()].permute(0, 3, 1, 2)
if self.skip_first_class:
x = x[:, 1-self.n_classes:, :, :] # skip background (class 0)
return x
def transform_inputs(self, inputs: torch.Tensor, truth: torch.Tensor):
truth = self.one_hot(truth)
if self.apply_softmax:
inputs = F.softmax(inputs, dim=1)
if not self.soft:
inputs = torch.argmax(inputs, dim=1)
inputs = self.one_hot(inputs)
elif self.skip_first_class:
inputs = inputs[:, 1:, :, :] # skip background
return inputs, truth
class DiceScore(_Scorer):
r"""Sørensen–Dice Score
The Dice coefficient, or Dice-Sørensen coefficient, is a common metric for pixel segmentation that can also be
modified to act as a loss function:
.. math::
        DSC(X, Y) = \frac{2 \left| X \cap Y \right|}{\left| X \right| + \left| Y \right|}
Inspired by https://github.com/kevinzakka/pytorch-goodies/blob/master/losses.py
"""
def __init__(self, n_classes, soft=False, apply_softmax=True, skip_first_class=True, smooth=1e-7):
super(DiceScore, self).__init__(n_classes, soft, apply_softmax, skip_first_class, smooth)
def forward(self, inputs: torch.Tensor, truth: torch.Tensor):
inputs, truth = self.transform_inputs(inputs, truth)
intersection = torch.sum(inputs * truth, dim=(0, 2, 3))
cardinality = torch.sum(inputs ** 2 + truth ** 2, dim=(0, 2, 3))
dice_coefficient = 2. * intersection / (cardinality + self.smooth)
return dice_coefficient.mean()
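# Worked example (assumed hard, one-hot masks): if a foreground class has 3
# overlapping pixels, 5 predicted pixels and 4 ground-truth pixels, then
# DSC = 2*3 / (5 + 4) = 6/9 ≈ 0.667; for 0/1 masks the squared cardinality
# used above equals the plain pixel counts.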
class TverskyIndex(_Scorer):
r"""Tversky Index
    The Tversky Index (TI) is an asymmetric similarity measure that is a
generalisation of the dice coefficient and the Jaccard index.
.. math::
TI = \frac{TP}{TP + \alpha FN + \beta FP}
"""
def __init__(self, n_classes, alpha=0.5, beta=0.5, soft=False, apply_softmax=True, skip_first_class=True, smooth=1e-7):
super(TverskyIndex, self).__init__(n_classes, soft, apply_softmax, skip_first_class, smooth)
self.alpha = alpha
self.beta = beta
def forward(self, inputs: torch.Tensor, truth: torch.Tensor):
inputs, truth = self.transform_inputs(inputs, truth)
intersection = torch.sum(inputs * truth, dim=(0, 2, 3))
fps = torch.sum(inputs * (1 - truth), dim=(0, 2, 3))
fns = torch.sum((1 - inputs) * truth, dim=(0, 2, 3))
return (intersection / (intersection + (self.alpha * fps) + (self.beta * fns) + self.smooth)).mean()
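# Note: with alpha = beta = 0.5 the Tversky index reduces to the Dice score,
# since TP / (TP + 0.5*FN + 0.5*FP) = 2*TP / (2*TP + FN + FP).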
class JaccardIndex(_Scorer):
r"""The Jaccard index, also known as the Jaccard similarity coefficient or Intersection Over Union
.. math::
J(A,B) = \frac{|A \cap B|}{|A \cup B|} = \frac{|A \cap B|}{|A| + |B| - |A \cap B|}.
"""
def __init__(self, n_classes, soft=False, apply_softmax=True, skip_first_class=True, smooth=1e-7):
super(JaccardIndex, self).__init__(n_classes, soft, apply_softmax, skip_first_class, smooth)
def forward(self, inputs: torch.Tensor, truth: torch.Tensor):
inputs, truth = self.transform_inputs(inputs, truth)
intersection = torch.sum(inputs * truth, dim=(0, 2, 3))
union = torch.sum(inputs + truth, dim=(0, 2, 3)) - intersection
iou = (intersection / (union + self.smooth))
return iou.mean()
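# Worked example (same assumed masks as the Dice note above): with
# |A ∩ B| = 3, |A| = 5, |B| = 4, J = 3 / (5 + 4 - 3) = 0.5.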
class Metrics(nn.Module):
def __init__(self, buffer_size, num_classes, loss, device=None):
super(Metrics, self).__init__()
self.register_buffer("_losses", torch.zeros(buffer_size, dtype=torch.float32, device=device))
self.register_buffer("_scores_iou", torch.zeros(buffer_size, dtype=torch.float32, device=device))
self.register_buffer("_scores_dice", torch.zeros(buffer_size, dtype=torch.float32, device=device))
self.register_buffer("_scores_soft_dice", torch.zeros(buffer_size, dtype=torch.float32, device=device))
# self.register_buffer("_scores_hausdorff", torch.zeros(buffer_size, dtype=torch.double, device=device))
self._loss = loss
self._dice = DiceScore(num_classes)
self._soft_dice = DiceScore(num_classes, soft=True)
self._iou = JaccardIndex(num_classes)
# self._hausdorff = AveragedHausdorffLoss()
pass
def collect_metrics_only(self, batch_index, net_predictions, segmentation_classes):
self._scores_iou[batch_index] = self._iou(net_predictions, segmentation_classes).detach()
self._scores_dice[batch_index] = self._dice(net_predictions, segmentation_classes).detach()
self._scores_soft_dice[batch_index] = self._soft_dice(net_predictions, segmentation_classes).detach()
# self._scores_hausdorff[batch_index] = self._hausdorff(net_predictions, segmentation_classes).detach()
def collect_and_get_loss(self, batch_index, net_predictions, segmentation_classes):
self.collect_metrics_only(batch_index, net_predictions, segmentation_classes)
loss_value = self._loss(net_predictions, segmentation_classes)
self._losses[batch_index] = loss_value.detach()
return loss_value
def collect(self, batch_index, net_predictions, segmentation_classes):
self.collect_metrics_only(batch_index, net_predictions, segmentation_classes)
self._losses[batch_index] = self._loss(net_predictions, segmentation_classes).detach()
def get_loss(self, net_predictions, segmentation_classes):
return self._loss(net_predictions, segmentation_classes)
@property
def loss(self):
return self._losses.mean().item()
@property
def iou(self):
return self._scores_iou.mean().item()
@property
def dice(self):
return self._scores_dice.mean().item()
@property
def soft_dice(self):
return self._scores_soft_dice.mean().item()
# @property
# def hausdorff(self):
# return self._scores_hausdorff.mean().item()
def get_metrics(self):
return self.loss, self.iou, self.dice, self.soft_dice # , self.hausdorff
if __name__ == '__main__':
from torch import Tensor
dc = DiceScore(3)
gt = Tensor([[[1, 0], [0, 2]]])
pred = Tensor([[
[[.1, .8],
[.8, .1]],
[[.8, .1],
[.1, .1]],
[[.1, .1],
[.1, .8]]
]])
print(pred)
pred = torch.argmax(pred, dim=1)
pred = torch.eye(3)[pred.long()]
pred = pred.permute(0, 3, 1, 2) # move classes to second dimension
print(pred)
# print(dc(pred, gt))
|
StarcoderdataPython
|
9662767
|
import re
from collections import Counter
'''
The flow is as follows:
#1 - scan the log and collect every line that matches an error in the main DB
#2 - count how many times each matched error was observed
#3 - print each matched error together with its recommended action
'''
# this dict keys contain errors and value contains action for the found errors
errors_actions_db = {'Error publishing analytics events': 'Check why could not execute the request to Events Service',
'PKIX path building failed: sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target': '''It happens when the EUM Server tries to connect to the Analytics or Events Service, either directly or via a proxy server over SSL, and the EUM Server does not trust the incoming certificate. This is due to a missing intermediate or root certificate in the trust keystore of the EUM Server.
To resolve this issue, import the intermediate/root certificates into the trust keystore of the EUM Server.
Example:
<EUM_HOME>/jre/lib/security/cacerts
Import the root & intermediate certificate to the "cacerts" trust keystore
../jre/bin/keytool -import -trustcacerts -alias myorg-rootca -keystore cacerts -file /path/to/CA-cert.txt -storepass changeit
../jre/bin/keytool -import -trustcacerts -alias myorg-interca -keystore cacerts -file
Start the EUM Server. From the eum-processor directory by running
bin/eum.sh start
Verify the new security certificate works by opening the following page in a browser:
https://<hostname>:7002/eumcollector/get-version''',
'Error in custom provider, com.singularity.ee.util.security.credentialstore.exception.CryptoOperationFailureException: Failed to decrypt: Input length must be multiple of 16 when decrypting with padded cipher': '''
check what is set in eum.properties file for onprem.dbPassword. Or you can try to change Credential Keystore Password for the EUM Database: https://docs.appdynamics.com/display/PRO45/Secure+the+EUM+Server#SecuretheEUMServer-ChangetheCredentialKeystorePasswordfortheEUMDatabase''',
"java.sql.SQLException: Access denied for user 'root'@'localhost' (using password: <PASSWORD>)": '''
A workaround is to start the EUM server with the plain-text password.
comment out these lines:
#onprem.dbPassword=-<PASSWORD>==
#onprem.useEncryptedCredentials=true
and add below lines
# mysql is an example password, your EUM db password for root user will be different
onprem.dbPassword=mysql
onprem.useEncryptedCredentials=false
and try starting the EUM server and check if this helps.
''',
}
# The function below searches every line of the log file for the errors defined as keys of the dict above
def errors_and_actions(main_db): # line here means every line of the log file
file = open('eum-processorp.log', 'r').readlines()
errors_list = []
errors_actions_list = []
    # look for the dict keys in each line; if a match is found, record the error and its recommended action
for line in file:
for error in main_db.keys():
if re.search(error, line):
errors_list.append(error)
errors_actions_list.append('MATCHED ERROR: ' + error + '&&' + 'RECOMMENDED STEPS: ' + main_db[error])
#print('ERROR : ' + line + '\n' + 'ACTION : ' + errors_actions[error] + '\n' * 2 + '*' * 30)
if errors_list:
print('The Total Errors and Amount of Times each of Them Observed:')
        print(Counter(errors_list))  # count how many times each matched error was observed
print('\n' * 2)
for i in list(set(errors_actions_list)):
print('\n'.join(i.split('&&')))
else:
print('No error found in the logs')
errors_and_actions(errors_actions_db)
|
StarcoderdataPython
|
5125967
|
<gh_stars>1-10
import logging
import pickle
import sqlite3
from cref.utils import Database
logger = logging.getLogger('CReF')
class TorsionAnglesDB(Database):
"""
Cache torsion angles calculation
"""
def create(self):
parent = super(TorsionAnglesDB, self)
parent.execute(
"""
CREATE TABLE IF NOT EXISTS pdb_torsions (
pdb text, residues text, phi blob, psi blob
)
"""
)
parent.execute(
"""
CREATE INDEX IF NOT EXISTS IdxPDB ON pdb_torsions(pdb)
"""
)
def save(self, pdb_code, residues, phi, psi):
query = """
INSERT INTO pdb_torsions VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
"""
args = (
pdb_code.upper(),
residues,
sqlite3.Binary(pickle.dumps(phi)),
sqlite3.Binary(pickle.dumps(psi)),
)
super(TorsionAnglesDB, self).execute(query, args)
def retrieve(self, pdb_code):
result = super(TorsionAnglesDB, self).retrieve(
"""
SELECT residues, phi, psi
FROM pdb_torsions WHERE pdb = '{}'
""".format(pdb_code.upper())
)
if result:
result = dict(
residues=result[0],
phi=pickle.loads(result[1]),
psi=pickle.loads(result[2]),
)
else:
result = None
return result
class TorsionsCalculator:
def __init__(self, cache_db='data/torsions.db'):
self.torsions_db = TorsionAnglesDB(cache_db)
def get_angles(self, pdb_code):
angles = self.torsions_db.retrieve(pdb_code)
if not angles:
raise KeyError('Torsion angles for {} not found'.format(pdb_code))
return angles
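# Illustrative sketch (not in the original source): cache the torsion angles for one PDB
# entry and read them back. The PDB code, residue string, angle values and database path
# below are made-up placeholders.
def _example_torsion_cache_roundtrip(db_path='data/torsions.db'):
    db = TorsionAnglesDB(db_path)
    db.create()
    db.save('1abc', 'ACDEFG', phi=[-60.0, -65.0], psi=[-45.0, -40.0])
    # retrieve() upper-cases the code before querying, so lookups are case-insensitive
    return db.retrieve('1ABC')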
|
StarcoderdataPython
|
9697340
|
from .disease import Disease
from .immunity import Immunity
from .infection import Infection
|
StarcoderdataPython
|
11325371
|
import concurrent.futures
import datetime
import json
import time
from typing import Dict, Optional # noqa
from sebs.gcp.gcp import GCP
from sebs.faas.function import ExecutionResult, Trigger
class LibraryTrigger(Trigger):
def __init__(self, fname: str, deployment_client: Optional[GCP] = None):
super().__init__()
self.name = fname
self._deployment_client = deployment_client
@staticmethod
def typename() -> str:
return "GCP.LibraryTrigger"
@property
def deployment_client(self) -> GCP:
assert self._deployment_client
return self._deployment_client
@deployment_client.setter
def deployment_client(self, deployment_client: GCP):
self._deployment_client = deployment_client
@staticmethod
def trigger_type() -> Trigger.TriggerType:
return Trigger.TriggerType.LIBRARY
def sync_invoke(self, payload: dict) -> ExecutionResult:
self.logging.info(f"Invoke function {self.name}")
# Verify that the function is deployed
deployed = False
while not deployed:
if self.deployment_client.is_deployed(self.name):
deployed = True
else:
time.sleep(5)
# GCP's fixed style for a function name
config = self.deployment_client.config
full_func_name = (
f"projects/{config.project_name}/locations/" f"{config.region}/functions/{self.name}"
)
function_client = self.deployment_client.get_function_client()
req = (
function_client.projects()
.locations()
.functions()
.call(name=full_func_name, body={"data": json.dumps(payload)})
)
begin = datetime.datetime.now()
res = req.execute()
end = datetime.datetime.now()
gcp_result = ExecutionResult.from_times(begin, end)
gcp_result.request_id = res["executionId"]
if "error" in res.keys() and res["error"] != "":
self.logging.error("Invocation of {} failed!".format(self.name))
self.logging.error("Input: {}".format(payload))
gcp_result.stats.failure = True
return gcp_result
output = json.loads(res["result"])
gcp_result.parse_benchmark_output(output)
return gcp_result
def async_invoke(self, payload: dict):
raise NotImplementedError()
def serialize(self) -> dict:
return {"type": "Library", "name": self.name}
@staticmethod
def deserialize(obj: dict) -> Trigger:
return LibraryTrigger(obj["name"])
class HTTPTrigger(Trigger):
def __init__(self, url: str):
super().__init__()
self.url = url
@staticmethod
def typename() -> str:
return "GCP.HTTPTrigger"
@staticmethod
def trigger_type() -> Trigger.TriggerType:
return Trigger.TriggerType.HTTP
def sync_invoke(self, payload: dict) -> ExecutionResult:
self.logging.debug(f"Invoke function {self.url}")
return self._http_invoke(payload, self.url)
def async_invoke(self, payload: dict) -> concurrent.futures.Future:
pool = concurrent.futures.ThreadPoolExecutor()
fut = pool.submit(self.sync_invoke, payload)
return fut
def serialize(self) -> dict:
return {"type": "HTTP", "url": self.url}
@staticmethod
def deserialize(obj: dict) -> Trigger:
return HTTPTrigger(obj["url"])
|
StarcoderdataPython
|
8029746
|
<gh_stars>0
#!/usr/bin/python3
__author__ = "<NAME>"
__copyright__ = "Copyright 2021, National University of S'pore and A*STAR"
__credits__ = ["<NAME>", "<NAME>", "<NAME>", "<NAME>"]
__license__ = "MIT"
# Import publicly published & installed packages
import tensorflow as tf
from numpy.random import seed
import os, time, csv, shutil, math, time
from tensorflow.python.eager.monitoring import Sampler
# Import in-house classes
from paoding.sampler import Sampler
from paoding.evaluator import Evaluator
from paoding.utility.option import SamplingMode, ModelType
import paoding.utility.utils as utils
import paoding.utility.bcolors as bcolors
import paoding.utility.simulated_propagation as simprop
class Pruner:
constant = 0
model = None
optimizer = None
sampler = None
robustness_evaluator = None
model_path = None
test_set = None
pruning_target = None
pruning_step = None
model_type = -1
lo_bound = 0
hi_bound = 1
def __init__(self, path, test_set=None, target=0.5, step=0.025, sample_strategy=None, input_interval=(0,1), model_type=ModelType.XRAY, seed_val=None):
"""
Initializes `Pruner` class.
Args:
path: The path of neural network model to be pruned.
test_set: The tuple of test features and labels used for evaluation purpose.
target: The percentage value of expected pruning goal (optional, 0.50 by default).
step: The percentage value of pruning portion during each epoch (optional, 0.025 by default).
sample_strategy: The sampling strategy specified for pruning (optional).
            input_interval: The value range of a legal input (optional, [0,1] by default).
            model_type: The enumerated value that specifies the model type (optional, binary classification model by default).
                [PS] 4 modes are supported in the Alpha release; refer to ``paoding.utility.option.ModelType`` for the technical definition.
            seed_val: The seed for randomization, for reproducibility purposes (optional, intended for experimental use only).
"""
if sample_strategy == None:
self.sampler = Sampler()
else:
self.sampler = sample_strategy
self.robustness_evaluator = Evaluator()
self.model_path = path
# Specify a random seed
if seed_val is not None:
seed(seed_val)
tf.random.set_seed(seed_val)
self.model_type = model_type
self.target_adv_epsilons = [0.5]
self.pruning_target = target
self.pruning_step = step
self.evaluation_batch = 50
# E.g. EPOCHS_PER_CHECKPOINT = 5 means we save the pruned model as a checkpoint after each five
# epochs and at the end of pruning
self.EPOCHS_PER_CHECKPOINT = 15
self.test_set = test_set
(self.lo_bound, self.hi_bound) = input_interval
#self.first_mlp_layer_size = first_mlp_layer_size
def load_model(self, optimizer=None):
"""
Load the model.
Args:
optimizer: The optimizer specified for evaluation purpose (optional, RMSprop with lr=0.01 by default).
"""
self.model = tf.keras.models.load_model(self.model_path)
print(self.model.summary())
if optimizer is None:
self.optimizer = tf.keras.optimizers.RMSprop(learning_rate=0.01)
else:
self.optimizer = optimizer
def save_model(self, path):
"""
Save the model to the path specified.
Args:
path: The path that the model to be saved.
"""
if os.path.exists(path):
shutil.rmtree(path)
print("Overwriting existing pruned model ...")
self.model.save(path)
print(" >>> Pruned model saved")
def evaluate(self, metrics=['accuracy']):
"""
Evaluate the model performance.
Args:
metrics: The list of TF compatible metrics (optional, accuracy (only) by default).
Returns:
A tuple of loss and accuracy values
"""
if self.test_set is None:
print("Test set not provided, evaluation aborted...")
return 0, 0
test_features, test_labels = self.test_set
# self.model.compile(optimizer=self.optimizer, loss='binary_crossentropy', metrics=metrics)
loss, accuracy = self.model.evaluate(test_features, test_labels, verbose=2)
print("Evaluation accomplished -- [ACC]", accuracy, "[LOSS]", loss)
return loss, accuracy
def prune(self, evaluator=None, pruned_model_path=None):
"""
Perform pruning and save the pruned model to a specified location.
Args:
evaluator: The evaluation configuration (optional, no evaluation requested by default).
pruned_model_path: The location to save the pruned model (optional, a fixed path by default).
"""
if evaluator is not None:
self.robustness_evaluator = evaluator
self.target_adv_epsilons = evaluator.epsilons
self.evaluation_batch = evaluator.batch_size
test_images, test_labels = self.test_set
utils.create_dir_if_not_exist("paoding/logs/")
# utils.create_dir_if_not_exist("paoding/save_figs/")
if pruned_model_path is None:
pruned_model_path=self.model_path+"_pruned"
# Define a list to record each pruning decision
tape_of_moves = []
# Define a list to record benchmark & evaluation per pruning epoch (begins with original model)
score_board = []
accuracy_board = []
################################################################
# Launch a pruning epoch #
################################################################
epoch_couter = 0
num_units_pruned = 0
percentage_been_pruned = 0
stop_condition = False
        neurons_manipulated = None
        target_scores = None
        pruned_pairs = None
        cumulative_impact_intervals = None
        saliency_matrix = None
model = self.model
big_map = simprop.get_definition_map(model, input_interval=(self.lo_bound, self.hi_bound))
# Start elapsed time counting
start_time = time.time()
while(not stop_condition):
pruning_result_dict = self.sampler.nominate(model,big_map,
prune_percentage=self.pruning_step,
cumulative_impact_intervals=cumulative_impact_intervals,
neurons_manipulated=neurons_manipulated, saliency_matrix=saliency_matrix,
bias_aware=True)
model = pruning_result_dict['model']
neurons_manipulated = pruning_result_dict['neurons_manipulated']
target_scores = pruning_result_dict['target_scores']
pruned_pairs = pruning_result_dict['pruned_pairs']
cumulative_impact_intervals = pruning_result_dict['cumulative_impact_intervals']
saliency_matrix = pruning_result_dict['saliency_matrix']
score_dicts = pruning_result_dict['pruning_pairs_dict_overall_scores']
epoch_couter += 1
            # Check whether the list of pruned pairs is empty - an empty list means no more pruning is feasible
num_pruned_curr_batch = 0
if pruned_pairs is not None:
for layer, pairs in enumerate(pruned_pairs):
if len(pairs) > 0:
num_pruned_curr_batch += len(pairs)
if num_pruned_curr_batch == 0:
stop_condition = True
print(" >> No more hidden unit could be pruned, we stop at EPOCH", epoch_couter)
else:
if not self.sampler.mode == SamplingMode.BASELINE:
print(" >> Cumulative impact as intervals after this epoch:")
print(cumulative_impact_intervals)
percentage_been_pruned += self.pruning_step
print(" >> Pruning progress:", bcolors.BOLD, str(round(percentage_been_pruned * 100, 2)) + "%", bcolors.ENDC)
model.compile(optimizer="rmsprop", loss='binary_crossentropy', metrics=['accuracy'])
if evaluator is not None and self.test_set is not None:
robust_preservation = self.robustness_evaluator.evaluate_robustness(model, (test_images, test_labels), self.model_type)
#loss, accuracy = model.evaluate(test_images, test_labels, verbose=2)
loss, accuracy = self.evaluate()
# Update score_board and tape_of_moves
score_board.append(robust_preservation)
accuracy_board.append((round(loss, 4), round(accuracy, 4)))
print(bcolors.OKGREEN + "[Epoch " + str(epoch_couter) + "]" + str(robust_preservation) + bcolors.ENDC)
tape_of_moves.append(pruned_pairs)
pruned_pairs = None
                # Check whether we have pruned enough hidden units
if self.sampler.mode == SamplingMode.BASELINE and percentage_been_pruned >= 0.5:
print(" >> Maximum pruning percentage has been reached")
stop_condition = True
elif not stop_condition and percentage_been_pruned >= self.pruning_target:
print(" >> Target pruning percentage has been reached")
stop_condition = True
# Save the pruned model at each checkpoint or after the last pruning epoch
if epoch_couter % self.EPOCHS_PER_CHECKPOINT == 0 or stop_condition:
curr_pruned_model_path = pruned_model_path + "_ckpt_" + str(math.ceil(epoch_couter/self.EPOCHS_PER_CHECKPOINT))
if os.path.exists(curr_pruned_model_path):
shutil.rmtree(curr_pruned_model_path)
print("Overwriting existing pruned model ...")
model.save(curr_pruned_model_path)
print(" >>> Pruned model saved")
# Stop elapsed time counting
end_time = time.time()
print("Elapsed time: ", round((end_time - start_time)/60.0, 3), "minutes /", int(end_time - start_time), "seconds")
################################################################
# Save the tape of moves #
################################################################
# Obtain a timestamp
local_time = time.localtime()
timestamp = time.strftime('%b-%d-%H%M', local_time)
tape_filename = "paoding/logs/chest-" + timestamp + "-" + str(self.evaluation_batch)
if evaluator is None:
tape_filename = tape_filename+"-BENCHMARK"
if self.sampler.mode == SamplingMode.BASELINE:
tape_filename += "_tape_baseline.csv"
else:
tape_filename = tape_filename + "_tape_" + self.sampler.mode.name + ".csv"
if os.path.exists(tape_filename):
os.remove(tape_filename)
with open(tape_filename, 'w+', newline='') as csv_file:
csv_writer = csv.writer(csv_file, delimiter=',')
csv_line = [str(eps) for eps in self.target_adv_epsilons]
csv_line.append('moves,loss,accuracy')
csv_writer.writerow(csv_line)
for index, item in enumerate(score_board):
rob_pres_stat = [item[k] for k in self.target_adv_epsilons]
rob_pres_stat.append(tape_of_moves[index])
rob_pres_stat.append(accuracy_board[index])
csv_writer.writerow(rob_pres_stat)
if evaluator is None:
csv_writer.writerow(["Elapsed time: ", round((end_time - start_time) / 60.0, 3), "minutes /", int(end_time - start_time), "seconds"])
print("Pruning accomplished")
|
StarcoderdataPython
|
388778
|
from .endpoint import Endpoint, decorator, get_json
class Policystore(Endpoint):
def __init__(self):
Endpoint.__init__(self)
self.app = 'base'
@decorator
def policystoreReadMetadataRoles(self, args):
self.method = 'GET'
self.endpoint = '/policystore/metadataroles'
self.params = {"api-version": "2021-07-01"}
@decorator
def policystoreReadMetadataPolicy(self, args):
self.method = 'GET'
self.endpoint = f'/policystore/collections/{args["--collectionName"]}/metadataPolicy' if args["--policyId"] is None else f'/policystore/metadataPolicies/{args["--policyId"]}'
self.params = {"api-version": "2021-07-01"}
@decorator
def policystoreReadMetadataPolicies(self, args):
self.method = 'GET'
self.endpoint = '/policystore/metadataPolicies'
self.params = {"api-version": "2021-07-01"}
@decorator
def policystorePutMetadataPolicy(self, args):
self.method = 'PUT'
self.endpoint = f'/policystore/metadataPolicies/{args["--policyId"]}'
self.params = {"api-version": "2021-07-01"}
self.payload = get_json(args, '--payload-file')
|
StarcoderdataPython
|
11368733
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Convenience wrapper for running backup.py directly from source tree."""
import sys
from backup.backup import main
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
# Exit on KeyboardInterrupt
# http://stackoverflow.com/a/21144662/639133
print()
print('Keyboard interrupt')
sys.exit(0)
|
StarcoderdataPython
|
9759046
|
import subprocess, dotbot, json
from os import path, remove
from dotbot.util import module
class Sudo(dotbot.Plugin):
_directive = 'sudo'
def can_handle(self, directive):
return self._directive == directive
def handle(self, directive, data):
if directive != self._directive:
raise ValueError('sudo cannot handle directive %s' %
directive)
app = self._find_dotbot()
base_directory = self._context.base_directory()
data = [{'defaults': self._context.defaults()}] + data
plugins = self._collect_plugins()
sudo_conf = path.join(path.dirname(__file__), 'sudo.conf.json')
self._write_conf_file(sudo_conf, data)
proc_args = [
'sudo', app,
'--base-directory', base_directory,
'--config-file', sudo_conf
] + plugins
self._log.debug('sudo: args to pass: {}'.format(proc_args))
try:
self._log.lowinfo('sudo: begin subprocess')
subprocess.check_call(
proc_args,
stdin=subprocess.PIPE)
self._log.lowinfo('sudo: end subprocess')
self._delete_conf_file(sudo_conf)
return True
except subprocess.CalledProcessError as e:
self._log.lowinfo('sudo: end subprocess')
self._log.error(e)
return False
def _collect_plugins(self):
ret = []
for plugin in module.loaded_modules:
# HACK should we compare to something other than _directive?
if plugin.__name__ != self._directive:
ret.extend(iter(['--plugin', plugin.__file__]))
return ret
def _delete_conf_file(self, conf_file):
if path.exists(conf_file):
remove(conf_file)
def _find_dotbot(self):
base = path.dirname(path.dirname(dotbot.__file__))
ret = path.join(base, 'bin', 'dotbot')
self._log.debug('sudo: dotbot app path: {}'.format(ret))
return ret
def _write_conf_file(self, conf_file, data):
self._delete_conf_file(conf_file)
with open(conf_file, 'w') as jfile:
json.dump(data, jfile, ensure_ascii=False)
|
StarcoderdataPython
|
1966028
|
"""
SoftLayer.tests.CLI.modules.rwhois_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
from SoftLayer.CLI import exceptions
from SoftLayer import testing
import json
class RWhoisTests(testing.TestCase):
def test_edit_nothing(self):
result = self.run_command(['rwhois', 'edit'])
self.assertEqual(result.exit_code, 2)
self.assertIsInstance(result.exception, exceptions.CLIAbort)
def test_edit(self):
result = self.run_command(['rwhois', 'edit',
'--abuse=<EMAIL>',
'--address1=address line 1',
'--address2=address line 2',
'--company=Company, Inc',
'--city=Dallas',
'--country=United States',
'--firstname=John',
'--lastname=Smith',
'--postal=12345',
'--state=TX',
'--state=TX',
'--private'])
self.assert_no_fail(result)
self.assertEqual(result.output, "")
self.assert_called_with('SoftLayer_Network_Subnet_Rwhois_Data',
'editObject',
args=({'city': 'Dallas',
'firstName': 'John',
'companyName': 'Company, Inc',
'address1': 'address line 1',
'address2': 'address line 2',
'lastName': 'Smith',
'abuseEmail': '<EMAIL>',
'state': 'TX',
'country': 'United States',
'postalCode': '12345',
'privateResidenceFlag': True},),
identifier='id')
def test_edit_public(self):
result = self.run_command(['rwhois', 'edit', '--public'])
self.assert_no_fail(result)
self.assertEqual(result.output, "")
self.assert_called_with('SoftLayer_Network_Subnet_Rwhois_Data',
'editObject',
args=({'privateResidenceFlag': False},),
identifier='id')
def test_show(self):
self.maxDiff = 100000
result = self.run_command(['rwhois', 'show'])
expected = {'Abuse Email': 'abuseEmail',
'Address 1': 'address1',
'Address 2': 'address2',
'City': 'city',
'Company': 'companyName',
'Country': 'country',
'Name': 'firstName lastName',
'Postal Code': 'postalCode',
'State': '-',
'Private Residence': True}
self.assert_no_fail(result)
self.assertEqual(json.loads(result.output), expected)
|
StarcoderdataPython
|
1946128
|
#!/bin/python
# This script uses the Satellite API to generate different HTML reports like below example reports:
#
# * hosts_by_usergroup (You need to assign User Groups as owner to hosts for this)
# * hosts_by_lifecycle_environment
# * hosts_by_environment
# * hosts_by_model
# * hosts_by_domain
# * hosts_by_operatingsystem
# * hosts_by_fact_java_version
# * hosts_by_fact_uptime_days
# * hosts_by_fact_selinux_current_mode
# * hosts_by_hypervisor
# * hosts_by_myparam (Add a global parameter with a comma seperated list of valid values and group hosts by host parameter)
# * hosts_by_errata_critical_applicable
# * hosts_by_errata_critical_installable
#
# Example usage: satellite-report-html.py hosts_by_lifecycle_environment >/var/www/html/pub/reports/hosts_by_lifecycle_environment.html
#
# Add a new report by just copying one of the functions for example reports to the bottom of this script
#
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()
import json
import sys, getopt
import os
import fnmatch
import datetime
import urllib
SAT_SERVER = 'mysatellite'
SAT_API = 'https://' + SAT_SERVER + '/api/v2/'
KAT_API = 'https://' + SAT_SERVER + '/katello/api/v2/'
USERNAME = "myuser"
PASSWORD = "<PASSWORD>"
SSL_VERIFY = "/etc/pki/katello/certs/katello-server-ca.crt"
POST_HEADERS = {'content-type': 'application/json'}
def print_html_header():
html_header = '''
<html>
<head>
<style>
button.accordion {
background-color: #eee;
color: #444;
cursor: pointer;
padding: 18px;
width: 100%;
border: none;
text-align: left;
outline: none;
font-size: 15px;
transition: 0.4s;
}
button.accordion.active, button.accordion:hover {
background-color: #ccc;
}
button.accordion:after {
content: "\\002B";
color: #777;
font-weight: bold;
float: right;
margin-left: 5px;
}
button.accordion.active:after {
content: "\\2212";
}
div.panel {
padding: 0 18px;
background-color: white;
max-height: 0;
overflow: hidden;
transition: max-height 0.2s ease-out;
}
</style>
</head>
<body>
'''
print html_header
def print_html_footer():
html_footer_1 = '''
<script>
var acc = document.getElementsByClassName("accordion");
var i;
for (i = 0; i < acc.length; i++) {
acc[i].addEventListener("click", function() {
this.classList.toggle("active");
var panel = this.nextElementSibling;
if (panel.style.maxHeight){
panel.style.maxHeight = null;
} else {
panel.style.maxHeight = panel.scrollHeight + "px";
}
});
}
</script>
'''
print html_footer_1
now = datetime.datetime.now()
print '<i>' + now.strftime('Generated at %Y-%m-%d %H:%M') + '</i>'
html_footer_2 = '''
</body>
</html>
'''
print html_footer_2
def post_json(location, json_data):
"""
Performs a POST and passes the data to the URL location
"""
result = requests.post(
location,
json=json_data,
auth=(USERNAME, PASSWORD),
verify=SSL_VERIFY,
headers=POST_HEADERS).json()
if result.get('error'):
print("Error: ")
print(result) #['error']['message']
else:
return result
return None
def get_json(url):
# Performs a GET using the passed URL location
r = requests.get(url, auth=(USERNAME, PASSWORD), verify=SSL_VERIFY)
return r.json()
def get_results(url):
"""
Performs a GET and returns the data / error message
"""
jsn = get_json(url)
if jsn.get('error'):
print "Error: " + jsn['error']['message']
else:
if jsn.get('results'):
return jsn['results']
elif 'results' not in jsn:
return jsn
return None
def list_items(url,item_name="name"):
"""
List an element ('name' is the default, can be 'id', 'title' and more) of all the items of a specific url location
"""
result = get_results(url)
item_list = []
if result is None:
return ['']
for item in result:
item_list.append(str(unicode(item[item_name])))
return item_list
def check_exists(location):
"""
    Check the existence of an element in the Satellite
"""
if list_items(location,'id') != ['']:
return True
else:
return False
# EXAMPLE REPORTS START HERE
def hosts_by_usergroup():
usergroups=get_results(SAT_API+'usergroups')
my_usergroups = []
for usergroup in usergroups:
host_list=list_items(SAT_API+'hosts?per_page=1000&search=owner_type%20%3D%20Usergroup%20and%20owner_id%20%3D%20'+str(usergroup['id']),'name')
my_usergroups.append(str(usergroup['id']))
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + usergroup['name'].encode('utf-8') + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
# hosts assigned to other owners than User Groups
# exclude virt-who hosts
searchstr="name%20%21~%20virt-who%25%20"
for my_usergroup in my_usergroups:
searchstr+="%20and%20owner_id%20%21%3D%20" + my_usergroup + "%20"
host_list_unassigned=list_items(SAT_API+'hosts?per_page=1000&search='+searchstr,'name')
if any(host_list_unassigned):
item_count=str(len(host_list_unassigned))
else:
item_count="0"
print "<button class='accordion'>Unassigned (" + item_count + ")</button>"
print "<div class='panel'>"
for host_unassigned in host_list_unassigned:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host_unassigned + "'>" + host_unassigned + "</a><br/>"
print "</div>"
def hosts_by_lifecycle_environment():
lifecycle_environments=get_results(KAT_API+'environments?organization_id=1')
for lifecycle_environment in lifecycle_environments:
host_list=list_items(SAT_API+'hosts?per_page=1000&search=lifecycle_environment%20%3D%20'+str(lifecycle_environment['name']),'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + lifecycle_environment['name'].encode('utf-8') + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_environment():
environments=get_results(SAT_API+'environments')
for environment in environments:
host_list=list_items(SAT_API+'hosts?per_page=1000&search=environment%20%3D%20'+str(environment['name']),'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + environment['name'].encode('utf-8') + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_model():
models=get_results(SAT_API+'models')
for model in models:
model_name = { 'model' : '"' + str(model['name']) + '"'}
model_urlencoded = urllib.urlencode(model_name)
host_list=list_items(SAT_API+'hosts?per_page=1000&search='+model_urlencoded,'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + model['name'].encode('utf-8') + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_domain():
domains=get_results(SAT_API+'domains')
for domain in domains:
host_list=list_items(SAT_API+'hosts?per_page=1000&search=domain%20%3D%20'+str(domain['name']),'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + domain['name'].encode('utf-8') + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_operatingsystem():
operatingsystems=get_results(SAT_API+'operatingsystems')
for operatingsystem in operatingsystems:
os_name = { 'os_title' : '"' + str(operatingsystem['title']) + '"'}
os_urlencoded = urllib.urlencode(os_name)
host_list=list_items(SAT_API+'hosts?per_page=1000&search='+os_urlencoded,'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + operatingsystem['title'].encode('utf-8') + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_fact_java_version():
fact_values=get_results(SAT_API+'fact_values?per_page=1000&search=fact+%3D+java_version')
java_versions = []
for k1, v1 in fact_values.iteritems():
for k2, v2 in v1.iteritems():
if v2 not in java_versions:
java_versions.append(v2)
for java_version in java_versions:
host_list=list_items(SAT_API+'hosts?per_page=1000&search=facts.java_version%20%3D%20'+java_version,'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + java_version + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_fact_uptime_days():
    # Uptime buckets from large to small; an end of None means "exactly this many days"
    uptime_ranges = [(start, start + 100) for start in range(1000, 0, -100)]
    uptime_ranges += [(50, 100), (10, 50), (1, 10), (0, None)]
    for uptime_start, uptime_end in uptime_ranges:
        if uptime_end is None:
            search = 'facts.system_uptime%3A%3Adays%20%3D%20' + str(uptime_start)
            label = str(uptime_start) + " days"
        else:
            search = 'facts.system_uptime%3A%3Adays%20%3E%3D%20' + str(uptime_start) + '%20and%20facts.system_uptime%3A%3Adays%20%3C%20' + str(uptime_end)
            label = str(uptime_start) + '-' + str(uptime_end) + " days"
        host_list = list_items(SAT_API + 'hosts?per_page=1000&search=' + search, 'name')
        if any(host_list):
            item_count = str(len(host_list))
            print "<button class='accordion'>" + label + " (" + item_count + ")</button>"
            print "<div class='panel'>"
            for host in host_list:
                print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
            print "</div>"
def hosts_by_fact_selinux_current_mode():
selinux_current_modes = ['enforcing','permissive','disabled']
for selinux_current_mode in selinux_current_modes:
host_list=list_items(SAT_API+'hosts?per_page=1000&search=facts.selinux_current_mode%20%3D%20'+selinux_current_mode,'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + selinux_current_mode + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_hypervisor():
hosts=get_results(SAT_API+'hosts?per_page=10000&search=name%20%20%21~%20virt-who%25')
hypervisor_list = {}
for host in hosts:
host_details=get_results(SAT_API+'hosts/' + str(host['id']))
if 'subscription_facet_attributes' in host_details:
for key, value in host_details['subscription_facet_attributes'].iteritems():
if key == 'virtual_host':
if value:
hypervisor = value['name'].encode('utf-8')
if hypervisor in hypervisor_list:
hypervisor_list[hypervisor].append(host_details['name'])
else:
hypervisor_list[hypervisor] = [host_details['name']]
for hypervisor_name, hypervisor_guests in hypervisor_list.iteritems():
if any(hypervisor_guests):
item_count=str(len(hypervisor_guests))
else:
item_count="0"
print "<button class='accordion'>" + hypervisor_name.replace('virt-who-','') + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in hypervisor_guests:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_myparam():
params=get_results(SAT_API+'common_parameters?search=name%20%3D%20myparams')
try:
paramvalue = params[0]['value']
    except (TypeError, IndexError, KeyError):
print "No valid value returned from Satellite"
sys.exit(1)
myparams = paramvalue.split(',')
    for myparam in myparams:
host_list=list_items(SAT_API+'hosts?per_page=1000&search=params.myparam+%3D+'+myparam,'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + myparam + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_errata_critical_applicable():
erratas=list_items(KAT_API+'errata?per_page=1000&order=issued+desc&organization_id=1&errata_restrict_applicable=true&search=id%20~%20RH%25%20and%20severity%20%3D%20Critical', 'errata_id')
for errata in erratas:
host_list=list_items(SAT_API+'hosts?per_page=1000&search=applicable_errata%20%3D%20'+errata,'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + errata + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def hosts_by_errata_critical_installable():
erratas=list_items(KAT_API+'errata?per_page=1000&order=issued+desc&organization_id=1&errata_restrict_installable=true&search=id%20~%20RH%25%20and%20severity%20%3D%20Critical', 'errata_id')
for errata in erratas:
host_list=list_items(SAT_API+'hosts?per_page=1000&search=installable_errata%20%3D%20'+errata,'name')
if any(host_list):
item_count=str(len(host_list))
else:
item_count="0"
print "<button class='accordion'>" + errata + " (" + item_count + ")</button>"
print "<div class='panel'>"
for host in host_list:
print "<a href='https://" + SAT_SERVER + "/hosts/" + host + "'>" + host + "</a><br/>"
print "</div>"
def main():
try:
reportname=sys.argv[1]
except IndexError:
print 'Must supply reportname!'
sys.exit(1)
print_html_header()
eval(reportname+"()")
print_html_footer()
sys.exit(0)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
1705498
|
<filename>KAT7/observation/f_engine_phased_beamform_dualpol.py
#!/usr/bin/python
# Dual polarisation beamforming: Track target and possibly calibrator for beamforming.
# The *with* keyword is standard in Python 2.6, but has to be explicitly
# imported in Python 2.5
from __future__ import with_statement
import time
import StringIO
import logging
import numpy as np
from katcorelib import (standard_script_options, verify_and_connect,
collect_targets, start_session, user_logger, ant_array)
import fbf_katcp_wrapper as fbf
class BeamformerReceiver(fbf.FBFClient):
"""KATCP client to beamformer receiver, with added metadata."""
def __init__(self, name, server, rx_port, pol, meta_port, data_port, data_drive):
user_logger.info('Connecting to server %r for beam %r...' % (server, name))
logger = logging.getLogger('katcp')
super(BeamformerReceiver, self).__init__(host=server, port=rx_port,
timeout=60, logger=logger)
while not self.is_connected():
user_logger.info('Waiting for TCP link to receiver server...')
time.sleep(1)
user_logger.info('Connected to server %r for beam %r' % (server, name))
self.name = name
self.pol = pol
self.meta_port = meta_port
self.data_port = data_port
self.data_drive = data_drive
self.obs_meta = {}
def __repr__(self):
return "<BeamformerReceiver %r -> %r at 0x%x>" % (self.name, self.pol, id(self))
@property
def inputs(self):
return self.obs_meta.get('ants', [])
# Server where beamformer receivers are run
server = 'kat-dc2.karoo'
# beams = {'bf0': {'pol':'h', 'meta_port':'7152', 'data_port':'7150', 'rx_port':1235, 'data_drive':'/data1'},
# 'bf1': {'pol':'v', 'meta_port':'7153', 'data_port':'7151', 'rx_port':1236, 'data_drive':'/data2'}}
beams = [BeamformerReceiver('bf0', server, rx_port=1235, pol='h', meta_port=7152,
data_port=7150, data_drive='/data1'),
BeamformerReceiver('bf1', server, rx_port=1236, pol='v', meta_port=7153,
data_port=7151, data_drive='/data2')]
def bf_inputs(cbf, bf):
"""Input labels associated with specified beamformer (*all* inputs)."""
reply = cbf.req.dbe_label_input()
return [] if not reply.succeeded else \
[m.arguments[0] for m in reply.messages[1:] if m.arguments[3] == bf]
def select_ant(cbf, input, bf='bf0'):
"""Only use one antenna in specified beamformer."""
# Iterate over *all* inputs going into the given beam
for inp in bf_inputs(cbf, bf):
status = 'beamformer input ' + inp + ':'
weight = '1' if inp == input else '0'
status += ' kept' if inp == input else ' zeroed'
cbf.req.dbe_k7_beam_weights(bf, inp, *(1024 * [weight]))
user_logger.info(status)
def get_weights(cbf):
"""Retrieve the latest gain corrections and their corresponding update times."""
weights, times = {}, {}
for sensor_name in cbf.sensor:
if sensor_name.endswith('_gain_correction_per_channel'):
sensor = cbf.sensor[sensor_name]
input_name = sensor_name.split('_')[1]
reading = sensor.get_reading()
weights[input_name] = reading.value
times[input_name] = reading.timestamp
return weights, times
def phase_up(cbf, weights, inputs=None, bf='bf0', style='flatten'):
"""Phase up a group of antennas using latest gain corrections.
The *style* parameter determines how the complex gain corrections obtained
on the latest calibrator source will be turned into beamformer weights:
- 'norm': Apply the complex gain corrections unchanged as weights,
thereby normalising both amplitude and phase per channel.
- 'phase': Only apply the phase correction, leaving the weight amplitudes
equal to 1. This has the advantage of not boosting weaker inputs that
will increase the noise level, but it also does not flatten the band.
- 'flatten': Apply both amplitude and phase corrections, but preserve
mean gain of each input. This flattens the band while also not boosting
noise levels on weaker inputs.
- 'scramble': Apply random phase corrections, just for the heck of it.
Parameters
----------
cbf : client object
Object providing access to CBF (typically via proxy)
weights : string->string mapping
Gain corrections per input as returned by appropriate sensor
inputs : None or sequence of strings, optional
Names of inputs in use in given beamformer (default=all)
bf : string, optional
Name of beamformer instrument (one per polarisation)
style : {'flatten', 'norm', 'phase', 'scramble'}, optional
Processing done to gain corrections to turn them into weights
"""
# Iterate over *all* inputs going into the given beam
all_inputs = bf_inputs(cbf, bf)
num_inputs = len(all_inputs)
for inp in all_inputs:
status = 'beamformer input ' + inp + ':'
if (inputs is None or inp in inputs) and inp in weights and weights[inp]:
# Extract array of complex weights from string representation
weights_str = weights[inp]
f = StringIO.StringIO(weights_str)
orig_weights = np.loadtxt(f, dtype=np.complex, delimiter=' ')
amp_weights = np.abs(orig_weights)
phase_weights = orig_weights / amp_weights
if style == 'norm':
new_weights = orig_weights
status += ' normed'
elif style == 'phase':
new_weights = phase_weights
status += ' phased'
elif style == 'flatten':
# Get the average gain in the KAT-7 passband
avg_amp = np.median(amp_weights[256:768])
new_weights = orig_weights / avg_amp
status += ' flattened'
elif style == 'scramble':
new_weights = np.exp(2j * np.pi * np.random.rand(1024))
status += ' scrambled'
else:
raise ValueError('Unknown phasing-up style %r' % (style,))
# Normalise weights by number of inputs to avoid overflow
new_weights /= num_inputs
# Reconstruct string representation of weights from array
weights_str = ' '.join([('%+5.3f%+5.3fj' % (w.real, w.imag))
for w in new_weights])
else:
# Zero the inputs that are not in use in the beamformer
weights_str = ' '.join(1024 * ['0'])
status += ' zeroed'
cbf.req.dbe_k7_beam_weights(bf, inp, weights_str)
user_logger.info(status)
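# Hypothetical glue sketch (not from the original script): fetch the latest gain
# corrections and phase up a chosen set of inputs on one beamformer instrument.
# `cbf` is assumed to be a connected CBF proxy; the input labels are placeholders.
def _example_phase_up(cbf, inputs=('ant1h', 'ant2h'), bf='bf0'):
    weights, update_times = get_weights(cbf)
    phase_up(cbf, weights, inputs=list(inputs), bf=bf, style='flatten')
    return update_times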
def report_compact_traceback(tb):
"""Produce a compact traceback report."""
print '--------------------------------------------------------'
print 'Session interrupted while doing (most recent call last):'
print '--------------------------------------------------------'
while tb:
f = tb.tb_frame
print '%s %s(), line %d' % (f.f_code.co_filename, f.f_code.co_name, f.f_lineno)
tb = tb.tb_next
print '--------------------------------------------------------'
class BeamformerSession(object):
"""Context manager that ensures that beamformer is switched off."""
def __init__(self, cbf, beams):
self.cbf = cbf
self.beams = beams
def __enter__(self):
"""Enter the data capturing session."""
return self
def __exit__(self, exc_type, exc_value, traceback):
"""Exit the data capturing session, closing all streams."""
if exc_value is not None:
exc_msg = str(exc_value)
msg = "Session interrupted by exception (%s%s)" % \
(exc_value.__class__.__name__,
(": '%s'" % (exc_msg,)) if exc_msg else '')
if exc_type is KeyboardInterrupt:
user_logger.warning(msg)
else:
user_logger.error(msg, exc_info=True)
self.capture_stop()
# Suppress KeyboardInterrupt so as not to scare the lay user,
# but allow other exceptions that occurred in the body of with-statement
if exc_type is KeyboardInterrupt:
report_compact_traceback(traceback)
return True
else:
return False
def instrument_start(self, instrument):
"""Start given CBF instrument."""
self.cbf.req.dbe_capture_start(instrument)
user_logger.info('waiting 10s for stream %r to start' % (instrument,))
time.sleep(10)
def instrument_stop(self, instrument):
"""Stop given CBF instrument."""
self.cbf.req.dbe_capture_stop(instrument)
user_logger.info('waiting 10s for stream %r to stop' % (instrument,))
time.sleep(10)
def capture_start(self):
"""Enter the data capturing session, starting capture."""
user_logger.info('Starting correlator (used for signal displays)')
# Starting streams will issue metadata for capture
        # Use long 10 s intervals to give enough time to initiate data capture and to capture the metadata
# Else there will be collisions between the 2 beams
for beam in self.beams:
# Initialise receiver and setup server for data capture
user_logger.info('Initialising receiver and stream for beam %r' %
(beam.name,))
if not beam.rx_init(beam.data_drive, beam.obs_meta['half_band'],
beam.obs_meta['transpose']):
raise RuntimeError('Could not initialise %r receiver' %
(beam.name,))
# Start metadata receiver before starting data transmit
beam.rx_meta_init(beam.meta_port) # port
self.instrument_start(beam.name)
user_logger.info('beamformer metadata')
beam.rx_meta(beam.obs_meta) # additional obs related info
user_logger.info('waiting 10s to write metadata for beam %r' %
(beam.name,))
time.sleep(10)
# Start transmitting data
user_logger.info('beamformer data for beam %r' % (beam.name,))
beam.rx_beam(pol=beam.pol, port=beam.data_port)
time.sleep(1)
def capture_stop(self):
"""Exit the data capturing session, stopping the capture."""
# End all receivers
for beam in self.beams:
user_logger.info('Stopping receiver and stream for beam %r' %
(beam.name,))
beam.rx_stop()
time.sleep(5)
self.instrument_stop(beam.name)
user_logger.info(beam.rx_close())
user_logger.info('Stopping correlator (used for signal displays)')
# Set up standard script options
usage = "%prog [options] <'target'>"
description = "Perform a beamforming run on a specified target, optionally " \
"visiting a gain calibrator beforehand to set beamformer weights."
parser = standard_script_options(usage, description)
# Add experiment-specific options
parser.add_option('-a', '--ants', default='all',
help="Antennas to include in beamformer (default='%default')")
parser.add_option('-b', '--buffercap', action='store_true',default=False,
help="Use real-time dspsr pipeline (default='%default')")
parser.add_option('-t', '--target-duration', type='float', default=20,
help='Minimum duration to track the beamforming target, '
'in seconds (default=%default)')
parser.add_option('--half-band', action='store_true', default=False,
help='Use only inner 50% of output band')
parser.add_option('--reset', action="store_true", default=False,
help='Reset the gains to 160.')
# Set default value for any option (both standard and experiment-specific options)
parser.set_defaults(description='Beamformer observation', nd_params='off',
dump_rate=1.0, mode='bc16n400M1k')
# Parse the command line
opts, args = parser.parse_args()
# Check options and arguments and connect to KAT proxies and devices
if len(args) == 0:
raise ValueError("Please specify the target")
with verify_and_connect(opts) as kat:
cbf = kat.dbe7
ants = kat.ants
if opts.buffercap: # set passband w.r.t. SPEAD rx
bw, cfreq = [200000000, 100000000]
else:
bw, cfreq = [400000000, 200000000]
for beam in ['bf0','bf1']:
cbf.req.dbe_k7_beam_passband(beam, bw, cfreq)
# We are only interested in the first target
user_logger.info('Looking up main beamformer target...')
    target = collect_targets(kat, args[:1]).targets[0]
    # The optional gain calibrator is assumed to be a second target argument (only recorded in the beam metadata)
    cal_target = collect_targets(kat, args[1:2]).targets[0] if len(args) > 1 else None
# Ensure that the target is up
target_elevation = np.degrees(target.azel()[1])
if target_elevation < opts.horizon:
raise ValueError("The desired target to be observed is below the horizon")
# Start correlator capture session
with start_session(kat, **vars(opts)) as corr_session:
# Force delay tracking to be on
opts.no_delays = False
corr_session.standard_setup(**vars(opts))
corr_session.capture_start()
# Dictionary to hold observation metadata to send over to beamformer receiver
for beam in beams:
beam.obs_meta.update(vars(opts))
beam.obs_meta['ants'] = [(ant.name + beam.pol) for ant in ants]
beam.obs_meta['target'] = target.description
if cal_target and len(ants) >= 4:
beam.obs_meta['cal_target'] = cal_target.description
if len(ants) > 1:
user_logger.info('Setting beamformer weight to 1 for %d antennas' % (len(ants),))
inputs = reduce(lambda inp, beam: inp + beam.inputs, beams, [])
# set the beamformer weights to 1 as the phaseing is done in the f-engine
weights = {}
bf_weights_str = ' '.join(1024 * ['1'])
for inp in inputs:
weights[inp] = bf_weights_str
for beam in beams:
phase_up(cbf, weights, inputs=beam.inputs, bf=beam.name, style='norm')
time.sleep(1)
else:
# The single-dish case does not need beamforming
user_logger.info('Set beamformer weights to select single dish')
for beam in beams:
select_ant(cbf, input=beam.inputs[0], bf=beam.name)
time.sleep(1)
# Start beamformer session
with BeamformerSession(cbf, beams) as bf_session:
# Get onto beamformer target
corr_session.label('track')
corr_session.track(target, duration=0)
# Only start capturing with beamformer once we are on target
bf_session.capture_start()
corr_session.track(target, duration=opts.target_duration)
|
StarcoderdataPython
|
4935158
|
from output.models.nist_data.atomic.id.schema_instance.nistschema_sv_iv_atomic_id_max_length_1_xsd.nistschema_sv_iv_atomic_id_max_length_1 import (
NistschemaSvIvAtomicIdMaxLength1,
Out,
)
__all__ = [
"NistschemaSvIvAtomicIdMaxLength1",
"Out",
]
|
StarcoderdataPython
|
1808997
|
import datetime
class FPS:
def __init__(self):
self._start = None
self._end = None
self._numFrames = 0
def start(self):
self._start = datetime.datetime.now()
self._numFrames = 0
return self
def stop(self):
self._end = datetime.datetime.now()
return self
def update(self):
self._numFrames += 1
def elapsed(self):
return (self._end - self._start).total_seconds()
def fps(self):
        return self._numFrames / self.elapsed()
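# Minimal usage sketch (not in the original file): time a loop and report throughput.
# The per-iteration work is a placeholder for real frame processing.
if __name__ == '__main__':
    fps = FPS().start()
    for _ in range(100):
        fps.update()          # call once per processed frame
    fps.stop()
    print("elapsed: {:.2f}s, fps: {:.2f}".format(fps.elapsed(), fps.fps()))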
|
StarcoderdataPython
|
3235983
|
'''
SignInPage.py Lib
Written By <NAME>
Version 20190420v1
'''
# import buildin pkgs
import os
from flask_restful import Resource
from flask_login import login_user, login_required
from flask import redirect, request, \
render_template, Response, \
url_for, session
## import priviate pkgs
from app.views.SignInForm import SignInForm
from app.models.User import User
## Sign In Class
class SignInPage(Resource):
## get method
def get(self):
form = SignInForm()
return(Response(render_template('SignIn.html', title="Sign In", form = form)))
## post method
def post(self):
form = SignInForm()
if form.validate_on_submit():
user_name = request.form.get('user_name', None)
password = request.form.get('password', None)
remember_me = request.form.get('remember_me', False)
userObj = User(user_name)
if userObj.verifyPassword(password):
login_user(userObj, remember = remember_me)
userObj.is_authenticated = True
session['user_name'] = user_name
return(redirect(url_for('indexpage')))
return(Response(render_template('SignIn.html', title="Sign In", form = form, message = 'Password Error')))
|
StarcoderdataPython
|
208388
|
import sys
if int(sys.stdin.readline()) % 2 == 0:
print "Bob"
else:
print "Alice"
|
StarcoderdataPython
|
9600459
|
<gh_stars>1-10
#!/usr/bin/env python
# Copyright 2020 Google Inc. Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import os, shutil, subprocess
import tensorflow as tf
def cleanup_dir(OUTPUT_DIR):
on_cloud = OUTPUT_DIR.startswith('gs://')
if on_cloud:
try:
subprocess.check_call('gsutil -m rm -r {}'.format(OUTPUT_DIR).split())
except subprocess.CalledProcessError:
pass
else:
shutil.rmtree(OUTPUT_DIR, ignore_errors=True)
os.makedirs(OUTPUT_DIR)
def create_strategy(mode):
"""
mode has be to be one of the following:
* cpu
* gpus_one_machine
* gpus_multiple_machines
* tpu_colab
* tpu_caip
* the actual name of the cloud_tpu
If you are using TPUs, this methods has to be the very first thing you do.
"""
if mode == 'cpu':
return tf.distribute.OneDeviceStrategy('/cpu:0')
if mode == 'gpus_one_machine':
print('Using {} GPUs'.format(len(tf.config.experimental.list_physical_devices("GPU"))))
return tf.distribute.MirroredStrategy()
if mode == 'gpus_multiple_machines':
print("Using TFCONFIG=", os.environ["TF_CONFIG"])
return tf.distribute.experimental.MultiWorkerMirroredStrategy()
# treat as tpu
if mode == 'tpu_colab':
tpu_name = 'grpc://' + os.environ['COLAB_TPU_ADDR']
elif mode == 'tpu_caip':
tpu_name = None
else:
tpu_name = mode
print("Using TPU: ", tpu_name)
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=tpu_name)
tf.config.experimental_connect_to_cluster(resolver)
# TPUs wipe out memory, so this has to be at very start of program
tf.tpu.experimental.initialize_tpu_system(resolver)
print("All devices: ", tf.config.list_logical_devices('TPU'))
return tf.distribute.TPUStrategy(resolver)
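# Illustrative sketch (not part of the original module): pick a distribution strategy
# and build a model inside its scope. The tiny Sequential model is a placeholder.
def _example_build_with_strategy(mode='cpu'):
    strategy = create_strategy(mode)
    with strategy.scope():
        model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
        model.compile(optimizer='adam', loss='mse')
    return model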
|
StarcoderdataPython
|
1649912
|
<reponame>maximilionus/telemonitor
from sys import platform
from logging import getLogger
from os import path, remove
from telemonitor.helpers import TM_Config, DEF_CFG, tm_colorama
__version = 1
__logger = getLogger(__name__)
# All relative paths are starting from root directory of module `telemonitor`,
# Not from this directory!
__service_config_template_path = './extensions/systemd_service/files/telemonitor-bot-template.service'
__shell_launch_script_path = './extensions/systemd_service/files/telemonitor_start.sh'
__service_config_final_path = '/lib/systemd/system/telemonitor-bot.service'
def cli(mode: str):
colorama = tm_colorama()
if platform == 'linux':
if mode == 'install':
if service_install():
print("Successfully installed Telemonitor systemd service to your linux system!",
f"\nName of the service is: {colorama.Fore.CYAN}{path.basename(__service_config_final_path)}{colorama.Fore.RESET}",
"\n\nNow the only thing you need to do is to run this command to detect a new service:",
f"\n\t{colorama.Fore.GREEN}systemctl daemon-reload{colorama.Fore.RESET}",
"\n\nAnd now you can manually control this service with:",
f"\n\t{colorama.Fore.GREEN}systemctl status {path.basename(__service_config_final_path)}{colorama.Fore.RESET} # View Telemonitor logs and current status",
f"\n\t{colorama.Fore.GREEN}systemctl start {path.basename(__service_config_final_path)}{colorama.Fore.RESET} # Start the Telemonitor service",
f"\n\t{colorama.Fore.GREEN}systemctl stop {path.basename(__service_config_final_path)}{colorama.Fore.RESET} # Stop the Telemonitor service"
f"\n\t{colorama.Fore.GREEN}systemctl enable {path.basename(__service_config_final_path)}{colorama.Fore.RESET} # Start Telemonitor service on system launch"
f"\n\t{colorama.Fore.GREEN}systemctl disable {path.basename(__service_config_final_path)}{colorama.Fore.RESET} # Disable Telemonitor service automatic startup",
"\n\nPlease note, that the commands above will require root user privileges to run."
)
else:
print("Telemonitor systemd service is already installed on this system")
elif mode == 'upgrade':
service_upgrade()
elif mode == 'remove':
if service_remove():
print("Successfully removed service from system")
else:
print("Systemd service configuration file doesn't exist, nothing to remove")
elif mode == 'status':
cfg_service = TM_Config.get()['systemd_service']
service_exists = __systemd_config_exists()
text = f"Telemonitor Systemd Service - Status\
\n\n- Is installed: {colorama.Fore.CYAN}{service_exists}{colorama.Fore.RESET}"
if service_exists:
text += f"\n- Version: {colorama.Fore.CYAN}{cfg_service['version']}{colorama.Fore.RESET}\
\n- Installation path: {colorama.Fore.CYAN}{__service_config_final_path}{colorama.Fore.RESET}"
print(text)
else:
print(f"This feature is available only for {colorama.Fore.CYAN}linux{colorama.Fore.RESET} platforms with systemd support.\nYour platform is {colorama.Fore.CYAN}{platform}{colorama.Fore.RESET}.")
__logger.error(f"Requested feature is available only on 'linux' platforms with systemd support. Your platform is {platform}")
exit()
def service_install() -> bool:
""" Install systemd service
Returns:
bool: Was service installed
"""
__logger.info("Begin systemd service installation")
colorama = tm_colorama()
result = False
if not __systemd_config_exists():
try:
template_service_file = open(__service_config_template_path, 'rt')
final_service_file = open(__service_config_final_path, 'wt')
text = template_service_file.read()
text = text.replace('<SHELL_SCRIPT_PATH>', path.abspath(__shell_launch_script_path))
final_service_file.write(text)
except Exception as e:
e_text = f"Can't write systemd service config file to {__service_config_final_path} due to {str(e)}"
print(f"{colorama.Fore.RED}{e_text}\n")
__logger.error(e_text)
else:
__update_cfg_values('install')
__logger.info("Systemd service was successfully installed on system.")
result = True
finally:
template_service_file.close()
final_service_file.close()
else:
__logger.error(f"Service file already exists in '{__service_config_final_path}'")
return result
def service_upgrade() -> bool:
""" Check systemd service config files and upgrade them to newer version if available
Returns:
bool: Was service updated
"""
was_updated = False
colorama = tm_colorama()
__logger.info("Begin systemd service upgrade check")
if __systemd_config_exists():
config = TM_Config.get()
builtin_version = __version
installed_version = config["systemd_service"]["version"]
if installed_version < builtin_version:
choice = input(f"Service file can be upgraded to version {colorama.Fore.CYAN}{builtin_version}{colorama.Fore.RESET} (Current version: {colorama.Fore.CYAN}{installed_version}{colorama.Fore.RESET}). Upgrade? {colorama.Fore.GREEN}[y/n]{colorama.Fore.RESET}: ")
if choice[0].lower() == 'y':
print(f"\n- Removing installed version {colorama.Fore.CYAN}{installed_version}{colorama.Fore.RESET} service from system...")
if service_remove():
print(
"- Installed version of service was removed",
f"\n- Installing the systemd service version {colorama.Fore.CYAN}{builtin_version}{colorama.Fore.RESET} to system..."
)
if service_install():
print("- Successfully installed new systemd service")
__update_cfg_values('upgrade')
print(f"\nService was successfully upgraded from version {colorama.Fore.CYAN}{installed_version}{colorama.Fore.RESET} to {colorama.Fore.CYAN}{builtin_version}{colorama.Fore.RESET}")
was_updated = True
else:
text = "Service is not installed, nothing to upgrade"
__logger.info(text)
print(text)
return was_updated
def service_remove() -> bool:
""" Remove all systemd service files, generated by Telemonitor, from system
Returns:
bool:
True - Successfully removed service from system
False - Can't remove service
"""
__logger.info("Begin systemd service removal")
result = False
colorama = tm_colorama()
if __systemd_config_exists():
try:
remove(__service_config_final_path)
except Exception as e:
print(f"Can't remove systemd service file in {colorama.Fore.CYAN}{__service_config_final_path}{colorama.Fore.RESET} due to {colorama.Fore.RED}{str(e)}")
__logger.error(f"Can't remove systemd service file in {__service_config_final_path} due to {str(e)}")
else:
__update_cfg_values('remove')
__logger.info(f"Successfully removed service file on path {colorama.Fore.CYAN}{__service_config_final_path}")
result = True
else:
__logger.error("Systemd service configuration file doesn't exist, nothing to remove")
return result
def __systemd_config_exists() -> bool:
""" Check for systemd config existence
Returns:
bool:
True - Config exists
False - Can't find any config file
"""
return path.isfile(__service_config_final_path)
def __update_cfg_values(mode: str):
""" Update config values related to systemd service
Args:
mode (str): [
'install' - Set config values to fresh install version.
'upgrade' - Upgrade value of `version` key in config.
'remove' - Reset `systemd_service` dict to default values.
]
"""
options = ('install', 'upgrade', 'remove')
if mode not in options:
raise Exception(f"Option '{mode}' doesn't exist in this function")
cfg = TM_Config.get()
if mode == 'install':
cfg['systemd_service'] = {
"version": __version
}
elif mode == 'upgrade':
cfg['systemd_service']["version"] = __version
elif mode == 'remove':
cfg['systemd_service'] = DEF_CFG['systemd_service']
TM_Config.write(cfg)
__logger.debug(f"Updated configuration dict 'systemd_service' to mode '{mode}'")
|
StarcoderdataPython
|
4866397
|
<filename>danceschool/core/management/commands/setup_invoicing.py<gh_stars>10-100
from django.core.management.base import BaseCommand
from django.apps import apps
from django.conf import settings
from six.moves import input
try:
import readline
except ImportError:
pass
class Command(BaseCommand):
help = 'Create necessary placeholders for staff members to generate and email invoices for registrations'
def boolean_input(self, question, default=None):
'''
Method for yes/no boolean inputs
'''
result = input("%s: " % question)
if not result and default is not None:
return default
while len(result) < 1 or result[0].lower() not in "yn":
result = input("Please answer yes or no: ")
return result[0].lower() == "y"
def handle(self, *args, **options):
from cms.api import add_plugin
from cms.models import Page, StaticPlaceholder
try:
initial_language = settings.LANGUAGES[0][0]
except IndexError:
initial_language = getattr(settings, 'LANGUAGE_CODE', 'en')
# Do some sanity checks to ensure that necessary apps are listed in INSTALLED_APPS
# before proceeding
required_apps = [
('cms', 'Django CMS'),
('danceschool.core', 'Core danceschool app'),
]
for this_app in required_apps:
if not apps.is_installed(this_app[0]):
self.stdout.write(
self.style.ERROR(
'ERROR: %s is not installed or listed in INSTALLED_APPS. Please install before proceeding.' % (
this_app[1],
)
)
)
return None
self.stdout.write(
"""
CHECKING INVOICE GENERATION FUNCTIONALITY
-----------------------------------------
"""
)
add_invoicing = self.boolean_input(
'Add invoice generation plugin to the registration summary view to ' +
'allow staff members to generate and email invoices for registrations [Y/n]',
True
)
if add_invoicing:
invoice_sp = StaticPlaceholder.objects.get_or_create(code='registration_invoice_placeholder')
invoice_p_draft = invoice_sp[0].draft
invoice_p_public = invoice_sp[0].public
if invoice_p_public.get_plugins().filter(plugin_type='CreateInvoicePlugin').exists():
self.stdout.write('Invoice generation form already present.')
else:
add_plugin(
invoice_p_draft, 'CreateInvoicePlugin', initial_language,
)
add_plugin(
invoice_p_public, 'CreateInvoicePlugin', initial_language,
)
self.stdout.write('Invoice generation form added.')
|
StarcoderdataPython
|
4940507
|
from http.cookies import SimpleCookie
import pytest
from tests.factories.tag import (
TagFactory,
tag_instant_delayed,
tag_instant_traceable,
tag_instant_analytical,
tag_instant_functional,
)
from tests.factories.page import TaggableContentPageFactory
from wagtail_tag_manager.models import Tag
@pytest.mark.django_db
def test_view_functional(client, site):
response = client.get(site.root_page.url)
assert response.status_code == 200
tag_instant_functional(tag_location=Tag.TOP_HEAD)
client.cookies = SimpleCookie({"wtm": "functional:true"})
response = client.get(site.root_page.url)
assert response.status_code == 200
assert b'console.log("functional instant")' in response.content
tag_instant_functional(name="instant functional 2", tag_location=Tag.BOTTOM_HEAD)
client.cookies = SimpleCookie({"wtm": "functional:true"})
response = client.get(site.root_page.url)
assert response.status_code == 200
assert b'console.log("functional instant")' in response.content
client.cookies = SimpleCookie({"wtm": "functional:false"})
response = client.get(site.root_page.url)
assert response.status_code == 200
assert b'console.log("functional instant")' in response.content
@pytest.mark.django_db
def test_view_analytical(client, site):
tag_instant_analytical(tag_location=Tag.TOP_BODY)
client.cookies = SimpleCookie({"wtm": "analytical:true"})
response = client.get(site.root_page.url)
assert response.status_code == 200
assert b'console.log("analytical instant")' in response.content
@pytest.mark.django_db
def test_view_delayed(client, site):
tag_instant_delayed(tag_location=Tag.TOP_BODY)
client.cookies = SimpleCookie({"wtm": "delayed:true"})
response = client.get(site.root_page.url)
assert response.status_code == 200
assert b'console.log("delayed instant")' in response.content
@pytest.mark.django_db
def test_view_traceable(client, site):
tag_instant_traceable(tag_location=Tag.BOTTOM_BODY)
client.cookies = SimpleCookie({"wtm": "traceable:true"})
response = client.get(site.root_page.url)
assert response.status_code == 200
assert b'console.log("traceable instant")' in response.content
@pytest.mark.django_db
def test_page_tags(client, site):
response = client.get(site.root_page.url)
assert response.status_code == 200
tag_functional = TagFactory(
name="functional instant",
auto_load=False,
tag_loading=Tag.INSTANT_LOAD,
content='<script>console.log("functional")</script>',
)
tag_analytical = TagFactory(
name="analytical instant",
auto_load=False,
tag_loading=Tag.INSTANT_LOAD,
tag_type="analytical",
content='<script>console.log("analytical")</script>',
)
tag_delayed = TagFactory(
name="delayed instant",
auto_load=False,
tag_loading=Tag.INSTANT_LOAD,
tag_type="delayed",
content='<script>console.log("delayed")</script>',
)
tag_traceable = TagFactory(
name="traceable instant",
auto_load=False,
tag_loading=Tag.INSTANT_LOAD,
tag_type="traceable",
content='<script>console.log("traceable")</script>',
)
assert tag_functional in Tag.objects.passive().sorted()
assert tag_analytical in Tag.objects.passive().sorted()
assert tag_delayed in Tag.objects.passive().sorted()
assert tag_traceable in Tag.objects.passive().sorted()
page = TaggableContentPageFactory(parent=site.root_page, slug="tagged-page")
page.wtm_tags.add(tag_functional)
page.wtm_tags.add(tag_analytical)
page.wtm_tags.add(tag_delayed)
page.wtm_tags.add(tag_traceable)
page.save()
client.cookies = SimpleCookie({"wtm": "functional:true"})
response = client.get(page.get_url())
assert response.status_code == 200
assert b'console.log("functional")' in response.content
client.cookies = SimpleCookie({"wtm": "analytical:true"})
response = client.get(page.get_url())
assert response.status_code == 200
assert b'console.log("analytical")' in response.content
client.cookies = SimpleCookie({"wtm": "delayed:true"})
response = client.get(page.get_url())
assert response.status_code == 200
assert b'console.log("delayed")' in response.content
client.cookies = SimpleCookie({"wtm": "traceable:true"})
response = client.get(page.get_url())
assert response.status_code == 200
assert b'console.log("traceable")' in response.content
|
StarcoderdataPython
|
6512326
|
#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python
import os
import json
import luigi
from cluster_tools.downscaling import DownscalingWorkflow
def downscale_raw(path, max_jobs=8, target='local'):
""" Downscale raw data.
Arguments:
path [str] - path to raw data
max_jobs [int] - maximum number of jobs
target [str] - target of computation: local, slurm or lsf
"""
# input and output keys
input_key = 'raw'
output_key = 'volumes/raw'
# temporary directories
config_dir = './configs'
tmp_folder = './tmp_downscaling'
os.makedirs(config_dir, exist_ok=True)
    # write the global configuration with shebang of python env with
# all necessary dependencies
config = DownscalingWorkflow.get_config()
global_config = config['global']
shebang = '#! /g/kreshuk/pape/Work/software/conda/miniconda3/envs/cluster_env37/bin/python'
global_config.update({'shebang': shebang})
with open(os.path.join(config_dir, 'global.config'), 'w') as f:
json.dump(global_config, f)
# write the task specific config
# here, we configure the downscaling task to use skimage
task_config = config['downscaling']
task_config.update({'library': 'skimage'})
with open(os.path.join(config_dir, 'downscaling.config'), 'w') as f:
json.dump(task_config, f)
scale_factors = [[1, 2, 2], [1, 2, 2], [1, 2, 2], 2]
halos = [[0, 10, 10], [0, 10, 10], [0, 10, 10], [10, 10, 10]]
task = DownscalingWorkflow(tmp_folder=tmp_folder,
max_jobs=max_jobs,
config_dir=config_dir,
target=target,
input_path=path,
input_key=input_key,
output_key_prefix=output_key,
scale_factors=scale_factors,
halos=halos)
success = luigi.build([task], local_scheduler=True)
assert success, "Dowscaling failed"
if __name__ == '__main__':
path = '/g/kreshuk/data/cremi/example/sampleA.n5'
downscale_raw(path, max_jobs=8, target='local')
|
StarcoderdataPython
|
12829565
|
from .base import loader
|
StarcoderdataPython
|
8166769
|
<reponame>maanavshah/movie-review-analysis<filename>sentiment_analysis/crawler.py
import urllib.request
from bs4 import BeautifulSoup
import csv
import signal
import sys
from subprocess import call
import os
signal.signal(signal.SIGINT, lambda x,y: sys.exit(0))
with open('data/urls.csv','r') as f:
reader = csv.reader(f)
rt_urls = list(reader)
csvfile = open('data/test.tsv','w')
wr = csv.writer(csvfile,dialect='excel',delimiter='\t')
wr.writerow(["PhraseId","SentenceId","Phrase"])
for i in rt_urls:
count = 0
print("------------------------------------------------------------------------------------------------------------------------------------------------------\n")
print("Fetching movie reviews :: ",i,"\n")
URL = "".join((str(i).replace('[','').replace(']','').replace("'",""),'reviews'))
url = urllib.request.urlopen(URL)
content = url.read()
soup = BeautifulSoup(content,"html.parser")
mydivs = soup.findAll("div", { "class" : "the_review" })
temp_id = 15601
for i in mydivs:
review = str(i).replace('<div class="the_review">','').replace('</div>','').lstrip()
review = review.replace('\n', ' ').replace('\r', '')
if review != "":
print("-",review)
wr.writerow([temp_id,temp_id,str(review)])
temp_id = temp_id + 1
count = count + 1
print("\nTotal reviews fetched :: ", count,"\n")
print("Performing sentiment analysis!\n")
break
|
StarcoderdataPython
|
11263034
|
import logging
import uuid
from typing import List, Union
from galaxy import model
from galaxy.util import ExecutionTimer
from galaxy.workflow import modules
from galaxy.workflow.run_request import (
workflow_request_to_run_config,
workflow_run_config_to_request,
WorkflowRunConfig
)
log = logging.getLogger(__name__)
# Entry point for core workflow scheduler.
def schedule(trans, workflow, workflow_run_config, workflow_invocation):
return __invoke(trans, workflow, workflow_run_config, workflow_invocation)
def __invoke(trans, workflow, workflow_run_config, workflow_invocation=None, populate_state=False):
""" Run the supplied workflow in the supplied target_history.
"""
if populate_state:
modules.populate_module_and_state(trans, workflow, workflow_run_config.param_map, allow_tool_state_corrections=workflow_run_config.allow_tool_state_corrections)
invoker = WorkflowInvoker(
trans,
workflow,
workflow_run_config,
workflow_invocation=workflow_invocation,
)
try:
outputs = invoker.invoke()
except modules.CancelWorkflowEvaluation:
if workflow_invocation:
if workflow_invocation.cancel():
trans.sa_session.add(workflow_invocation)
outputs = []
except Exception:
log.exception("Failed to execute scheduled workflow.")
if workflow_invocation:
# Running workflow invocation in background, just mark
# persistent workflow invocation as failed.
workflow_invocation.fail()
trans.sa_session.add(workflow_invocation)
else:
# Running new transient workflow invocation in legacy
            # controller action - propagate the exception up.
raise
outputs = []
if workflow_invocation:
# Be sure to update state of workflow_invocation.
trans.sa_session.flush()
return outputs, invoker.workflow_invocation
def queue_invoke(trans, workflow, workflow_run_config, request_params=None, populate_state=True, flush=True):
request_params = request_params or {}
if populate_state:
modules.populate_module_and_state(trans, workflow, workflow_run_config.param_map, allow_tool_state_corrections=workflow_run_config.allow_tool_state_corrections)
workflow_invocation = workflow_run_config_to_request(trans, workflow_run_config, workflow)
workflow_invocation.workflow = workflow
return trans.app.workflow_scheduling_manager.queue(workflow_invocation, request_params, flush=flush)
class WorkflowInvoker:
def __init__(self, trans, workflow, workflow_run_config, workflow_invocation=None, progress=None):
self.trans = trans
self.workflow = workflow
if progress is not None:
assert workflow_invocation is None
workflow_invocation = progress.workflow_invocation
if workflow_invocation is None:
invocation_uuid = uuid.uuid1()
workflow_invocation = model.WorkflowInvocation()
workflow_invocation.workflow = self.workflow
# In one way or another, following attributes will become persistent
# so they are available during delayed/revisited workflow scheduling.
workflow_invocation.uuid = invocation_uuid
workflow_invocation.history = workflow_run_config.target_history
self.workflow_invocation = workflow_invocation
else:
self.workflow_invocation = workflow_invocation
self.workflow_invocation.copy_inputs_to_history = workflow_run_config.copy_inputs_to_history
self.workflow_invocation.use_cached_job = workflow_run_config.use_cached_job
self.workflow_invocation.replacement_dict = workflow_run_config.replacement_dict
module_injector = modules.WorkflowModuleInjector(trans)
if progress is None:
progress = WorkflowProgress(
self.workflow_invocation,
workflow_run_config.inputs,
module_injector,
param_map=workflow_run_config.param_map,
jobs_per_scheduling_iteration=getattr(trans.app.config, "maximum_workflow_jobs_per_scheduling_iteration", -1),
)
self.progress = progress
def invoke(self):
workflow_invocation = self.workflow_invocation
config = self.trans.app.config
maximum_duration = getattr(config, "maximum_workflow_invocation_duration", -1)
if maximum_duration > 0 and workflow_invocation.seconds_since_created > maximum_duration:
log.debug(f"Workflow invocation [{workflow_invocation.id}] exceeded maximum number of seconds allowed for scheduling [{maximum_duration}], failing.")
workflow_invocation.state = model.WorkflowInvocation.states.FAILED
            # The invocation exceeded its time budget; persist the failed state.
self.trans.sa_session.add(workflow_invocation)
# Not flushing in here, because web controller may create multiple
# invocations.
return self.progress.outputs
if workflow_invocation.history.deleted:
log.info("Cancelled workflow evaluation due to deleted history")
raise modules.CancelWorkflowEvaluation()
remaining_steps = self.progress.remaining_steps()
delayed_steps = False
max_jobs_per_iteration_reached = False
for (step, workflow_invocation_step) in remaining_steps:
max_jobs_to_schedule = self.progress.maximum_jobs_to_schedule_or_none
if max_jobs_to_schedule is not None and max_jobs_to_schedule <= 0:
max_jobs_per_iteration_reached = True
break
step_delayed = False
step_timer = ExecutionTimer()
try:
self.__check_implicitly_dependent_steps(step)
if not workflow_invocation_step:
workflow_invocation_step = model.WorkflowInvocationStep()
workflow_invocation_step.workflow_invocation = workflow_invocation
workflow_invocation_step.workflow_step = step
workflow_invocation_step.state = 'new'
workflow_invocation.steps.append(workflow_invocation_step)
incomplete_or_none = self._invoke_step(workflow_invocation_step)
if incomplete_or_none is False:
step_delayed = delayed_steps = True
workflow_invocation_step.state = 'ready'
self.progress.mark_step_outputs_delayed(step, why="Not all jobs scheduled for state.")
else:
workflow_invocation_step.state = 'scheduled'
except modules.DelayedWorkflowEvaluation as de:
step_delayed = delayed_steps = True
self.progress.mark_step_outputs_delayed(step, why=de.why)
except Exception:
log.exception(
"Failed to schedule %s, problem occurred on %s.",
self.workflow_invocation.workflow.log_str(),
step.log_str(),
)
raise
if not step_delayed:
log.debug(f"Workflow step {step.id} of invocation {workflow_invocation.id} invoked {step_timer}")
if delayed_steps or max_jobs_per_iteration_reached:
state = model.WorkflowInvocation.states.READY
else:
state = model.WorkflowInvocation.states.SCHEDULED
workflow_invocation.state = state
# All jobs ran successfully, so we can save now
self.trans.sa_session.add(workflow_invocation)
# Not flushing in here, because web controller may create multiple
# invocations.
return self.progress.outputs
def __check_implicitly_dependent_steps(self, step):
""" Method will delay the workflow evaluation if implicitly dependent
steps (steps dependent but not through an input->output way) are not
yet complete.
"""
for input_connection in step.input_connections:
if input_connection.non_data_connection:
output_id = input_connection.output_step.id
self.__check_implicitly_dependent_step(output_id)
def __check_implicitly_dependent_step(self, output_id):
step_invocation = self.workflow_invocation.step_invocation_for_step_id(output_id)
# No steps created yet - have to delay evaluation.
if not step_invocation:
delayed_why = f"depends on step [{output_id}] but that step has not been invoked yet"
raise modules.DelayedWorkflowEvaluation(why=delayed_why)
if step_invocation.state != 'scheduled':
delayed_why = f"depends on step [{output_id}] job has not finished scheduling yet"
raise modules.DelayedWorkflowEvaluation(delayed_why)
# TODO: Handle implicit dependency on stuff like pause steps.
for job in step_invocation.jobs:
            # At least one job is incomplete.
if not job.finished:
delayed_why = f"depends on step [{output_id}] but one or more jobs created from that step have not finished yet"
raise modules.DelayedWorkflowEvaluation(why=delayed_why)
if job.state != job.states.OK:
raise modules.CancelWorkflowEvaluation()
def _invoke_step(self, invocation_step):
incomplete_or_none = invocation_step.workflow_step.module.execute(self.trans,
self.progress,
invocation_step,
use_cached_job=self.workflow_invocation.use_cached_job)
return incomplete_or_none
STEP_OUTPUT_DELAYED = object()
class WorkflowProgress:
def __init__(self, workflow_invocation, inputs_by_step_id, module_injector, param_map, jobs_per_scheduling_iteration=-1):
self.outputs = {}
self.module_injector = module_injector
self.workflow_invocation = workflow_invocation
self.inputs_by_step_id = inputs_by_step_id
self.param_map = param_map
self.jobs_per_scheduling_iteration = jobs_per_scheduling_iteration
self.jobs_scheduled_this_iteration = 0
@property
def maximum_jobs_to_schedule_or_none(self):
if self.jobs_per_scheduling_iteration > 0:
return self.jobs_per_scheduling_iteration - self.jobs_scheduled_this_iteration
else:
return None
def record_executed_job_count(self, job_count):
self.jobs_scheduled_this_iteration += job_count
def remaining_steps(self):
# Previously computed and persisted step states.
step_states = self.workflow_invocation.step_states_by_step_id()
steps = self.workflow_invocation.workflow.steps
# TODO: Wouldn't a generator be much better here so we don't have to reason about
        # steps we are nowhere near ready to schedule?
remaining_steps = []
step_invocations_by_id = self.workflow_invocation.step_invocations_by_step_id()
for step in steps:
step_id = step.id
if not hasattr(step, 'module'):
self.module_injector.inject(step, step_args=self.param_map.get(step.id, {}))
if step_id not in step_states:
template = "Workflow invocation [%s] has no step state for step id [%s]. States ids are %s."
message = template % (self.workflow_invocation.id, step_id, list(step_states.keys()))
raise Exception(message)
runtime_state = step_states[step_id].value
step.state = step.module.decode_runtime_state(runtime_state)
invocation_step = step_invocations_by_id.get(step_id, None)
if invocation_step and invocation_step.state == 'scheduled':
self._recover_mapping(invocation_step)
else:
remaining_steps.append((step, invocation_step))
return remaining_steps
def replacement_for_input(self, step, input_dict):
replacement: Union[
modules.NoReplacement,
model.DatasetCollectionInstance,
List[model.DatasetCollectionInstance],
] = modules.NO_REPLACEMENT
prefixed_name = input_dict["name"]
multiple = input_dict["multiple"]
if prefixed_name in step.input_connections_by_name:
connection = step.input_connections_by_name[prefixed_name]
if input_dict["input_type"] == "dataset" and multiple:
temp = [self.replacement_for_connection(c) for c in connection]
# If replacement is just one dataset collection, replace tool
# input_dict with dataset collection - tool framework will extract
# datasets properly.
if len(temp) == 1:
if isinstance(temp[0], model.HistoryDatasetCollectionAssociation):
replacement = temp[0]
else:
replacement = temp
else:
replacement = temp
else:
is_data = input_dict["input_type"] in ["dataset", "dataset_collection"]
replacement = self.replacement_for_connection(connection[0], is_data=is_data)
return replacement
def replacement_for_connection(self, connection, is_data=True):
output_step_id = connection.output_step.id
if output_step_id not in self.outputs:
message = f"No outputs found for step id {output_step_id}, outputs are {self.outputs}"
raise Exception(message)
step_outputs = self.outputs[output_step_id]
if step_outputs is STEP_OUTPUT_DELAYED:
delayed_why = f"dependent step [{output_step_id}] delayed, so this step must be delayed"
raise modules.DelayedWorkflowEvaluation(why=delayed_why)
output_name = connection.output_name
try:
replacement = step_outputs[output_name]
except KeyError:
# Must resolve.
template = "Workflow evaluation problem - failed to find output_name %s in step_outputs %s"
message = template % (output_name, step_outputs)
raise Exception(message)
if isinstance(replacement, model.HistoryDatasetCollectionAssociation):
if not replacement.collection.populated:
if not replacement.waiting_for_elements:
# If we are not waiting for elements, there was some
# problem creating the collection. Collection will never
# be populated.
# TODO: consider distinguish between cancelled and failed?
raise modules.CancelWorkflowEvaluation()
delayed_why = f"dependent collection [{replacement.id}] not yet populated with datasets"
raise modules.DelayedWorkflowEvaluation(why=delayed_why)
if isinstance(replacement, model.DatasetCollection):
raise NotImplementedError
if not is_data and isinstance(replacement, (model.HistoryDatasetAssociation, model.HistoryDatasetCollectionAssociation)):
if isinstance(replacement, model.HistoryDatasetAssociation):
if replacement.is_pending:
raise modules.DelayedWorkflowEvaluation()
if not replacement.is_ok:
raise modules.CancelWorkflowEvaluation()
else:
if not replacement.collection.populated:
raise modules.DelayedWorkflowEvaluation()
pending = False
for dataset_instance in replacement.dataset_instances:
if dataset_instance.is_pending:
pending = True
elif not dataset_instance.is_ok:
raise modules.CancelWorkflowEvaluation()
if pending:
raise modules.DelayedWorkflowEvaluation()
return replacement
def get_replacement_workflow_output(self, workflow_output):
step = workflow_output.workflow_step
output_name = workflow_output.output_name
step_outputs = self.outputs[step.id]
if step_outputs is STEP_OUTPUT_DELAYED:
delayed_why = f"depends on workflow output [{output_name}] but that output has not been created yet"
raise modules.DelayedWorkflowEvaluation(why=delayed_why)
else:
return step_outputs[output_name]
def set_outputs_for_input(self, invocation_step, outputs=None, already_persisted=False):
step = invocation_step.workflow_step
if outputs is None:
outputs = {}
if self.inputs_by_step_id:
step_id = step.id
if step_id not in self.inputs_by_step_id and 'output' not in outputs:
default_value = step.input_default_value
if default_value:
outputs['output'] = default_value
else:
template = "Step with id %s not found in inputs_step_id (%s)"
message = template % (step.log_str(), self.inputs_by_step_id)
raise ValueError(message)
elif step_id in self.inputs_by_step_id:
outputs['output'] = self.inputs_by_step_id[step_id]
self.set_step_outputs(invocation_step, outputs, already_persisted=already_persisted)
def set_step_outputs(self, invocation_step, outputs, already_persisted=False):
step = invocation_step.workflow_step
if invocation_step.output_value:
outputs[invocation_step.output_value.workflow_output.output_name] = invocation_step.output_value.value
self.outputs[step.id] = outputs
if not already_persisted:
workflow_outputs_by_name = {wo.output_name: wo for wo in step.workflow_outputs}
for output_name, output_object in outputs.items():
if hasattr(output_object, "history_content_type"):
invocation_step.add_output(output_name, output_object)
else:
# Add this non-data, non workflow-output output to the workflow outputs.
# This is required for recovering the output in the next scheduling iteration,
# and should be replaced with a WorkflowInvocationStepOutputValue ASAP.
if not workflow_outputs_by_name.get(output_name) and not output_object == modules.NO_REPLACEMENT:
workflow_output = model.WorkflowOutput(step, output_name=output_name)
step.workflow_outputs.append(workflow_output)
for workflow_output in step.workflow_outputs:
output_name = workflow_output.output_name
if output_name not in outputs:
message = f"Failed to find expected workflow output [{output_name}] in step outputs [{outputs}]"
# raise KeyError(message)
# Pre-18.01 we would have never even detected this output wasn't configured
# and even in 18.01 we don't have a way to tell the user something bad is
# happening so I guess we just log a debug message and continue sadly for now.
# Once https://github.com/galaxyproject/galaxy/issues/5142 is complete we could
# at least tell the user what happened, give them a warning.
log.debug(message)
continue
output = outputs[output_name]
self._record_workflow_output(
step,
workflow_output,
output=output,
)
def _record_workflow_output(self, step, workflow_output, output):
self.workflow_invocation.add_output(workflow_output, step, output)
def mark_step_outputs_delayed(self, step, why=None):
if why:
message = f"Marking step {step.id} outputs of invocation {self.workflow_invocation.id} delayed ({why})"
log.debug(message)
self.outputs[step.id] = STEP_OUTPUT_DELAYED
def _subworkflow_invocation(self, step):
workflow_invocation = self.workflow_invocation
subworkflow_invocation = workflow_invocation.get_subworkflow_invocation_for_step(step)
if subworkflow_invocation is None:
raise Exception(f"Failed to find persisted workflow invocation for step [{step.id}]")
return subworkflow_invocation
def subworkflow_invoker(self, trans, step, use_cached_job=False):
subworkflow_invocation = self._subworkflow_invocation(step)
workflow_run_config = workflow_request_to_run_config(trans, subworkflow_invocation)
subworkflow_progress = self.subworkflow_progress(subworkflow_invocation, step, workflow_run_config.param_map)
subworkflow_invocation = subworkflow_progress.workflow_invocation
return WorkflowInvoker(
trans,
workflow=subworkflow_invocation.workflow,
workflow_run_config=workflow_run_config,
progress=subworkflow_progress,
)
def subworkflow_progress(self, subworkflow_invocation, step, param_map):
subworkflow = subworkflow_invocation.workflow
subworkflow_inputs = {}
for input_subworkflow_step in subworkflow.input_steps:
connection_found = False
for input_connection in step.input_connections:
if input_connection.input_subworkflow_step == input_subworkflow_step:
subworkflow_step_id = input_subworkflow_step.id
is_data = input_connection.output_step.type != "parameter_input"
replacement = self.replacement_for_connection(
input_connection,
is_data=is_data,
)
subworkflow_inputs[subworkflow_step_id] = replacement
connection_found = True
break
if not connection_found:
raise Exception("Could not find connections for all subworkflow inputs.")
return WorkflowProgress(
subworkflow_invocation,
subworkflow_inputs,
self.module_injector,
param_map=param_map
)
def _recover_mapping(self, step_invocation):
try:
step_invocation.workflow_step.module.recover_mapping(step_invocation, self)
except modules.DelayedWorkflowEvaluation as de:
self.mark_step_outputs_delayed(step_invocation.workflow_step, de.why)
__all__ = ('queue_invoke', 'WorkflowRunConfig')
|
StarcoderdataPython
|
9767089
|
<filename>backend_utils/permissions.py
"""
@copyright Copyright (c) 2013
@author <NAME> (@asullom)
@package utils
Description: Components to control user permissions by role and the
permissions over the information that the user has been assigned to
"""
import logging
log = logging.getLogger(__name__)
from rest_framework import permissions
from django.utils.translation import ugettext as _ # , ungettext
from .logs import log_params
class ModelPermission(permissions.BasePermission):
"""
    Validates either against queryset.model for DB-backed models (case 1)
    or against permission_replace_by_model for "virtual" models (case 2).
    Usage::
        Case 1: in permission_classes add ModelPermission:
        permission_classes = [ModelPermission]
        Case 2: use permission_replace_by_model in the form:
        permission_classes = [ModelPermission]
        permission_replace_by_model = 'app_label.model'
    HEAD doesn't require a specific permission, but the user must be authenticated
    GET and OPTIONS map to list
    POST maps to add
    PUT and PATCH map to change
    DELETE maps to delete
    Important: for custom permissions see MiPermission and its usage in
@api_view(['GET'])
@permission_classes((permissions.IsAuthenticated, MiPermission, ))
def load_menu(request, format=None):
"""
perms_map = {
'GET': ['%(app_label)s.list_%(model_name)s'],
'OPTIONS': ['%(app_label)s.list_%(model_name)s'],
'HEAD': [],
'POST': ['%(app_label)s.add_%(model_name)s'],
'PUT': ['%(app_label)s.change_%(model_name)s'],
'PATCH': ['%(app_label)s.change_%(model_name)s'],
'DELETE': ['%(app_label)s.delete_%(model_name)s'],
}
def get_required_virtual_permissions(self, method, model_cls):
"""
Given a virtual model and an HTTP method, return the list of permission
codes that the user is required to have.
"""
app_label, model_name = model_cls.split('.', 1)
kwargs = {
'app_label': app_label,
'model_name': model_name
}
return [perm % kwargs for perm in self.perms_map[method]]
def get_required_permissions(self, method, model_cls):
"""
Given a model and an HTTP method, return the list of permission
codes that the user is required to have.
"""
kwargs = {
'app_label': model_cls._meta.app_label,
'model_name': model_cls._meta.model_name
}
return [perm % kwargs for perm in self.perms_map[method]]
def has_permission(self, request, view):
if not request.user.is_authenticated():
return False # raise PermissionDenied # 403.html
#print ("view.action :", view.action)
#print ("permissions.SAFE_METHODS:", permissions.SAFE_METHODS)
print ("request.method:", request.method)
# if request.method in permissions.SAFE_METHODS:
        #     return True
# return True
# if getattr(view, '_ignore_model_permissions', False):
# return True
if hasattr(view, 'permission_replace_by_model'):
model = view.permission_replace_by_model
print ("permission_replace_by_model:", model)
perms = self.get_required_virtual_permissions(
request.method, model
)
else:
if hasattr(view, 'get_queryset'):
queryset = view.get_queryset()
else:
queryset = getattr(view, 'queryset', None)
assert queryset is not None, (
'Cannot apply ModelPermissions on a view that '
'does not set `.queryset` or have a `.get_queryset()` method. '
'Add permission_replace_by_model = \'app_label.model_name\' '
' variable to APIView class'
)
print ("queryset.model:", queryset.model)
perms = self.get_required_permissions(
request.method, queryset.model
)
print ("perms:", perms)
if request.user.has_perms(perms):
return True
else:
log.info(
_('Permission denied. You don\'t have permission to %s.'
) % (perms),
extra=log_params(request)
)
return False
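

# Minimal usage sketch (illustration only; the view, model, and 'reports.summary'
# label below are hypothetical). With a queryset, the permission expands through
# perms_map, e.g. a GET on a 'blog.Article' queryset requires 'blog.list_article';
# without a queryset, permission_replace_by_model supplies the 'app_label.model' pair:
#
#     class ArticleViewSet(viewsets.ModelViewSet):
#         queryset = Article.objects.all()
#         serializer_class = ArticleSerializer
#         permission_classes = [ModelPermission]
#
#     class ReportView(APIView):
#         permission_classes = [ModelPermission]
#         permission_replace_by_model = 'reports.summary'
def _demo_required_permissions():
    perm = ModelPermission()
    # A POST against the virtual 'reports.summary' model expands to 'reports.add_summary'.
    return perm.get_required_virtual_permissions('POST', 'reports.summary')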
|
StarcoderdataPython
|
8059329
|
<gh_stars>10-100
#
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# Menu for impact modules
import click
from dorothy.main import dorothy_shell
from dorothy.modules.defense_evasion import (
change_app_state,
change_rule_state,
change_zone_state,
change_policy_state,
modify_policy,
modify_policy_rule,
modify_zone,
)
from dorothy.modules.persistence import change_user_state
@dorothy_shell.subshell(name="impact")
@click.pass_context
def impact(ctx):
"""Modules to interrupt components of the Okta environment"""
# Reuse a few commands from defense_evasion
impact.add_command(change_user_state.change_user_state)
impact.add_command(change_app_state.change_app_state)
impact.add_command(change_rule_state.change_rule_state)
impact.add_command(change_zone_state.change_zone_state)
impact.add_command(change_policy_state.change_policy_state)
impact.add_command(modify_policy.modify_policy)
impact.add_command(modify_policy_rule.modify_policy_rule)
impact.add_command(modify_zone.modify_zone)
|
StarcoderdataPython
|
8180225
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin
from app import application
db = SQLAlchemy(application)
bcrypt = Bcrypt(application)
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(75), nullable=False, unique=True)
email = db.Column(db.String(120), unique=True, nullable=False)
zipcode = db.Column(db.Integer, nullable=False)
password = db.Column(db.String(300), nullable=False)
inhaler = db.Column(db.Integer)
meds = db.Column(db.Integer)
lng = db.Column(db.Float)
lat = db.Column(db.Float)
def set_password(self, password):
self.password = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.password, password)
def __repr__(self):
return '<User %r>' % self.username
class Treatment(UserMixin, db.Model):
__tablename__ = 'treatment'
id = db.Column(db.Integer, primary_key=True)
userid = db.Column(db.Integer, nullable=False)
timestamp = db.Column(db.BigInteger, nullable=False)
lng = db.Column(db.Float)
lat = db.Column(db.Float)
treatment = db.Column(db.String(100), nullable=False)
accuracy = db.Column(db.Integer)
zipcode = db.Column(db.String(10))
|
StarcoderdataPython
|
8170016
|
"""SA Connections Runner (SAIR)
SAIR processes Data Connections in *_CONNECTION tables
"""
import fire
from multiprocessing import Pool
from datetime import datetime
import importlib
import json
from types import GeneratorType
import yaml
from runners.helpers import db, log, vault
from runners.config import RUN_ID, DC_METADATA_TABLE, DC_POOLSIZE
def connection_run(connection_table):
table_name = connection_table['name']
table_comment = connection_table['comment']
log.info(f"-- START DC {table_name} --")
try:
metadata = {'START_TIME': datetime.utcnow()}
options = yaml.load(table_comment) or {}
if 'module' in options:
module = options['module']
metadata.update(
{
'RUN_ID': RUN_ID,
'TYPE': module,
'LANDING_TABLE': table_name,
'INGEST_COUNT': 0,
}
)
connector = importlib.import_module(f"connectors.{module}")
for module_option in connector.CONNECTION_OPTIONS:
name = module_option['name']
if module_option.get('secret') and name in options:
options[name] = vault.decrypt_if_encrypted(options[name])
if module_option.get('type') == 'json':
options[name] = json.loads(options[name])
if module_option.get('type') == 'list':
if type(options[name]) is str:
options[name] = options[name].split(',')
if module_option.get('type') == 'int':
options[name] = int(options[name])
if callable(getattr(connector, 'ingest', None)):
ingested = connector.ingest(table_name, options)
if isinstance(ingested, int):
metadata['INGEST_COUNT'] += ingested
elif isinstance(ingested, GeneratorType):
for n in ingested:
metadata['INGEST_COUNT'] += n
else:
metadata['INGESTED'] = ingested
db.record_metadata(metadata, table=DC_METADATA_TABLE)
except Exception as e:
log.error(f"Error loading logs into {table_name}: ", e)
db.record_metadata(metadata, table=DC_METADATA_TABLE, e=e)
log.info(f"-- END DC --")
def main(connection_table="%_CONNECTION"):
tables = list(db.fetch(f"SHOW TABLES LIKE '{connection_table}' IN data"))
if len(tables) == 1:
connection_run(tables[0])
else:
Pool(DC_POOLSIZE).map(connection_run, tables)
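

# Minimal sketch (illustration only, not used by the runner): connection_run()
# is driven by YAML stored in the table comment. The module name 'okta_logs'
# and the option keys below are hypothetical placeholders; secret options would
# additionally pass through vault.decrypt_if_encrypted.
def _demo_parse_table_comment():
    sample_comment = (
        "module: okta_logs\n"
        "subdomain: example\n"
        "batch_size: 500\n"
    )
    options = yaml.safe_load(sample_comment)
    # connection_run() would then import connectors.okta_logs, coerce option
    # types per CONNECTION_OPTIONS, and call its ingest(table_name, options).
    return options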
if __name__ == "__main__":
fire.Fire(main)
|
StarcoderdataPython
|
1920702
|
a=input("left or right?")
if(a=="right"):
b=input("swim or wait? ")
if(b=="swim"):
c=input("colour = ")
if(c=="yellow"):
print("win")
else:
print("Game over")
elif(b=="wait"):
print("Game over")
elif(a=="left"):
print("Game over")
|
StarcoderdataPython
|
3551516
|
from gordon.utils_tests import BaseIntegrationTest, BaseBuildTest
from gordon.utils import valid_cloudformation_name
from gordon import utils
class IntegrationTest(BaseIntegrationTest):
def test_0001_project(self):
self._test_project_step('0001_project')
self.assert_stack_succeed('p')
self.assert_stack_succeed('r')
lambda_ = self.get_lambda(utils.valid_cloudformation_name('pyexample:pyexample'))
self.assertEqual(lambda_['Runtime'], 'python2.7')
self.assertEqual(lambda_['Description'], 'My description')
self.assertEqual(lambda_['MemorySize'], 192)
self.assertEqual(lambda_['Timeout'], 123)
aliases = self.get_lambda_aliases(function_name=lambda_['FunctionName'])
self.assertEqual(list(aliases.keys()), ['current'])
response = self.invoke_lambda(
function_name=lambda_['FunctionName'],
payload={'key1': 'hello'}
)
self.assert_lambda_response(response, 'hello')
def test_0002_project(self):
self._test_project_step('0002_project')
self.assert_stack_succeed('p')
self.assert_stack_succeed('r')
lambda_ = self.get_lambda(utils.valid_cloudformation_name('pyexample:pyexample'))
self.assertEqual(lambda_['Runtime'], 'python2.7')
self.assertEqual(lambda_['Description'], 'My second description')
self.assertEqual(lambda_['MemorySize'], 256)
self.assertEqual(lambda_['Timeout'], 199)
aliases = self.get_lambda_aliases(function_name=lambda_['FunctionName'])
self.assertEqual(list(aliases.keys()), ['current'])
response = self.invoke_lambda(
function_name=lambda_['FunctionName'],
payload={'key1': 'hello', 'key2': 'bye'}
)
self.assert_lambda_response(response, 'bye')
class BuildTest(BaseBuildTest):
def test_0001_project(self):
self._test_project_step('0001_project')
self.assertBuild('0001_project', '0001_p.json')
self.assertBuild('0001_project', '0002_pr_r.json')
self.assertBuild('0001_project', '0003_r.json')
def test_0002_project(self):
self._test_project_step('0002_project')
self.assertBuild('0002_project', '0001_p.json')
self.assertBuild('0002_project', '0002_pr_r.json')
self.assertBuild('0002_project', '0003_r.json')
|
StarcoderdataPython
|
1968381
|
from covertutils.handlers import BufferingHandler
from covertutils.orchestration import Orchestrator
from covertutils.bridges import SimpleBridge
from time import sleep
from functools import wraps
try :
from queue import Queue
except ImportError:
from Queue import Queue
def handlerCallbackHook( instance, on_chunk_function, orch_id ) :
# print( "In the Hook" )
@wraps(on_chunk_function)
def wrapper( *args, **kwargs ) :
# print( "In the Wrapper" )
stream, message = args
pseudo_stream = "%s:%s" % (orch_id, stream)
if message :
# print( stream, message )
# print args
instance.onMessage( pseudo_stream, message )
else :
instance.onChunk( pseudo_stream, message )
on_chunk_function( *args, **kwargs ) # Not honoring return values
return on_chunk_function
return wrapper
class MultiHandler( BufferingHandler ) :
"""
A class that aggregates multiple :class:`BaseHandler` parented objects, to support parallel session handling.
It supports the standard :meth:`onMessage` API of the original :class:`BaseHandler` objects, as well as methods for dispatching `messages` en-masse.
"""
class __NullOrchestrator(Orchestrator) :
def readyMessage( self, message, stream ) :
# print "ready"
assert False == True # This is dummy, dead code
return "%s:%s" % (stream, message)
def depositChunk( self, chunk ) :
# print "deposit"
assert False == True # This is dummy, dead code
stream, message = chunk.split(':',1)
return stream, message
def start(self) : pass
    def nullSend( self, message, stream ) : print( "nullSend" )
# def onChunk( self, stream, message ) : pass
# def onNotRecognised( self ) : pass
def __init__( self, handlers, **kw ) :
        assert isinstance(handlers, list)
def send_internal(raw) :
print "++++++++Send internal run+++++"
assert False == True # This is dummy, dead code
def recv_internal() :
print "=========recv internal run======="
assert False == True # This is dummy, dead code
return None
orch = MultiHandler.__NullOrchestrator("", 0)
super(MultiHandler, self).__init__(recv_internal, send_internal, orch, **kw)
# self.preferred_send = self.nullSend
self.handlers = {}
for handler in handlers :
self.addHandler(handler)
def resolveStream( self, stream_alias ) :
orch_id, stream = stream_alias.split(':',1)
handler = self.handlers[orch_id]['handler']
return handler, stream
def preferred_send( self, message, stream ) :
handler, stream = self.resolveStream( stream )
handler.preferred_send( message, stream )
print "RUNI"
def queueSend( self, message, stream ) :
pass
def dispatch( self, orch_ids, stream, message ) :
for orch_id in orch_ids :
handler = self.handlers[orch_id]['handler']
handler.preferred_send( message, stream )
def dispatchTo( self, orch_id, stream, message ) :
self.dispatch( [orch_id], stream, message )
def dispatchAll( self, message, stream = 'control' ) : # Make it look for hard_streams when stream = None
for orch_id in self.handlers.keys() :
if stream in self.handlers[orch_id]['streams'] :
handler = self.handlers[orch_id]['handler']
handler.preferred_send( message, stream )
#
# def sendTo( self, orch_id, message, stream = 'control', local = True ) : # Make it look for hard_streams when stream = None
# for orch_id_ in self.handlers.keys() :
# handler = self.handlers[orch_id]['handler']
# if local :
# orch = handler.getOrchestrator()
# orch_to_check = orch.getIdentity()
# else :
# for x,y in zip(orch_id, orch_id_) :
# if z
# # if orch.checkIdentity(orch_id) ==
#
# if stream in self.handlers[orch_id]['streams'] :
# handler = self.handlers[orch_id]['handler']
# handler.preferred_send( message, stream )
def getHandler(self, orch_id) :
return self.handlers[orch_id]['handler']
def getAllHandlers(self) :
return [self.handlers[o_id]['handler'] for o_id in self.handlers.keys()]
def addStream( self, stream ) :
for orch_id in self.getOrchestratorIDs() :
self.__add_stream(orch_id, stream)
def __add_stream(self, orch_id, stream) :
self.handlers[orch_id]['streams'].append(stream)
pseudo_stream = "%s:%s" % (orch_id, stream) # not used
self.getOrchestrator().addStream(pseudo_stream)
def addHandler(self, handler) :
orch_id = handler.getOrchestrator().getIdentity()
self.handlers[orch_id] = {}
self.handlers[orch_id]['streams'] = []
buffered_handler = BufferingHandler.bufferize_handler_obj(handler)
# self.handlers[orch_id]['bridge'] = SimpleBridge( self, buffered_handler )
self.handlers[orch_id]['handler'] = buffered_handler
buffered_handler.onChunk = handlerCallbackHook( self, buffered_handler.onChunk, orch_id )
for stream in handler.getOrchestrator().getStreams() :
# print pseudo_stream
self.__add_stream(orch_id, stream)
def getOrchestratorIDs(self) :
return self.handlers.keys()
|
StarcoderdataPython
|
4967809
|
import numpy as np
import os
fs = open('similary.txt', 'w')
ff = open('featu.txt', 'w')
xdat = np.load('xdat.npy')
for x in xdat:
for d in x[:512]:
ff.write('%f '%d)
ff.write('\n')
for d in x[512:]:
fs.write('%f '%d)
fs.write('\n')
fs.close()
ff.close()
|
StarcoderdataPython
|
3213742
|
<gh_stars>0
#!/usr/bin/env python
import os
import sys
from setuptools import Command, find_packages, setup
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
version_file = os.path.join(
BASE_DIR,
'kata_test_framework/version.txt'
)
class VersionCommand(Command):
description = "generate version number, and write to version file"
user_options = [
('in-place', 'i', 'edit file in-place'),
]
def run(self):
try:
from setuptools_scm import get_version
except ImportError:
sys.stderr.write("[FAILED] this command requires 'setuptools_scm'"
" to be installed!\n")
sys.exit(1)
else:
if self.in_place:
version = get_version(root=BASE_DIR, write_to=version_file)
sys.stdout.write("[DONE] write version %r to %r\n"
% (version, version_file))
else:
version = get_version(root=BASE_DIR)
sys.stdout.write("%r\n" % version)
def initialize_options(self):
self.in_place = None
def finalize_options(self):
self.in_place = False if self.in_place is None else True
install_requires = [
]
with open(version_file, "r") as fd:
version = fd.readline().strip()
if not version:
raise RuntimeError("Cannot find version information")
setup(
name="kata-test-framework",
version=version,
author="sveinchen",
author_email="<EMAIL>",
url="https://sveinchen.github.io/kata-test-framework",
packages=find_packages(include=['kata_test_framework',
'kata_test_framework.*']),
entry_points={
'console_scripts': [
'kata-test = kata_test_framework.runner2:run',
],
},
install_requires=install_requires,
include_package_data=True,
zip_safe=False,
cmdclass={
'version': VersionCommand,
},
)
|
StarcoderdataPython
|
4876297
|
from agent import Agent
from board import Game
# Allows a human to play against AlphaZero
class HumanAgent(Agent):
def update_board(self, board : Game):
self.board = board
def pick_move(self):
valid_action = True
try:
action = int(input('Choose a column to place the token (0 to 6): '))
if action not in self.board.available_moves():
valid_action = False
except ValueError:
valid_action = False
while valid_action == False:
print('The chosen action is not possible.')
try:
action = int(input('Please, choose a valid column: '))
if action in self.board.available_moves():
valid_action = True
except ValueError:
continue
return action
|
StarcoderdataPython
|
1690
|
<reponame>FreesiaLikesPomelo/-offer
'''
Interview question 37: Serialize a binary tree
Implement two functions to serialize and deserialize a binary tree.
Example:
You can serialize the following binary tree:
    1
   / \
  2   3
     / \
    4   5
as "[1,2,3,null,null,4,5]"
'''
from typing import List


# Definition for a binary tree node (normally provided by the judge; defined
# here so the annotations below resolve when this file runs standalone).
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
# Runtime: 240 ms, beats 22.75% of Python3 submissions
# Memory usage: 31 MB, beats 100.00% of Python3 submissions
class Codec:
def __init__(self):
self.tree = []
self.temp = []
        self.flag = 1  # when a non-None element appears again, self.temp is flushed into self.tree
def traByLayer(self, tree: List[TreeNode]):
if tree==[]:
return
else:
temp = tree.pop(0)
if temp!=None:
self.tree+=self.temp
self.temp = []
self.tree.append(temp.val)
tree.append(temp.left)
tree.append(temp.right)
else:
self.temp.append(None)
#print("trabylary",self.tree)
self.traByLayer(tree)
def serialize(self, root):
"""Encodes a tree to a single string.
:type root: TreeNode
:rtype: str
"""
if root==None:
return ''
tree = [root]
self.traByLayer(tree)
print(str(self.tree))
return str(self.tree)
def deserialize(self, data):
"""Decodes your encoded data to tree.
:type data: str
:rtype: TreeNode
"""
#data = '[1, 2, 3, 1, 3, 2, 4]'
if data=='':
return None
start = 0
end = 0
tree = []
for i in range(len(data)):
if data[i]==',' or data[i]==']':
start = end+1
end = i
if data[start:end]!=' None':
#print(start,end,data[start:end])
tree.append(int(data[start:end]))
else:
tree.append(None)
#print("Tree",tree,"then build the Tree")
root = TreeNode(tree.pop(0))
self.buildTreeByList([root],tree)
return root
def buildTreeByList(self,r:List[TreeNode], data: List[int]):
if r==[] or data==[]:
return
root = r.pop(0)
datalen = len(data)
if datalen==0:
return
elif datalen<=2:
#print("root",root.val,"tree",data,"datalen",datalen)
temp = data.pop(0)
if temp!=None:
root.left = TreeNode(temp)
r.append(root.left)
if data!=[]:
temp = data.pop(0)
if temp!=None:
root.right = TreeNode(temp)
r.append(root.right)
return
else:
#print("root",root.val,"tree",data,"datalen",datalen)
temp = data.pop(0)
if temp!=None:
root.left = TreeNode(temp)
r.append(root.left)
temp = data.pop(0)
if temp!=None:
root.right = TreeNode(temp)
r.append(root.right)
self.buildTreeByList(r,data)
# Your Codec object will be instantiated and called as such:
# codec = Codec()
# codec.deserialize(codec.serialize(root))
|
StarcoderdataPython
|
1685681
|
# coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for recent commit controllers."""
from __future__ import absolute_import
from __future__ import unicode_literals
from core import feconf
from core import python_utils
from core.platform import models
from core.tests import test_utils
(exp_models,) = models.Registry.import_models([models.NAMES.exploration])
class RecentCommitsHandlerUnitTests(test_utils.GenericTestBase):
"""Test the RecentCommitsHandler class."""
def setUp(self):
super(RecentCommitsHandlerUnitTests, self).setUp()
self.signup(self.MODERATOR_EMAIL, self.MODERATOR_USERNAME)
self.set_moderators([self.MODERATOR_USERNAME])
self.signup(self.VIEWER_EMAIL, self.VIEWER_USERNAME)
self.committer_1_id = self.get_user_id_from_email(self.VIEWER_EMAIL)
self.signup(self.NEW_USER_EMAIL, self.NEW_USER_USERNAME)
self.committer_2_id = self.get_user_id_from_email(self.NEW_USER_EMAIL)
commit1 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 0, self.committer_1_id, 'create',
'created first commit', [], 'public', True)
commit2 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_1', 1, self.committer_2_id, 'edit', 'edited commit', [],
'public', True)
commit3 = exp_models.ExplorationCommitLogEntryModel.create(
'entity_2', 0, self.committer_1_id, 'create',
'created second commit', [], 'private', False)
commit1.exploration_id = 'exp_1'
commit2.exploration_id = 'exp_1'
commit3.exploration_id = 'exp_2'
commit1.update_timestamps()
commit1.put()
commit2.update_timestamps()
commit2.put()
commit3.update_timestamps()
commit3.put()
def test_get_recent_commits(self):
"""Test that this method should return all nonprivate commits."""
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['results']), 2)
self.assertDictContainsSubset(
{'username': self.VIEWER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 0,
'commit_message': 'created first commit',
'commit_type': 'create'},
response_dict['results'][1])
self.assertDictContainsSubset(
{'username': self.NEW_USER_USERNAME, 'exploration_id': 'exp_1',
'post_commit_status': 'public', 'version': 1,
'commit_message': 'edited commit',
'commit_type': 'edit'},
response_dict['results'][0])
self.logout()
def test_get_recent_commits_explorations(self):
"""Test that the response dict contains the correct exploration."""
self.login(self.MODERATOR_EMAIL)
self.save_new_default_exploration(
'exp_1', 'owner0', title='MyExploration')
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(len(response_dict['exp_ids_to_exp_data']), 1)
self.assertEqual(
response_dict['exp_ids_to_exp_data']['exp_1']['title'],
'MyExploration')
self.logout()
def test_get_recent_commits_three_pages_with_cursor(self):
self.login(self.MODERATOR_EMAIL)
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertFalse(response_dict['more'])
for i in python_utils.RANGE(feconf.COMMIT_LIST_PAGE_SIZE * 2):
entity_id = 'my_entity_%s' % i
exp_id = 'exp_%s' % i
commit_i = exp_models.ExplorationCommitLogEntryModel.create(
entity_id, 0, self.committer_2_id, 'create', 'created commit',
[], 'public', True)
commit_i.exploration_id = exp_id
commit_i.update_timestamps()
commit_i.put()
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'all_non_private_commits'})
self.assertEqual(
len(response_dict['results']), feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertEqual(
len(response_dict['results']),
feconf.COMMIT_LIST_PAGE_SIZE)
self.assertTrue(response_dict['more'])
cursor = response_dict['cursor']
response_dict = self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={
'query_type': 'all_non_private_commits',
'cursor': cursor
})
self.assertFalse(response_dict['more'])
self.assertEqual(len(response_dict['results']), 2)
self.logout()
def test_get_recent_commits_with_invalid_query_type_returns_404_status(
self):
self.login(self.MODERATOR_EMAIL)
self.get_json(
feconf.RECENT_COMMITS_DATA_URL,
params={'query_type': 'invalid_query_type'},
expected_status_int=404)
self.logout()
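# Illustrative sketch (not part of the Oppia test suite above): one way a
# caller of this handler could walk every page of non-private commits by
# following the returned cursor. `get_json` is assumed to behave like the
# test helper used above; the endpoint and the 'results'/'more'/'cursor'
# response keys are taken from the tests themselves.
def fetch_all_non_private_commits(get_json):
    """Collects the results from every page of the recent-commits endpoint."""
    results = []
    params = {'query_type': 'all_non_private_commits'}
    while True:
        page = get_json(feconf.RECENT_COMMITS_DATA_URL, params=params)
        results.extend(page['results'])
        if not page['more']:
            return results
        params['cursor'] = page['cursor']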
|
StarcoderdataPython
|
1884737
|
# -*- coding: utf-8 -*-
# Disable doc-string warning for test files
# pylint: disable=C0111
# pylint: disable=unused-import
import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', r"src")))
import zipkintrace
|
StarcoderdataPython
|
9665205
|
from django.shortcuts import render
# Create your views here.
from django_redis import get_redis_connection
from rest_framework import status
from rest_framework.generics import RetrieveAPIView, ListAPIView
from rest_framework.response import Response
from rest_framework.views import APIView
from goods.models import SKU
from users.models import User
from users.serializers import RegisterUserSerializer, UserCenterInfoSerializer, UserEmailInfoSerializer, \
AddUserBrowsingHistorySerializer, SKUSerializer
from users.serializers import AddressSerializer
from users.utils import check_token
"""
The front end sends the username to the back end, and we check whether that username is already registered
"""
# APIView: the base class
# GenericAPIView: adds generic support for list and detail views, usually used together with mixins
# ListAPIView, RetrieveAPIView: ready-made generic views
class RegisterUsernameCountAPIView(APIView):
def get(self,request,username):
count = User.objects.filter(username=username).count()
# Assemble the response data
context= {
'count':count,
'username':username
}
return Response(context)
class RegisterPhoneCountAPIView(APIView):
"""
Count how many users have registered with a given mobile number
GET: /users/phones/(?P<mobile>1[345789]\d{9})/count/
"""
def get(self,request,mobile):
# Query the model to count users with this mobile number
count = User.objects.filter(mobile=mobile).count()
# Assemble the response data
context = {
'count':count,
'phone':mobile
}
return Response(context)
"""
1. Receive the data
2. Validate the data
3. Save the data to the database
4. Return the response
POST /users/register/
"""
class RegisterUserAPIView(APIView):
def post(self,request):
# 1. Receive the data
data=request.data
# 2. Validate the data
serializer=RegisterUserSerializer(data=data)
serializer.is_valid(raise_exception=True)
# 3. Save the data to the database
serializer.save()
# 4. Return the response
# Serialization: convert the model instance to JSON
"""
How does serialization work here?
The serializer looks up each of its declared fields on the model; if the serializer declares a field the model does not have, an error is raised.
Fields marked write_only on the serializer are skipped during serialization.
"""
return Response(serializer.data)
"""
After a user registers successfully, log them in automatically.
Auto-login means that when registration succeeds, an extra token must be added to the returned data.
1. Add the token during serialization
2. How is the token generated?
"""
"""
Profile info for the user centre; the user must be logged in.
1. Have the front end pass the user's identity
2. Use that information to fetch the user
3. Convert the object to dictionary data
4. Return the response
GET /users/infos/
"""
from rest_framework.permissions import IsAuthenticated
# class UserCenterInfoAPIView(APIView):
# permission_classes = [IsAuthenticated]
# def get(self,request):
# # 1. Get the current user
# user = request.user
# # 2. Convert the model instance to a dict
# serializer = UserCenterInfoSerializer(user)
# # 3. Return the response
# return Response(serializer.data)
class UserCenterInfoAPIView(RetrieveAPIView):
permission_classes = [IsAuthenticated]
serializer_class = UserCenterInfoSerializer
# The existing parent-class implementation does not meet our needs, so override it
def get_object(self):
return self.request.user
"""
When the user enters an email address and clicks save:
1. We send the email to the back end, which updates the email field of that user
2. The back end also sends an activation link to that address
3. When the user clicks the activation link, the email_active flag is updated
Back-end steps when the email is submitted:
# 1. Receive the email address
# 2. Validate it
# 3. Update the data
# 4. Return the response
PUT /users/emails/
"""
# APIView: the base class
# GenericAPIView: adds generic support for list and detail views, usually used together with mixins
# UpdateAPIView: ready-made generic view
# class UserEmailInfoAPIView(APIView):
# permission_classes = [IsAuthenticated]
# def put(self,request):
# # 1. Receive the email address
# data = request.data
# # 2. Validate, updating this particular user instance
# serializer = UserEmailInfoSerializer(instance=request.user,data=data)
# serializer.is_valid(raise_exception=True)
# # 3. Update the data
# serializer.save()
# # 4. Return the response
# return Response(serializer.data)
from rest_framework.generics import UpdateAPIView
class UserEmailInfoAPIView(UpdateAPIView):
permission_classes = [IsAuthenticated]
serializer_class = UserEmailInfoSerializer
# The parent-class method does not meet our needs
def get_object(self):
return self.request.user
"""
Activation requirement:
When the user clicks the activation link, the front end receives the token
and then sends a request that carries that token.
1. Receive the token
2. Decode/verify the token
3. Use the recovered user_id to look up the user
4. Update the activation status
5. Return the response
GET /users/emails/verification/
"""
class UserEmailVerificationAPIView(APIView):
def get(self,request):
# 1. Get the token
token = request.query_params.get('token')
if token is None:
return Response(status=status.HTTP_400_BAD_REQUEST)
# 2. Decode/verify the token
user_id = check_token(token)
if user_id is None:
return Response(status=status.HTTP_400_BAD_REQUEST)
# 3. Use the recovered user_id to look up the user
user = User.objects.get(pk=user_id)
# 4. Update the activation status
user.email_active = True
user.save()
# 5. Return the response
return Response({'msg':'ok'})
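# `check_token` is imported from users.utils and is not shown in this file. A
# common implementation (an assumption on my part, using the older itsdangerous
# API) verifies a signed, expiring token and returns the embedded user id:
#
# from itsdangerous import TimedJSONWebSignatureSerializer, BadData
# from django.conf import settings
#
# def check_token(token):
#     serializer = TimedJSONWebSignatureSerializer(settings.SECRET_KEY, expires_in=3600)
#     try:
#         data = serializer.loads(token)
#     except BadData:
#         return None
#     return data.get('user_id')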
"""
Add a new address
1. Receive the data
2. Validate the data
3. Save it to the database
4. Return the response
POST /users/addresses/
"""
from rest_framework.generics import CreateAPIView
# class UserAddressesAPIView(CreateAPIView):
# serializer_class = AddressSerializer
# queryset is not needed when only creating data
from rest_framework import mixins
from rest_framework.viewsets import GenericViewSet
from rest_framework.decorators import action
from users.serializers import AddressTitleSerializer
class AddressViewSet(mixins.ListModelMixin,mixins.CreateModelMixin,mixins.UpdateModelMixin,GenericViewSet):
"""
Create and update user addresses
list GET: /users/addresses/
create POST: /users/addresses/
destroy DELETE: /users/addresses/
action PUT: /users/addresses/pk/status/
action PUT: /users/addresses/pk/title/
"""
# Specify the serializer
serializer_class = AddressSerializer
# Require an authenticated user
permission_classes = [IsAuthenticated]
# Addresses can be soft-deleted, so the queryset must be filtered accordingly
def get_queryset(self):
return self.request.user.addresses.filter(is_deleted=False)
def create(self, request, *args, **kwargs):
"""
Save the user's address data
"""
count = request.user.addresses.count()
if count >= 20:
return Response({'message': 'The maximum number of saved addresses has been reached'}, status=status.HTTP_400_BAD_REQUEST)
return super().create(request,*args,**kwargs)
def list(self, request, *args, **kwargs):
"""
Return the user's address list
"""
# Get all addresses
queryset = self.get_queryset()
# Create the serializer
serializer = self.get_serializer(queryset, many=True)
user = self.request.user
# Respond
return Response({
'user_id': user.id,
'default_address_id': user.default_address_id,
'limit': 20,
'addresses': serializer.data,
})
def destroy(self, request, *args, **kwargs):
"""
Handle deletion (soft delete)
"""
address = self.get_object()
# Perform a logical (soft) delete
address.is_deleted = True
address.save()
return Response(status=status.HTTP_204_NO_CONTENT)
@action(methods=['put'], detail=True)
def title(self, request, pk=None, address_id=None):
"""
Update the address title
"""
address = self.get_object()
serializer = AddressTitleSerializer(instance=address, data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(serializer.data)
@action(methods=['put'], detail=True)
def status(self, request, pk=None, address_id=None):
"""
Set the default address
"""
address = self.get_object()
request.user.default_address = address
request.user.save()
return Response({'message': 'OK'}, status=status.HTTP_200_OK)
"""
Business logic for adding a browsing-history record
1. Receive the product (SKU) id
2. Validate the data
3. Save the data to the database
4. Return the response
post /users/histories/
"""
class UserHistoryAPIView(CreateAPIView):
permission_classes = [IsAuthenticated]
serializer_class = AddUserBrowsingHistorySerializer
"""
Fetch the browsing history (GET)
"""
def get(self,request):
user = request.user
# Fetch the ids from redis
redis_conn = get_redis_connection('history')
ids = redis_conn.lrange('history_%s'%user.id,0,4)
# Query the SKUs by id
# skus = SKU.objects.filter(id__in=ids)  # would not preserve the browsing order
skus = []
for id in ids:
sku = SKU.objects.get(pk=id)
skus.append(sku)
serializer = SKUSerializer(skus,many=True)
return Response(serializer.data)
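# Alternative sketch (not used above): fetch all SKUs in a single query and
# restore the Redis ordering in Python instead of issuing one query per id.
# The ids returned by lrange are bytes, hence the int() conversion.
#
# sku_map = {sku.id: sku for sku in SKU.objects.filter(id__in=ids)}
# skus = [sku_map[int(i)] for i in ids if int(i) in sku_map]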
# Merge the cart on login: customised login view
from rest_framework_jwt.views import ObtainJSONWebToken
from carts.utils import merge_cookie_to_redis
class MergeLoginAPIView(ObtainJSONWebToken):
def post(self, request, *args, **kwargs):
# Call the JWT extension's method to validate the login credentials
response = super().post(request)
# If the user logged in successfully, merge the cart data
serializer = self.get_serializer(data=request.data)
if serializer.is_valid():
# The user logged in successfully
user = serializer.validated_data.get("user")
# Merge the shopping cart
# merge_cart_cookie_to_redis(request, user, response)
response = merge_cookie_to_redis(request, user, response)
return response
|
StarcoderdataPython
|
3562296
|
# coding: utf-8
"""
Transaction Management Bus (TMB) API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: V3.2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class TpoDataDTOsSharedPatientDTO(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id_type': 'str',
'member_id': 'str',
'e_health_id': 'str',
'nationality': 'str',
'national_id_number': 'str',
'first_name': 'str',
'second_name': 'str',
'third_name': 'str',
'last_name': 'str',
'gender': 'str',
'date_of_birth': 'str',
'dob_hijri': 'str',
'contact_number': 'str',
'email': 'str',
'blood_group': 'str',
'preferred_language': 'str'
}
attribute_map = {
'id_type': 'IdType',
'member_id': 'MemberID',
'e_health_id': 'EHealthID',
'nationality': 'Nationality',
'national_id_number': 'NationalIDNumber',
'first_name': 'FirstName',
'second_name': 'SecondName',
'third_name': 'ThirdName',
'last_name': 'LastName',
'gender': 'Gender',
'date_of_birth': 'DateOfBirth',
'dob_hijri': 'DOBHijri',
'contact_number': 'ContactNumber',
'email': 'Email',
'blood_group': 'BloodGroup',
'preferred_language': 'PreferredLanguage'
}
def __init__(self, id_type=None, member_id=None, e_health_id=None, nationality=None, national_id_number=None,
first_name=None, second_name=None, third_name=None, last_name=None, gender=None, date_of_birth=None,
dob_hijri=None, contact_number=None, email=None, blood_group=None,
preferred_language=None): # noqa: E501
"""TpoDataDTOsSharedPatientDTO - a model defined in Swagger""" # noqa: E501
self._id_type = None
self._member_id = None
self._e_health_id = None
self._nationality = None
self._national_id_number = None
self._first_name = None
self._second_name = None
self._third_name = None
self._last_name = None
self._gender = None
self._date_of_birth = None
self._dob_hijri = None
self._contact_number = None
self._email = None
self._blood_group = None
self._preferred_language = None
self.discriminator = None
if id_type is not None:
self.id_type = id_type
self.member_id = member_id
if e_health_id is not None:
self.e_health_id = e_health_id
if nationality is not None:
self.nationality = nationality
self.national_id_number = national_id_number
self.first_name = first_name
if second_name is not None:
self.second_name = second_name
if third_name is not None:
self.third_name = third_name
if last_name is not None:
self.last_name = last_name
self.gender = gender
self.date_of_birth = date_of_birth
if dob_hijri is not None:
self.dob_hijri = dob_hijri
self.contact_number = contact_number
if email is not None:
self.email = email
if blood_group is not None:
self.blood_group = blood_group
self.preferred_language = preferred_language
@property
def id_type(self):
"""Gets the id_type of this TpoDataDTOsSharedPatientDTO. # noqa: E501
IdType Can Be Only: D005 = Resident Card D004 = Passport D002 = GCC ID D003 = National Card D001 = Boarder Number # noqa: E501
:return: The id_type of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._id_type
@id_type.setter
def id_type(self, id_type):
"""Sets the id_type of this TpoDataDTOsSharedPatientDTO.
IdType Can Be Only: D005 = Resident Card D004 = Passport D002 = GCC ID D003 = National Card D001 = Boarder Number # noqa: E501
:param id_type: The id_type of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
self._id_type = id_type
@property
def member_id(self):
"""Gets the member_id of this TpoDataDTOsSharedPatientDTO. # noqa: E501
In the case of an insurance patient: The patient's insurance member number # noqa: E501
:return: The member_id of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._member_id
@member_id.setter
def member_id(self, member_id):
"""Sets the member_id of this TpoDataDTOsSharedPatientDTO.
In the case of an insurance patient: The patient's insurance member number # noqa: E501
:param member_id: The member_id of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
if member_id is None:
raise ValueError("Invalid value for `member_id`, must not be `None`") # noqa: E501
self._member_id = member_id
@property
def e_health_id(self):
"""Gets the e_health_id of this TpoDataDTOsSharedPatientDTO. # noqa: E501
E-health id. # noqa: E501
:return: The e_health_id of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._e_health_id
@e_health_id.setter
def e_health_id(self, e_health_id):
"""Sets the e_health_id of this TpoDataDTOsSharedPatientDTO.
E-health id. # noqa: E501
:param e_health_id: The e_health_id of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
self._e_health_id = e_health_id
@property
def nationality(self):
"""Gets the nationality of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Patient nationality # noqa: E501
:return: The nationality of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._nationality
@nationality.setter
def nationality(self, nationality):
"""Sets the nationality of this TpoDataDTOsSharedPatientDTO.
Patient nationality # noqa: E501
:param nationality: The nationality of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
self._nationality = nationality
@property
def national_id_number(self):
"""Gets the national_id_number of this TpoDataDTOsSharedPatientDTO. # noqa: E501
The unique number the government assigns to a citizen # noqa: E501
:return: The national_id_number of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._national_id_number
@national_id_number.setter
def national_id_number(self, national_id_number):
"""Sets the national_id_number of this TpoDataDTOsSharedPatientDTO.
The unique number the government assigns to a citizen # noqa: E501
:param national_id_number: The national_id_number of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
if national_id_number is None:
raise ValueError("Invalid value for `national_id_number`, must not be `None`") # noqa: E501
self._national_id_number = national_id_number
@property
def first_name(self):
"""Gets the first_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Patient first name. # noqa: E501
:return: The first_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._first_name
@first_name.setter
def first_name(self, first_name):
"""Sets the first_name of this TpoDataDTOsSharedPatientDTO.
Patient first name. # noqa: E501
:param first_name: The first_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
if first_name is None:
raise ValueError("Invalid value for `first_name`, must not be `None`") # noqa: E501
self._first_name = first_name
@property
def second_name(self):
"""Gets the second_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Patient second name. # noqa: E501
:return: The second_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._second_name
@second_name.setter
def second_name(self, second_name):
"""Sets the second_name of this TpoDataDTOsSharedPatientDTO.
Patient second name. # noqa: E501
:param second_name: The second_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
self._second_name = second_name
@property
def third_name(self):
"""Gets the third_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Patient third name. # noqa: E501
:return: The third_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._third_name
@third_name.setter
def third_name(self, third_name):
"""Sets the third_name of this TpoDataDTOsSharedPatientDTO.
Patient third name. # noqa: E501
:param third_name: The third_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
self._third_name = third_name
@property
def last_name(self):
"""Gets the last_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Patient last name. # noqa: E501
:return: The last_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._last_name
@last_name.setter
def last_name(self, last_name):
"""Sets the last_name of this TpoDataDTOsSharedPatientDTO.
Patient last name. # noqa: E501
:param last_name: The last_name of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
self._last_name = last_name
@property
def gender(self):
"""Gets the gender of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Patient gender Values: Male Female Other # noqa: E501
:return: The gender of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._gender
@gender.setter
def gender(self, gender):
"""Sets the gender of this TpoDataDTOsSharedPatientDTO.
Patient gender Values: Male Female Other # noqa: E501
:param gender: The gender of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
if gender is None:
raise ValueError("Invalid value for `gender`, must not be `None`") # noqa: E501
self._gender = gender
@property
def date_of_birth(self):
"""Gets the date_of_birth of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Is the date on which a person was born or is officially deemed to have been born. # noqa: E501
:return: The date_of_birth of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._date_of_birth
@date_of_birth.setter
def date_of_birth(self, date_of_birth):
"""Sets the date_of_birth of this TpoDataDTOsSharedPatientDTO.
Is the date on which a person was born or is officially deemed to have been born. # noqa: E501
:param date_of_birth: The date_of_birth of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
if date_of_birth is None:
raise ValueError("Invalid value for `date_of_birth`, must not be `None`") # noqa: E501
self._date_of_birth = date_of_birth
@property
def dob_hijri(self):
"""Gets the dob_hijri of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Is the date in hijri format (dd/MM/yyyy) on which a person was born or is officially deemed to have been born. # noqa: E501
:return: The dob_hijri of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._dob_hijri
@dob_hijri.setter
def dob_hijri(self, dob_hijri):
"""Sets the dob_hijri of this TpoDataDTOsSharedPatientDTO.
Is the date in hijri format (dd/MM/yyyy) on which a person was born or is officially deemed to have been born. # noqa: E501
:param dob_hijri: The dob_hijri of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
self._dob_hijri = dob_hijri
@property
def contact_number(self):
"""Gets the contact_number of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Patient contact number # noqa: E501
:return: The contact_number of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._contact_number
@contact_number.setter
def contact_number(self, contact_number):
"""Sets the contact_number of this TpoDataDTOsSharedPatientDTO.
Patient contact number # noqa: E501
:param contact_number: The contact_number of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
if contact_number is None:
raise ValueError("Invalid value for `contact_number`, must not be `None`") # noqa: E501
self._contact_number = contact_number
@property
def email(self):
"""Gets the email of this TpoDataDTOsSharedPatientDTO. # noqa: E501
The personal email address of the patient # noqa: E501
:return: The email of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._email
@email.setter
def email(self, email):
"""Sets the email of this TpoDataDTOsSharedPatientDTO.
The personal email address of the patient # noqa: E501
:param email: The email of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
self._email = email
@property
def blood_group(self):
"""Gets the blood_group of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Patient blood group Values: O- O+ A+ A- B+ B- AB+ AB- # noqa: E501
:return: The blood_group of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._blood_group
@blood_group.setter
def blood_group(self, blood_group):
"""Sets the blood_group of this TpoDataDTOsSharedPatientDTO.
Patient blood group Values: O- O+ A+ A- B+ B- AB+ AB- # noqa: E501
:param blood_group: The blood_group of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
self._blood_group = blood_group
@property
def preferred_language(self):
"""Gets the preferred_language of this TpoDataDTOsSharedPatientDTO. # noqa: E501
Preferred language. Values: En for English Ar for Arabic # noqa: E501
:return: The preferred_language of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:rtype: str
"""
return self._preferred_language
@preferred_language.setter
def preferred_language(self, preferred_language):
"""Sets the preferred_language of this TpoDataDTOsSharedPatientDTO.
Preferred language. Values: En for English Ar for Arabic # noqa: E501
:param preferred_language: The preferred_language of this TpoDataDTOsSharedPatientDTO. # noqa: E501
:type: str
"""
if preferred_language is None:
raise ValueError("Invalid value for `preferred_language`, must not be `None`") # noqa: E501
self._preferred_language = preferred_language
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(TpoDataDTOsSharedPatientDTO, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TpoDataDTOsSharedPatientDTO):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
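# Brief usage sketch (not part of the generated code; all values are made up,
# and only the fields the constructor requires are shown):
#
# patient = TpoDataDTOsSharedPatientDTO(
#     member_id='M-0001', national_id_number='1234567890',
#     first_name='Test', gender='Male', date_of_birth='1990-01-01',
#     contact_number='0500000000', preferred_language='En')
# print(patient.to_dict())   # keys follow the attribute names in swagger_types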
|
StarcoderdataPython
|
11250921
|
import sys
from PyQt5 import QtWidgets as qtw
from PyQt5 import QtCore as qtc
# from PyQt5 import QtGui as qtg
def crit_bonus(crit_rate: float, crit_dmg: float):
return 1 + ((crit_rate / 100) * (crit_dmg / 100))
def defense(player_level: int, enemy_level: int, defense_drop: float = 1):
return (100 + player_level) / ((100 + player_level) + (100 + enemy_level) * defense_drop)
def eff_atk(atk, dmg):
return atk * (1 + (dmg / 100))
def damage_on_crit(eff_attack, crit_dmg, ability, resistance, total_defense):
return eff_attack * (ability / 100) * (1 + (crit_dmg / 100)) * total_defense * (1 - (resistance / 100))
def damage_on_non_crit(eff_attack, ability, resistance, total_defense):
return eff_attack * (ability / 100) * total_defense * (1 - (resistance / 100))
def average_damage(eff_attack, total_crit_bonus, ability, resistance, total_defense):
return eff_attack * (ability / 100) * total_crit_bonus * total_defense * (1 - (resistance / 100))
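# Worked example (illustrative numbers only) of how the formulas above combine.
# With atk=2000, dmg=61.6%, ability=200%, crit_rate=60%, crit_dmg=120%,
# player_level=90, enemy_level=90, resistance=10%:
#   eff_atk(2000, 61.6)        -> 2000 * 1.616                  = 3232.0
#   crit_bonus(60, 120)        -> 1 + 0.6 * 1.2                 = 1.72
#   defense(90, 90)            -> 190 / (190 + 190)             = 0.5
#   damage_on_non_crit(...)    -> 3232 * 2.0 * 0.5 * 0.9        = 2908.8
#   damage_on_crit(...)        -> 3232 * 2.0 * 2.2 * 0.5 * 0.9  = 6399.36
#   average_damage(...)        -> 3232 * 2.0 * 1.72 * 0.5 * 0.9 ≈ 5003.14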
class MainApp(qtw.QApplication):
def __init__(self, argv):
super().__init__(argv)
self.main_window = MainWindow()
self.calc_window = CalcWindow()
self.weapon_compare_window = WeaponCompareWindow()
self.calc_result_window = CalcResultWindow()
self.main_window.show()
self.main_window.calculation_requested.connect(self.calc_window.show)
self.main_window.weapon_comparison_requested.connect(self.weapon_compare_window.show)
self.calc_window.submitted.connect(self.calc_result_window.initial_logic)
class MainWindow(qtw.QWidget):
calculation_requested = qtc.pyqtSignal()
weapon_comparison_requested = qtc.pyqtSignal()
def __init__(self):
super().__init__()
self.setWindowTitle("Genshin Impact Calculators")
self.setMinimumSize(260, 260)
self.calc_button = qtw.QPushButton("Calculate Damage")
self.weapon_compare_button = qtw.QPushButton("Compare Weapons")
self.setLayout(qtw.QGridLayout())
self.layout().addWidget(self.calc_button)
self.layout().addWidget(self.weapon_compare_button)
self.calc_button.clicked.connect(self.calculation_requested.emit)
self.weapon_compare_button.clicked.connect(self.weapon_comparison_requested.emit)
# noinspection PyArgumentList
class WeaponCompareWindow(qtw.QWidget):
def __init__(self):
super().__init__()
self.setWindowTitle("Genshin Weapon Comparison")
self.setMinimumSize(580, 440)
self.frame1 = qtw.QFrame()
self.frame1.setGeometry(qtc.QRect(320, 150, 118, 3))
self.frame1.setFrameShape(qtw.QFrame.VLine)
self.frame1.setFrameShadow(qtw.QFrame.Sunken)
self.frame2 = qtw.QFrame()
self.frame2.setGeometry(qtc.QRect(1, 1, 1, 1))
self.frame2.setFrameShape(qtw.QFrame.VLine)
self.frame2.setFrameShadow(qtw.QFrame.Sunken)
self.character_atk_spinbox = qtw.QSpinBox(maximum=99999)
self.character_asc_spinbox = qtw.QDoubleSpinBox(maximum=9999, suffix="%")
self.character_ability_spinbox = qtw.QDoubleSpinBox(maximum=9999, suffix="%")
self.character_level_spinbox = qtw.QSpinBox(maximum=99999)
self.enemy_level_spinbox = qtw.QSpinBox(maximum=99999)
self.resistance_spinbox = qtw.QDoubleSpinBox(maximum=999, suffix="%")
self.character_asc_stat = qtw.QComboBox()
self.character_resonance = qtw.QComboBox()
self.artifact_dmg_set_bonus_type = qtw.QComboBox()
self.character_asc_stat.addItems(["None", "ATK%", "Crit Rate%", "Crit Dmg%", "Physical%", "Elemental%"])
self.character_resonance.addItems(["None", "Anemo", "Cryo", "Electro", "Geo", "Hydro", "Pyro"])
self.artifact_dmg_set_bonus_type.addItems(["None", "Dmg%", "Skill%", "Burst%", "Physical%", "Elemental%",
"Normal Attack%", "Charged Attack%", "Normal/Charged Attack%"])
self.artifact_atk_spinbox = qtw.QSpinBox(maximum=99999)
self.artifact_crit_rate_spinbox = qtw.QDoubleSpinBox(maximum=100, suffix="%")
self.artifact_crit_dmg_spinbox = qtw.QDoubleSpinBox(maximum=9999, suffix="%")
self.artifact_dmg_set_bonus_spinbox = qtw.QDoubleSpinBox(maximum=9999, suffix="%")
self.artifact_dmg_spinbox = qtw.QDoubleSpinBox(maximum=9999, suffix="%")
self.atk_set_bonus = qtw.QCheckBox("+18% ATK%")
self.crit_rate_set_bonus = qtw.QCheckBox("+12% Crit Rate%")
self.dmg_set_bonus = qtw.QCheckBox("Dmg% Set Bonus: ")
self.number_of_weapons = qtw.QSpinBox(maximum=50)
self.weapon_number_submit = qtw.QPushButton("Apply")
self.submit_button = qtw.QPushButton("Submit", styleSheet="font: bold")
self.upper_spacer = qtw.QSpacerItem(0, 0, qtw.QSizePolicy.Minimum, qtw.QSizePolicy.Expanding)
self.character_stat_layout = qtw.QGridLayout()
self.character_stat_layout.addWidget(qtw.QLabel("Character", styleSheet="font: bold"))
self.character_stat_layout.addWidget(qtw.QLabel("Character Base ATK:"), 1, 0)
self.character_stat_layout.addWidget(self.character_atk_spinbox, 1, 1)
self.character_stat_layout.addWidget(qtw.QLabel("Ability%:"), 2, 0)
self.character_stat_layout.addWidget(self.character_ability_spinbox, 2, 1)
self.character_stat_layout.addWidget(qtw.QLabel("Player Level:"), 3, 0)
self.character_stat_layout.addWidget(self.character_level_spinbox, 3, 1)
self.character_stat_layout.addWidget(qtw.QLabel("Enemy Level:"), 4, 0)
self.character_stat_layout.addWidget(self.enemy_level_spinbox, 4, 1)
self.character_stat_layout.addWidget(qtw.QLabel("Enemy Resistance:"), 5, 0)
self.character_stat_layout.addWidget(self.resistance_spinbox, 5, 1)
self.character_stat_layout.addWidget(qtw.QLabel("Resonance:"), 6, 0)
self.character_stat_layout.addWidget(self.character_resonance, 6, 1)
self.character_stat_layout.addWidget(qtw.QLabel("Ascension Stat:"), 7, 0)
self.character_stat_layout.addWidget(self.character_asc_stat, 7, 1)
self.character_stat_layout.addWidget(self.character_asc_spinbox, 7, 2)
self.artifact_stat_layout = qtw.QGridLayout()
self.artifact_stat_layout.addWidget(qtw.QLabel("Artifacts", styleSheet="font: bold"))
self.artifact_stat_layout.addWidget(qtw.QLabel("Total Artifact ATK:"), 1, 0)
self.artifact_stat_layout.addWidget(self.artifact_atk_spinbox, 1, 2, 1, 2)
self.artifact_stat_layout.addWidget(qtw.QLabel("Total Artifact Dmg%:"), 2, 0)
self.artifact_stat_layout.addWidget(self.artifact_dmg_spinbox, 2, 2, 1, 2)
self.artifact_stat_layout.addWidget(qtw.QLabel("Total Artifact Crit Rate%:"), 3, 0)
self.artifact_stat_layout.addWidget(self.artifact_crit_rate_spinbox, 3, 2, 1, 2)
self.artifact_stat_layout.addWidget(qtw.QLabel("Total Artifact Crit Dmg%:"), 4, 0)
self.artifact_stat_layout.addWidget(self.artifact_crit_dmg_spinbox, 4, 2, 1, 2)
self.artifact_stat_layout.addWidget(qtw.QLabel("Set Bonuses", styleSheet="font: bold"), 5, 0)
self.artifact_stat_layout.addWidget(self.atk_set_bonus, 6, 0)
self.artifact_stat_layout.addWidget(self.frame2, 6, 1, 2, 1)
self.artifact_stat_layout.addWidget(self.crit_rate_set_bonus, 6, 2)
self.artifact_stat_layout.addWidget(self.dmg_set_bonus, 7, 0)
self.artifact_stat_layout.addWidget(self.artifact_dmg_set_bonus_type, 8, 0)
self.artifact_stat_layout.addWidget(self.artifact_dmg_set_bonus_spinbox, 8, 2)
self.weapon_boxes = qtw.QWidget()
self.weapon_box_scrollArea = qtw.QScrollArea()
self.weapon_box_layout = qtw.QVBoxLayout()
self.weapon_box_scrollArea.setWidgetResizable(True)
self.weapon_box_scrollArea.setSizePolicy(qtw.QSizePolicy.Preferred, qtw.QSizePolicy.MinimumExpanding)
self.weapon_box_scrollArea.setWidget(self.weapon_boxes)
self.weapon_boxes.setLayout(self.weapon_box_layout)
self.weapon_box_scrollArea.setMinimumHeight(200)
self.weapon_box_scrollArea.setMaximumHeight(300)
self.setLayout(qtw.QGridLayout())
self.layout().addLayout(self.character_stat_layout, 0, 0, 1, 3)
self.layout().addWidget(self.frame1, 0, 3, 2, 1)
self.layout().addLayout(self.artifact_stat_layout, 0, 4, 2, 2)
self.layout().addWidget(qtw.QLabel("Number of Weapons:"), 1, 0)
self.layout().addWidget(self.number_of_weapons, 1, 1)
self.layout().addWidget(self.weapon_number_submit, 1, 2)
self.layout().addWidget(self.weapon_box_scrollArea, 2, 0, 1, 6)
self.layout().addWidget(self.submit_button, 3, 0, 1, 6)
self.layout().addItem(self.upper_spacer, 4, 0)
self.weapon_number_submit.clicked.connect(self.create_weapon_box)
self.submit_button.clicked.connect(self.submit_weapon)
def submit_weapon(self):
self.char_atk = int(self.character_atk_spinbox.text())
self.char_crit_rate = 5
self.char_crit_dmg = 50
self.ability = float(self.character_ability_spinbox.cleanText())
self.player_level = int(self.character_level_spinbox.text())
self.enemy_level = int(self.enemy_level_spinbox.text())
self.resistance = float(self.resistance_spinbox.cleanText())
self.resonance = self.character_resonance.currentText()
self.asc_type = self.character_asc_stat.currentText()
self.asc_num = float(self.character_asc_spinbox.cleanText())
self.art_atk = int(self.artifact_atk_spinbox.text())
self.art_dmg = float(self.artifact_dmg_spinbox.cleanText())
self.art_crit_rate = float(self.artifact_crit_rate_spinbox.cleanText())
self.art_crit_dmg = float(self.artifact_crit_dmg_spinbox.cleanText())
self.atk_set = self.atk_set_bonus.isChecked()
self.crit_set = self.crit_rate_set_bonus.isChecked()
self.dmg_set = self.dmg_set_bonus.isChecked()
self.dmg_set_type = self.artifact_dmg_set_bonus_type.currentText()
self.dmg_set_num = float(self.artifact_dmg_set_bonus_spinbox.cleanText())
self.interim_atk = self.char_atk + self.art_atk
if self.asc_type == "Physical%" or self.asc_type == "Elemental%":
self.interim_dmg = self.asc_num + self.art_dmg
self.asc_atk = 0
self.interim_crit_rate = self.art_crit_rate + self.char_crit_rate
self.interim_crit_dmg = self.art_crit_dmg + self.char_crit_dmg
elif self.asc_type == "ATK%":
self.interim_dmg = self.art_dmg
self.asc_atk = self.asc_num
self.interim_crit_rate = self.art_crit_rate + self.char_crit_rate
self.interim_crit_dmg = self.art_crit_dmg + self.char_crit_dmg
elif self.asc_type == "Crit Rate%":
self.interim_dmg = self.art_dmg
self.asc_atk = 0
self.interim_crit_rate = self.asc_num + self.art_crit_rate + self.char_crit_rate
self.interim_crit_dmg = self.art_crit_dmg + self.char_crit_dmg
elif self.asc_type == "Crit Dmg%":
self.interim_dmg = self.art_dmg
self.asc_atk = 0
self.interim_crit_rate = self.art_crit_rate + self.char_crit_rate
self.interim_crit_dmg = self.asc_num + self.art_crit_dmg + self.char_crit_dmg
else:
self.interim_dmg = self.art_dmg
self.asc_atk = 0
self.interim_crit_rate = self.art_crit_rate + self.char_crit_rate
self.interim_crit_dmg = self.art_crit_dmg + self.char_crit_dmg
if self.atk_set:
self.interim_atk_per = self.asc_atk + 18
else:
self.interim_atk_per = self.asc_atk
if self.crit_set:
self.second_interim_crit_rate = self.interim_crit_rate + 12
else:
self.second_interim_crit_rate = self.interim_crit_rate
if self.dmg_set:
self.second_interim_dmg = self.dmg_set_num + self.interim_dmg
else:
self.second_interim_dmg = self.interim_dmg
for i in range(self.weapon_box_layout.count()):
self.wep_name = self.weapon_box_layout.itemAt(i).widget().layout().itemAt(0).widget().text()
self.wep_atk = int(self.weapon_box_layout.itemAt(i).widget().layout().itemAt(2).widget().text())
self.wep_sec_stat = self.weapon_box_layout.itemAt(i).widget().layout().itemAt(5).widget().currentText()
self.wep_sec_num = float(self.weapon_box_layout.itemAt(i).widget().layout().itemAt(6).widget().cleanText())
print("Weapon Name: " + str(self.wep_name))
print("Weapon ATK: " + str(self.wep_atk))
print("Weapon Sec Stat: " + str(self.wep_sec_stat))
print("Weapon Sec Stat Value: " + str(self.wep_sec_num))
self.base_atk = self.wep_atk + self.char_atk
if self.wep_sec_stat == "ATK%":
print("Interim: " + str(self.interim_atk_per))
print("Weapon ATK%: " + str(self.wep_sec_num))
self.final_atk_per = self.interim_atk_per + self.wep_sec_num
self.final_crit_rate = self.second_interim_crit_rate
self.final_crit_dmg = self.interim_crit_dmg
self.final_dmg = self.second_interim_dmg
elif self.wep_sec_stat == "Crit Rate%":
self.final_atk_per = self.interim_atk_per
self.final_crit_rate = self.second_interim_crit_rate + self.wep_sec_num
self.final_crit_dmg = self.interim_crit_dmg
self.final_dmg = self.second_interim_dmg
elif self.wep_sec_stat == "Crit Dmg%":
self.final_atk_per = self.interim_atk_per
self.final_crit_rate = self.second_interim_crit_rate
self.final_crit_dmg = self.interim_crit_dmg + self.wep_sec_num
self.final_dmg = self.second_interim_dmg
elif self.wep_sec_stat == "Physical%" or self.wep_sec_stat == "Elemental%":
self.final_atk_per = self.interim_atk_per
self.final_crit_rate = self.second_interim_crit_rate
self.final_crit_dmg = self.interim_crit_dmg
self.final_dmg = self.second_interim_dmg + self.wep_sec_num
else:
self.final_atk_per = self.interim_atk_per
self.final_crit_rate = self.second_interim_crit_rate
self.final_crit_dmg = self.interim_crit_dmg
self.final_dmg = self.second_interim_dmg
self.total_atk = self.base_atk + self.art_atk + (self.base_atk * (self.final_atk_per/100))
print(self.total_atk)
self.eff_atk = eff_atk(self.total_atk, self.final_dmg)
self.total_crit_bonus = crit_bonus(self.final_crit_rate, self.final_crit_dmg)
self.total_defense = defense(self.player_level, self.enemy_level)
self.average_dmg = average_damage(self.eff_atk, self.total_crit_bonus, self.ability,
self.resistance, self.total_defense)
self.non_crit = damage_on_non_crit(self.eff_atk, self.ability, self.resistance, self.total_defense)
self.crit = damage_on_crit(self.eff_atk, self.final_crit_dmg, self.ability,
self.resistance, self.total_defense)
print("Crit Dmg%: " + str(self.final_crit_dmg))
print("Eff. ATK: " + str(self.eff_atk))
print("Tot. Crit: " + str(self.total_crit_bonus))
print("Tot. Defense: " + str(self.total_defense))
print("Average Dmg: " + str(self.average_dmg))
print("Non-Crit Dmg: " + str(self.non_crit))
print("Crit Dmg: " + str(self.crit))
def create_weapon_box(self):
for i in reversed(range(self.weapon_box_layout.count())):
self.weapon_box_layout.itemAt(i).widget().setParent(None)
for i in range(0, int(self.number_of_weapons.text())):
self.frame1 = qtw.QFrame()
self.frame1.setGeometry(qtc.QRect(320, 150, 118, 3))
self.frame1.setFrameShape(qtw.QFrame.VLine)
self.frame1.setFrameShadow(qtw.QFrame.Sunken)
self.weapon_box = qtw.QWidget()
self.weapon_name = qtw.QLineEdit(placeholderText="Weapon " + str(i + 1) + ": Enter Weapon Name")
self.weapon_atk_spinbox = qtw.QSpinBox(maximum=9999)
self.weapon_sub_stat_name = qtw.QComboBox(placeholderText="Choose Sub Stat")
self.weapon_sub_stat_name.addItems(["None", "ATK%", "Crit Rate%", "Crit Dmg%", "Physical%", "Elemental%"])
self.weapon_sub_stat_name.setCurrentIndex(0)
self.weapon_sub_spinbox = qtw.QDoubleSpinBox(maximum=9999, suffix="%")
self.weapon_box.setLayout(qtw.QGridLayout())
self.weapon_box.layout().addWidget(self.weapon_name, 0, 0, 1, 2)
self.weapon_box.layout().addWidget(qtw.QLabel("Weapon ATK: "), 1, 0)
self.weapon_box.layout().addWidget(self.weapon_atk_spinbox, 1, 1)
self.weapon_box.layout().addWidget(self.frame1, 0, 3, 2, 1)
self.weapon_box.layout().addWidget(qtw.QLabel("Weapon Sub Stat: "), 0, 4)
self.weapon_box.layout().addWidget(self.weapon_sub_stat_name, 1, 4)
self.weapon_box.layout().addWidget(self.weapon_sub_spinbox, 1, 5)
self.weapon_box.setStyleSheet(".QWidget {border: 1px solid black}")
self.weapon_box_layout.addWidget(self.weapon_box)
# noinspection PyArgumentList
class CalcWindow(qtw.QWidget):
submitted = qtc.pyqtSignal(int, float, float, float, float, int, int, float)
def __init__(self):
super().__init__()
self.setWindowTitle("Genshin Damage Calculator")
self.setMinimumSize(260, 260)
self.atk_input = qtw.QSpinBox(maximum=99999)
self.dmg_input = qtw.QDoubleSpinBox(maximum=99999, suffix="%")
self.ability_input = qtw.QDoubleSpinBox(maximum=99999, suffix="%")
self.crit_rate_input = qtw.QDoubleSpinBox(maximum=100, suffix="%")
self.crit_dmg_input = qtw.QDoubleSpinBox(maximum=99999, suffix="%")
self.player_level_input = qtw.QSpinBox(maximum=1000)
self.enemy_level_input = qtw.QSpinBox(maximum=1000)
self.enemy_resistance_input = qtw.QDoubleSpinBox(maximum=100, suffix="%")
self.submit_button = qtw.QPushButton("Submit")
self.main_layout = qtw.QVBoxLayout()
self.input_layout = qtw.QFormLayout()
self.input_layout.addRow("ATK:", self.atk_input)
self.input_layout.addRow("DMG%:", self.dmg_input)
self.input_layout.addRow("Ability%:", self.ability_input)
self.input_layout.addRow("Crit Rate%:", self.crit_rate_input)
self.input_layout.addRow("Crit DMG%:", self.crit_dmg_input)
self.input_layout.addRow("Player Level:", self.player_level_input)
self.input_layout.addRow("Enemy Level:", self.enemy_level_input)
self.input_layout.addRow("Enemy Resistance%:", self.enemy_resistance_input)
self.main_layout.addLayout(self.input_layout)
self.main_layout.addWidget(self.submit_button)
self.setLayout(self.main_layout)
self.submit_button.clicked.connect(self.submit_verification)
def error_box(self, error_title: str, error_message: str):
qtw.QMessageBox(parent=self,
text=error_message,
icon=qtw.QMessageBox.Warning,
windowTitle=error_title).exec_()
def convert_to_int(self, inputted_text, index):
if inputted_text != "":
try:
int(inputted_text)
except ValueError:
self.error_box("Invalid Input", "Please input a whole number for " +
self.input_layout.itemAt(index).widget().text()[:-1])
return False
else:
return True
else:
self.error_box("Invalid Input", "Please input a whole number for " +
self.input_layout.itemAt(index).widget().text()[:-1])
return False
def convert_to_float(self, inputted_text, index):
if inputted_text != "":
try:
float(inputted_text)
except ValueError:
self.error_box("Invalid Input", "Please input a number for " +
self.input_layout.itemAt(index).widget().text()[:-1])
return False
else:
return True
else:
self.error_box("Invalid Input", "Please input a number for " +
self.input_layout.itemAt(index).widget().text()[:-1])
return False
def submit_verification(self):
if (self.convert_to_int(self.atk_input.text(), 0) and
self.convert_to_float(self.dmg_input.cleanText(), 2) and
self.convert_to_float(self.ability_input.cleanText(), 4) and
self.convert_to_float(self.crit_rate_input.cleanText(), 6) and
self.convert_to_float(self.crit_dmg_input.cleanText(), 8) and
self.convert_to_int(self.player_level_input.cleanText(), 10) and
self.convert_to_int(self.enemy_level_input.cleanText(), 12) and
self.convert_to_float(self.enemy_resistance_input.cleanText(), 14)):
self.submitted.emit(int(self.atk_input.cleanText()), float(self.dmg_input.cleanText()),
float(self.ability_input.cleanText()), float(self.crit_rate_input.cleanText()),
float(self.crit_dmg_input.cleanText()), int(self.player_level_input.cleanText()),
int(self.enemy_level_input.cleanText()), float(self.enemy_resistance_input.cleanText()))
class CalcResultWindow(qtw.QWidget):
defense: float
crit_bonus: float
def __init__(self):
super().__init__()
self.setWindowTitle("Results")
self.setMinimumSize(300, 210)
self.setWindowModality(qtc.Qt.ApplicationModal)
self.atk_label = qtw.QLabel()
self.dmg_label = qtw.QLabel()
self.eff_atk_label = qtw.QLabel()
self.crit_rate_label = qtw.QLabel()
self.crit_dmg_label = qtw.QLabel()
self.crit_bonus_label = qtw.QLabel()
self.player_level_label = qtw.QLabel()
self.enemy_level_label = qtw.QLabel()
self.defense_label = qtw.QLabel()
self.enemy_resistance_label = qtw.QLabel()
self.ability_label = qtw.QLabel()
self.average_damage_label = qtw.QLabel()
self.average_damage_label_static = qtw.QLabel("Average Damage: ")
self.crit_damage_label = qtw.QLabel()
self.crit_damage_label_static = qtw.QLabel("Damage on Crit: ")
self.non_crit_damage_label = qtw.QLabel()
self.non_crit_damage_label_static = qtw.QLabel("Damage on Non-Crit: ")
self.frame1 = qtw.QFrame()
self.frame1.setGeometry(qtc.QRect(320, 150, 118, 3))
self.frame1.setFrameShape(qtw.QFrame.HLine)
self.frame1.setFrameShadow(qtw.QFrame.Sunken)
self.frame2 = qtw.QFrame()
self.frame2.setGeometry(qtc.QRect(320, 150, 118, 3))
self.frame2.setFrameShape(qtw.QFrame.HLine)
self.frame2.setFrameShadow(qtw.QFrame.Sunken)
self.eff_atk_layout = qtw.QFormLayout()
self.eff_atk_layout.addRow("ATK: ", self.atk_label)
self.eff_atk_layout.addRow("DMG%: ", self.dmg_label)
self.eff_atk_layout.addRow("Effective ATK: ", self.eff_atk_label)
self.crit_bonus_layout = qtw.QFormLayout()
self.crit_bonus_layout.addRow("Crit Rate%: ", self.crit_rate_label)
self.crit_bonus_layout.addRow("Crit DMG%: ", self.crit_dmg_label)
self.crit_bonus_layout.addRow("Total Crit Bonus: ", self.crit_bonus_label)
self.defense_layout = qtw.QFormLayout()
self.defense_layout.addRow("Player Level: ", self.player_level_label)
self.defense_layout.addRow("Enemy Level: ", self.enemy_level_label)
self.defense_layout.addRow("Enemy Defense: ", self.defense_label)
self.misc_stats_layout = qtw.QFormLayout()
self.misc_stats_layout.addRow("Enemy Resistance: ", self.enemy_resistance_label)
self.misc_stats_layout.addRow("Ability%: ", self.ability_label)
self.average_damage_layout = qtw.QGridLayout()
self.left_spacer = qtw.QSpacerItem(0, 0, qtw.QSizePolicy.Expanding, qtw.QSizePolicy.Minimum)
self.right_spacer = qtw.QSpacerItem(0, 0, qtw.QSizePolicy.Expanding, qtw.QSizePolicy.Minimum)
self.upper_spacer = qtw.QSpacerItem(0, 0, qtw.QSizePolicy.Minimum, qtw.QSizePolicy.Expanding)
self.average_damage_layout.addItem(self.left_spacer, 4, 0)
self.average_damage_layout.addItem(self.right_spacer, 4, 3)
self.average_damage_layout.addItem(self.upper_spacer, 4, 1)
self.average_damage_layout.addWidget(self.non_crit_damage_label_static, 0, 1)
self.average_damage_layout.addWidget(self.non_crit_damage_label, 0, 2)
self.average_damage_layout.addWidget(self.crit_damage_label_static, 1, 1)
self.average_damage_layout.addWidget(self.crit_damage_label, 1, 2)
self.average_damage_layout.addWidget(self.average_damage_label_static, 2, 1)
self.average_damage_layout.addWidget(self.average_damage_label, 2, 2)
self.setLayout(qtw.QGridLayout())
self.layout().addLayout(self.eff_atk_layout, 0, 0)
self.layout().addWidget(self.frame1, 1, 0, 1, 2)
self.layout().addLayout(self.crit_bonus_layout, 0, 1)
self.layout().addLayout(self.defense_layout, 2, 0)
self.layout().addLayout(self.misc_stats_layout, 2, 1)
self.layout().addWidget(self.frame2, 3, 0, 1, 2)
self.layout().addLayout(self.average_damage_layout, 4, 0, 1, 2)
@qtc.pyqtSlot(int, float, float, float, float, int, int, float)
def initial_logic(self, atk, dmg, ability, crit_rate, crit_dmg, player_level, enemy_level, enemy_resistance):
self.crit_bonus = crit_bonus(crit_rate, crit_dmg)
self.defense = defense(player_level, enemy_level)
self.eff_atk = eff_atk(atk, dmg)
self.average_damage = average_damage(self.eff_atk, self.crit_bonus, ability, enemy_resistance, self.defense)
self.on_crit_damage = damage_on_crit(self.eff_atk, crit_dmg, ability, enemy_resistance, self.defense)
self.on_non_crit_damage = damage_on_non_crit(self.eff_atk, ability, enemy_resistance, self.defense)
self.show()
self.atk_label.setText(str(atk))
self.dmg_label.setText("{:.2%}".format(dmg / 100))
self.ability_label.setText("{:.2%}".format(ability / 100))
self.crit_rate_label.setText("{:.2%}".format(crit_rate / 100))
self.crit_dmg_label.setText("{:.2%}".format(crit_dmg / 100))
self.player_level_label.setText(str(player_level))
self.enemy_level_label.setText(str(enemy_level))
self.enemy_resistance_label.setText("{:.2%}".format(enemy_resistance / 100))
self.crit_bonus_label.setText("{:.2%}".format(self.crit_bonus))
self.defense_label.setText("{:.2%}".format(self.defense))
self.eff_atk_label.setText("{:.2f}".format(self.eff_atk))
self.average_damage_label.setText("{:.2f}".format(self.average_damage))
self.crit_damage_label.setText("{:.2f}".format(self.on_crit_damage))
self.non_crit_damage_label.setText("{:.2f}".format(self.on_non_crit_damage))
if __name__ == '__main__':
app = MainApp(sys.argv)
sys.exit(app.exec_())
|
StarcoderdataPython
|
11275012
|
from django.contrib import admin
from mptt.admin import DraggableMPTTAdmin
from .models import Products, Category, Image, Review
import admin_thumbnails
from fieldsets_with_inlines import FieldsetsInlineMixin
class ProductImageInline(admin.TabularInline):
model = Image
readonly_fields = ('id',)
extra = 3
max_num = 10
class ProductsAdmin(FieldsetsInlineMixin, admin.ModelAdmin):
model = Products
list_display = ('title','category', 'price',
'discount_price', 'featured', 'created', 'status', 'image_tag')
list_display_links = ('title', 'price', 'created',)
list_editable = ('category', 'featured', 'status')
list_per_page = 8
search_fields = ('title', 'price', 'summary', 'description')
readonly_fields = ('image_tag', 'slug', 'created', 'modified', 'average_rate')
filter_horizontal = ( )
list_filter = ('created',)
inlines = [ProductImageInline]
fieldsets_with_inlines = (
("Product Head", {'fields': ('title', 'category',)}),
("Prodcut Base Image", {'fields': ('image',)}),
(ProductImageInline),
("Product Details", {
'fields': ('price', 'discount_price', 'description', 'additional_info', 'average_rate')}),
("Product Permission/Others",
{'fields': ('status','slug', 'featured', 'created', 'modified')}),
)
class CategroyAdmin(DraggableMPTTAdmin):
model = Category
mptt_indent_field = "title"
list_display = ('tree_actions', 'indented_title', 'title', 'parent', 'slug',
'related_products_count', 'related_products_cumulative_count',)
list_display_links = ('indented_title', 'parent', 'slug')
search_fields = ('title', 'parent__title', 'slug')
readonly_fields = ('slug', 'created', 'updated')
filter_horizontal = ()
list_filter = ('parent',)
fieldsets = (
("Category Details", {
'fields': ('title', 'parent', 'image', 'slug', 'created', 'updated')}),
)
def get_queryset(self, request):
query = super(CategroyAdmin, self).get_queryset(request)
query = Category.objects.add_related_count(
query,
Products,
'category',
'products_cumulative_count',
cumulative=True)
query = Category.objects.add_related_count(
query,
Products,
'category',
'products_count',
cumulative=False)
return query
def related_products_count(self, instance):
return instance.products_count
related_products_count.short_description = 'Products (In This Category)'
def related_products_cumulative_count(self, instance):
return instance.products_cumulative_count
related_products_cumulative_count.short_description = 'Products (In This Tree)'
@admin_thumbnails.thumbnail('images')
class ImageAdmin(admin.ModelAdmin):
model = Image
list_display = ['images', 'product_slug',
'short_description', 'images_thumbnail']
def product_slug(self, instance):
return instance.product.slug
product_slug.short_description = "Product Slug"
product_slug.admin_order_field = "product__slug"
admin.site.register(Review)
admin.site.register(Image, ImageAdmin)
admin.site.register(Products, ProductsAdmin)
admin.site.register(Category, CategroyAdmin)
|
StarcoderdataPython
|
9628243
|
# File: ampsim/tools/venner.py
"""
(c) MGH Center for Integrated Diagnostics
"""
from __future__ import print_function
from __future__ import absolute_import
import re
import os
from collections import namedtuple
import click
import pandas as pd
from pybedtools import BedTool
import matplotlib.pyplot as plt
from ampsim.utils import safe_divide, vcf_parser, get_vcfs
pd.set_option('display.expand_frame_repr', False) # to print the dataframe without breaking the row into multiple lines
plt.style.use('ggplot')
def calculate_target_size(path):
"""
Given a bed for target regions, calculate the total size of the regions (bp)
:param path: To a bed file
:return: Integer size
"""
size = 0
bed = BedTool(path)
for target in bed:
size += abs(target.start - target.end)
return size
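# Quick usage sketch (hypothetical path and intervals): for a BED file holding
# two targets, chr1:100-200 and chr1:300-450, this returns 100 + 150 = 250.
#
# targets_size = calculate_target_size('/path/to/targets.bed')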
def load_variants(truth_path, vcf_paths, allowed_callers):
"""
Load variant from the Truth set and all VCF files.
:param truth_path: BED file of the truth variant sets
:param vcf_paths: List of paths to the VCF files
:return: dataframe of variants and a list of caller column names
"""
columns = ['key', 'chrom', 'start', 'end', 'ref', 'alt', 'var_type', 'var_size', 'is_truth', 'fraction']
callers = {caller: False for caller in [os.path.basename(path).split(".")[-3].split('_')[0]
for path in vcf_paths] if caller in allowed_callers}
columns.extend(callers)
Variant = namedtuple('Variant', columns)
truth = {}
for vcf_path in vcf_paths:
caller = os.path.basename(vcf_path).split(".")[-3].split('_')[0]
if caller not in allowed_callers:
continue
fraction = re.findall(r"\d+\.\d+", os.path.basename(vcf_path))[0]
# fraction = os.path.basename(vcf_path).split("_")[0][:-4]
# load variant from the truth set
for record in vcf_parser(truth_path):
key, chrom, start, end, ref, alt, var_type, var_size = record
key = "_".join([chrom, str(start), ref, alt.sequence])
if fraction not in truth:
truth[fraction] = {}
if key not in truth[fraction]:
truth[fraction][key] = Variant(key, chrom, start, end, ref, alt, var_type, int(var_size),
is_truth=True, fraction=fraction, **callers)._asdict()
for record in vcf_parser(vcf_path):
key, chrom, start, end, ref, alt, var_type, var_size = record
if key in truth[fraction]:
truth[fraction][key][caller] = True
else:
truth[fraction][key] = Variant(key, chrom, start, end, ref, alt, var_type, abs(var_size),
is_truth=False, fraction=fraction, **callers)._asdict()
truth[fraction][key][caller] = True
holder = [truth[fraction][key] for fraction in truth for key in truth[fraction]]
df = pd.DataFrame.from_records(data=holder, columns=columns)
return df, callers
def calculate_true_negative(target_size, var_type, caller, df):
"""
The true negative are all nucleotide bases that don't overlap with truth set variants or false positive variants
:param target_size: Integer
:param var_type: List of variant types to include
:param caller: String caller name
:param df: dataframe
:return: Integer
"""
expected_variant_size = sum(df.loc[(df['var_type'].isin(var_type)) &
(df['is_truth'] == True)]['var_size'].tolist())
observed_fp_size = sum(df.loc[(df['var_type'].isin(var_type)) &
(df['is_truth'] == False) &
(df[caller] == True)]['var_size'].tolist())
return target_size - expected_variant_size - observed_fp_size
def caller_performance(df, target_size, callers, fractions=None, var_type=['SNV', 'DEL', 'INS', 'CNV_DEL', 'CNV_DUP']):
"""
Return the FP, FN, TP, TN, PPV, NPV, specificity, sensitivity, recall, precision
:param df: The variant including truth set
:param var_type: list of variant types snv, ins, del, cnv_del, cnv_dup
:param callers: list of caller column names in the dataframe
:param fractions: list of fractions to filter in
:return: DataFrame indexed by caller with tp, tn, fp, fn, total, sensitivity, specificity, ppv, npv, accuracy, precision, recall and f1
"""
columns = ['caller', 'fraction', 'tp', 'tn', 'fp', 'fn', 'total', 'sensitivity', 'specificity',
'ppv', 'npv', 'accuracy', 'precision', 'recall', 'f1']
performance_df = pd.DataFrame(columns=columns)
if fractions:
df = df.loc[df['fraction'].isin(fractions)]
fraction_name = ",".join([str(x) for x in fractions])
else:
fraction_name = 'all'
for caller in callers:
if caller not in df.columns:
continue
record = {k: 0. for k in columns}
record['caller'] = caller
record['fraction'] = fraction_name
record['tp'] = float(df.loc[(df['var_type'].isin(var_type)) & (df['is_truth'] == True) &
(df[caller] == True)].shape[0])
record['tn'] = calculate_true_negative(target_size, var_type, caller, df)
record['fp'] = float(df.loc[(df['var_type'].isin(var_type)) & (df['is_truth'] == False) &
(df[caller] == True)].shape[0])
record['fn'] = float(df.loc[(df['var_type'].isin(var_type)) & (df['is_truth'] == True) &
(df[caller] == False)].shape[0])
record['total'] = record['tp'] + record['tn'] + record['fp'] + record['fn']
record['sensitivity'] = safe_divide(record['tp'], (record['tp'] + record['fn']))
record['specificity'] = safe_divide(record['tn'], (record['tn'] + record['fp']))
record['ppv'] = safe_divide(record['tp'], (record['tp'] + record['fp']))
record['npv'] = safe_divide(record['tn'], (record['tn'] + record['fn']))
record['accuracy'] = safe_divide(record['tp'] + record['tn'], record['total'])
record['precision'] = safe_divide(record['tp'], record['tp'] + record['fp'])
record['recall'] = safe_divide(record['tp'], record['tp'] + record['fn'])
record['f1'] = safe_divide(2 * (record['precision'] * record['recall']),
(record['precision'] + record['recall']))
performance_df.loc[caller] = pd.Series(record)
return performance_df
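# Illustrative sanity check (made-up counts, not real data): with tp=90, fn=10,
# fp=20 and tn=880 the formulas above give
#   sensitivity = 90 / (90 + 10)    = 0.90
#   specificity = 880 / (880 + 20)  ≈ 0.978
#   ppv         = 90 / (90 + 20)    ≈ 0.818
#   npv         = 880 / (880 + 10)  ≈ 0.989
#   accuracy    = (90 + 880) / 1000 = 0.97
#   f1          = 2 * (0.818 * 0.90) / (0.818 + 0.90) ≈ 0.857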
def plot_performance_per_bin(df, callers, var_types, targets_size, output_png):
"""
Plot per-caller sensitivity at each ALT-fraction bin and save the figure to output_png.
:param df: Dataframe of all variants
:param callers: list of callers (each will have its own line)
:param var_types: list of variants to include
:param targets_size: Total size of the targets
:param output_png: Path to save the plot
:return: None
"""
results = {}
fractions = sorted(list(set(([float(x) for x in df['fraction'].tolist()]))))
for caller in callers:
results[caller] = {'sensitivity': {}, 'specificity': {}}
for fraction in fractions:
caller_performance_df = caller_performance(df, targets_size, [caller], [str(fraction)], var_types)
results[caller]['sensitivity'][fraction] = caller_performance_df.loc[caller]['sensitivity']
results[caller]['specificity'][fraction] = caller_performance_df.loc[caller]['specificity']
results_df = pd.DataFrame.from_dict(data=results, orient='index')
# plotting
plt.figure(figsize=(7, 5), dpi=80)
plt.title("Callers sensitivity {0} variants ({1})".format(output_png.split("/")[-1][:-4], ",".join(var_types)),
fontsize=14)
plt.xlabel("ALT fractions", fontsize=12)
plt.ylabel("Sensitivity", fontsize=12) # with error profile # error-free
for caller in callers:
fractions = [fraction for fraction in fractions if fraction <= 1.0]
y_values = [results_df.loc[caller]['sensitivity'][fraction] for fraction in fractions]
if caller == 'ensemble':
linestyle = '--'
else:
linestyle = '-'
plt.plot(fractions, y_values, linestyle=linestyle, linewidth=2, label=caller)
plt.legend(fontsize=11, loc='best')
plt.tight_layout()
plt.savefig(output_png)
plt.show()
@click.group(invoke_without_command=True)
@click.option('--truth-path', '-t', help='Path to the variant truth set')
@click.option('--target-bed', '-b', help='Path to the target BED file')
@click.option('--cider-path', '-c', help='Path to cider output to get all vcf files at once')
@click.option('--vcf-paths', '-v', multiple=True, help='Path to the VCF paths (multiple files)')
@click.option('--var-types', '-r', default='SNV,INS,DEL', help='Comma-separated list of variant types (SNV, INS, DEL). Default is SNV,INS,DEL.')
@click.option('--output-png', '-o', help='Path to save the results')
def cli(truth_path=None, target_bed=None, cider_path=None, vcf_paths=None, var_types=None, output_png=None):
var_types = var_types.split(",")
allowed_callers = ['lofreq', 'gatk', 'mutect', 'mutect2', 'ensembler']#, 'hotspotter']
targets_size = calculate_target_size(target_bed)
vcf_paths = get_vcfs(cider_path) # TODO remove this in production
df, callers = load_variants(truth_path, vcf_paths, allowed_callers)
print(df.to_csv(sep='\t'))
    fractions = sorted(set(float(x) for x in df['fraction']))
for fraction in fractions:
results_df = caller_performance(df, targets_size, allowed_callers, fractions=[str(fraction)], var_type=var_types)
print(results_df.to_csv(sep='\t'))
# results_df = caller_performance(df, targets_size, allowed_callers, var_type=var_types)
# print(results_df.to_csv(sep='\t'))
plot_performance_per_bin(df, callers, var_types, targets_size, output_png)
if __name__ == '__main__':
cli()
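# Hypothetical invocation of the click command above (option names come from the
# decorators; the script name and file paths are placeholders):
#
#     python this_script.py -t truth.vcf -b targets.bed -c cider_output_dir \
#         -r SNV,INS,DEL -o sensitivity.png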
|
StarcoderdataPython
|
3576370
|
<reponame>avendesta/vulnhub<filename>flaskapp/blog.py
from flask import Flask, request, jsonify, send_from_directory, redirect, url_for
from flask_pymongo import PyMongo
from forms import RegistrationForm, LoginForm, RequestForm
from flask_jwt_extended import JWTManager, jwt_required, create_access_token,get_jwt_identity
app = Flask(__name__,static_folder='static')
app.config['MONGO_DBNAME'] = "flaskapp"
app.config['MONGO_URI'] = "mongodb://localhost:27017/flaskapp"
#check https://flask-jwt-extended.readthedocs.io/en/stable/options/ for configuration options of jwt
app.config['WTF_CSRF_ENABLED'] = False
app.config['JWT_ACCESS_TOKEN_EXPIRES'] = 60*30 # 30 minutes
app.config['JWT_TOKEN_LOCATION'] = ('json','headers')
app.config['JWT_ERROR_MESSAGE_KEY'] = 'access_token'
app.config['JWT_SECRET_KEY'] = 'qwerty123456'
app.config['JWT_HEADER_TYPE'] = ''
app.config['JWT_HEADER_NAME'] = 'access_token'
mongo = PyMongo(app)
jwt = JWTManager(app)
@app.route("/")
def index():
return jsonify(status='up')
@app.route("/api/challenge")
def base():
return jsonify({"goal-1":"log in as a previleged/admin user",
"goal-2":"find the secret_key of this flask webapp",
"note":"please do not brute force the site, it won't help"
})
@app.route("/api/get", methods=['GET'])
@jwt_required
def get():
form = RequestForm(request.args)
if form.validate():
email = form.email.data
current_user = get_jwt_identity()
element = mongo.db.accounts.find_one({'email':email})
if element and current_user == element.get('username'):
username = element.get('username')
email = element.get('email')
return jsonify(username=username,email=email)
else:
return jsonify({"email":'email not found'}),400
else:
return jsonify(form.errors),400
@app.route("/api/register", methods=['POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
username = form.username.data
email = form.email.data
password = form.password.data
accounts = mongo.db.accounts
if accounts.find({"email":form.email.data}).count()>0:
            return jsonify(email="email already exists"),400
else:
accounts.insert_one({"username":username,"email":email,"password":password})
return jsonify(username=username,email=email,password="****"), 201
else:
return jsonify(form.errors), 400
@app.route("/api/login",methods=['POST'])
def login():
form = LoginForm()
accounts = mongo.db.accounts
if form.validate_on_submit():
email = form.email.data
password = form.password.data
fetch = accounts.find_one({"email":email,"password":password})
if fetch:
username = fetch.get("username")
access_token = create_access_token(identity=username)
return jsonify(access_token=access_token,email=email)
else:
return jsonify(msg='Incorrect email or password'),400
else:
return jsonify(form.errors),400
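# Example client flow (a sketch; the host/port is an assumption, while the bare
# "access_token" header follows the JWT_HEADER_NAME / JWT_HEADER_TYPE settings above):
#
#     curl -X POST http://localhost:5000/api/register \
#          -d "username=alice" -d "email=alice@example.com" -d "password=secret"
#     curl -X POST http://localhost:5000/api/login \
#          -d "email=alice@example.com" -d "password=secret"    # -> {"access_token": "..."}
#     curl "http://localhost:5000/api/get?email=alice@example.com" \
#          -H "access_token: <token from login>"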
@app.route("/api/recover", methods=['GET'])
def recover():
form = RequestForm(request.args)
if form.validate():
email = form.email.data
element = mongo.db.accounts.find_one({'email':email})
if element:
username = element.get('username')
email = element.get('email')
return f"Dear {username}, this functionality is not implemented yet!", 202
else:
return jsonify({"email":'email not found'}),400
else:
return jsonify(form.errors),404
@app.route("/api/help", methods=['GET'])
def help():
return jsonify(msg="You can contact the admin via <EMAIL>")
@app.route('/robots.txt')
def robots():
return send_from_directory(app.static_folder, request.path[1:])
if __name__ == "__main__":
app.run(debug=True)
|
StarcoderdataPython
|
6619915
|
# encoding: utf-8
import numpy as np
import model
import utils
import plot_data
def exec_c3_1_a(X_a, X_b, init_w):
"""
plot 3 histogram of data projecting to difference vector w
:param X_a: Gaussian data of class a
:param X_b: Gaussian data of class b
:param init_w: initial w vector to be projected
:return: none
"""
n_histogram = 3
proj_a = np.zeros((X_a.shape[0], n_histogram))
proj_b = np.zeros((X_b.shape[0], n_histogram))
new_w = np.zeros((init_w.shape[0], n_histogram))
for i in range(n_histogram):
new_w[:, i] = (init_w + np.array(np.random.randn(*init_w.shape))).ravel()
proj_a[:, i] = utils.project_X_onto_w(X_a, new_w[:, i]).ravel()
proj_b[:, i] = utils.project_X_onto_w(X_b, new_w[:, i]).ravel()
plot_data.plt_histogram(proj_a, proj_b, new_w)
def exec_c3_1_b(X_a, X_b, init_w):
"""
Turn vector w by 360 degree to find the maximum value of Fisher score,
and the corresponding direction w∗
:param X_a: Gaussian data of class a
:param X_b: Gaussian data of class b
:param init_w: initial w vector to be projected
:return: none
"""
fs_clf = model.FisherScoreClassifier(X_a, X_b, init_w)
optimal_w = fs_clf.classify()
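# For reference, the Fisher criterion being maximised by the classifier is
# (a sketch; the actual computation lives in model.FisherScoreClassifier):
#
#     J(w) = (m_a - m_b) ** 2 / (s_a ** 2 + s_b ** 2)
#
# where m_* and s_* are the mean and standard deviation of each class after
# projecting the samples onto w.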
if __name__ == '__main__':
# generate gaussian distribution for class a
n_pts = 100
mean_a = [4, 2]
cov_a = np.array([[1, 0.5], [0.5, 1]]) # diagonal covariance
Gaus_dist_a = model.GausDS(mean_a, cov_a, n_pts)
# generate gaussian distribution for class b
mean_b = [2, 4]
cov_b = np.array([[1, 0.5], [0.5, 1]]) # diagonal covariance
Gaus_dist_b = model.GausDS(mean_b, cov_b, n_pts)
# plot two Gaussian distributions including class a and class b
plot_data.plt_distribution(Gaus_dist_a.data, Gaus_dist_b.data)
# init weight to do projection
init_w = np.array([1, -2]).reshape(-1, 1)
# draw three histograms by projecting to different w
exec_c3_1_a(Gaus_dist_a.data, Gaus_dist_b.data, init_w)
    # find optimal angle to separate class a and class b
exec_c3_1_b(Gaus_dist_a.data, Gaus_dist_b.data, init_w)
|
StarcoderdataPython
|
136614
|
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Graph browser related handlers."""
import flask
import json
from cache import cache
import services.datacommons as dc
import lib.statvar_hierarchy_search as svh_search
from services.datacommons import fetch_data
from flask import Response
from flask import request
import routes.api.place as place_api
bp = flask.Blueprint('api.browser', __name__, url_prefix='/api/browser')
NO_MMETHOD_KEY = 'no_mmethod'
NO_OBSPERIOD_KEY = 'no_obsPeriod'
@cache.memoize(timeout=3600 * 24) # Cache for one day.
@bp.route('/triples/<path:dcid>')
def triple_api(dcid):
"""Returns all the triples given a node dcid."""
return json.dumps(dc.get_triples([dcid]).get(dcid, []))
@cache.memoize(timeout=3600 * 24) # Cache for one day.
@bp.route('/propvals/<path:prop>/<path:dcid>')
def get_property_value(dcid, prop):
"""Returns the property values for a given node dcid and property label."""
response = fetch_data('/node/property-values', {
'dcids': [dcid],
'property': prop,
},
compress=False,
post=False)
result = {}
result["property"] = prop
result["values"] = response.get(dcid, {})
return Response(json.dumps(result), 200, mimetype='application/json')
@cache.memoize(timeout=3600 * 24) # Cache for one day.
@bp.route('/proplabels/<path:dcid>')
def get_property_labels(dcid):
"""Returns all property labels given a node dcid."""
labels = dc.get_property_labels([dcid]).get(dcid, {})
return Response(json.dumps(labels), 200, mimetype='application/json')
def get_sparql_query(place_id, stat_var_id, date):
date_triple = "?svObservation observationDate ?obsDate ."
date_selector = " ?obsDate"
if date:
date_triple = f'?svObservation observationDate "{date}" .'
date_selector = ""
sparql_query = f"""
SELECT ?dcid ?mmethod ?obsPeriod{date_selector}
WHERE {{
?svObservation typeOf StatVarObservation .
?svObservation variableMeasured {stat_var_id} .
?svObservation observationAbout {place_id} .
?svObservation dcid ?dcid .
?svObservation measurementMethod ?mmethod .
?svObservation observationPeriod ?obsPeriod .
{date_triple}
}}
"""
return sparql_query
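# For illustration, get_sparql_query("geoId/06", "Count_Person", "2020") renders
# roughly the following query (the place and stat var dcids are placeholders):
#
#     SELECT ?dcid ?mmethod ?obsPeriod
#     WHERE {
#       ?svObservation typeOf StatVarObservation .
#       ?svObservation variableMeasured Count_Person .
#       ?svObservation observationAbout geoId/06 .
#       ?svObservation dcid ?dcid .
#       ?svObservation measurementMethod ?mmethod .
#       ?svObservation observationPeriod ?obsPeriod .
#       ?svObservation observationDate "2020" .
#     }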
@cache.cached(timeout=3600 * 24, query_string=True) # Cache for one day.
@bp.route('/observation-id')
def get_observation_id():
"""Returns the observation node dcid for a combination of
predicates: observedNodeLocation, statisticalVariable, date,
    measurementMethod (optional), observationPeriod (optional)."""
place_id = request.args.get("place")
if not place_id:
return Response(json.dumps("error: must provide a place field"),
400,
mimetype='application/json')
stat_var_id = request.args.get("statVar")
if not stat_var_id:
return Response(json.dumps("error: must provide a statVar field"),
400,
mimetype='application/json')
date = request.args.get("date", "")
if not date:
return Response(json.dumps("error: must provide a date field"),
400,
mimetype='application/json')
request_mmethod = request.args.get("measurementMethod", NO_MMETHOD_KEY)
request_obsPeriod = request.args.get("obsPeriod", NO_OBSPERIOD_KEY)
sparql_query = get_sparql_query(place_id, stat_var_id, date)
result = ""
(_, rows) = dc.query(sparql_query)
for row in rows:
cells = row.get('cells', [])
if len(cells) != 3:
continue
dcid = cells[0].get('value', '')
mmethod = cells[1].get('value', NO_MMETHOD_KEY)
obsPeriod = cells[2].get('value', NO_OBSPERIOD_KEY)
if mmethod == request_mmethod and obsPeriod == request_obsPeriod:
result = dcid
break
return Response(json.dumps(result), 200, mimetype='application/json')
def statvar_hierarchy_helper(svg_id, svg_map, processed_svg_map, processed_sv,
seen_sv, level):
"""Processes the childStatVars and childStatVarGroups of a stat var group.
Adds parent field for those processed statVars and statVarGroups.
Args:
svg_id: stat var group of interest
svg_map: mapping of svg_id to the unprocessed svg object
processed_svg_map: mapping of svg_id to the processed svg object
processed_sv: mapping of stat var id to the processed stat var object
seen_sv: stat vars that have already been processed
"""
svg = svg_map.get(svg_id, {})
for child_sv in svg.get("childStatVars", []):
if child_sv["id"] in seen_sv:
continue
child_sv["parent"] = svg_id
processed_sv[child_sv["id"]] = child_sv
seen_sv.add(child_sv["id"])
for child_svg in svg.get("childStatVarGroups", []):
child_svg_id = child_svg.get("id")
child_svg = processed_svg_map.get(child_svg_id, svg_map[child_svg_id])
child_svg["parent"] = svg_id
child_svg["level"] = level
processed_svg_map[child_svg_id] = child_svg
seen_sv.add(child_svg_id)
statvar_hierarchy_helper(child_svg_id, svg_map, processed_svg_map,
processed_sv, seen_sv, level + 1)
@bp.route('/statvar-hierarchy', methods=['POST'])
def get_statvar_hierarchy():
"""Returns the stat var groups objects and stat vars objects relevant to a
specific dcid.
Each stat var group object (keyed by its stat var group id) will have an
absolute name, optional list of child stat vars, optional list of child stat
var groups, and optional list of parent stat var groups.
Each stat var object (keyed by its stat var id) will have its parent stat
var group id.
"""
dcids = request.json.get('dcids', [])
return get_statvar_hierarchy_helper("^".join(sorted(dcids)))
@cache.memoize(timeout=3600 * 24) # Cache for one day.
def get_statvar_hierarchy_helper(dcid_string):
dcids = []
if dcid_string != "":
dcids = dcid_string.split("^")
svg_map = dc.get_statvar_groups(dcids)
processed_svg_map = {}
processed_sv = {}
seen_sv = set()
for svg_id, svg in svg_map.items():
if svg_id in seen_sv:
continue
svg["level"] = 0
processed_svg_map[svg_id] = svg
statvar_hierarchy_helper(svg_id, svg_map, processed_svg_map,
processed_sv, seen_sv, 1)
for _, sv in processed_sv.items():
parent_svg = processed_svg_map.get(sv["parent"])
sv["level"] = parent_svg["level"] + 1
result = {}
result["statVarGroups"] = processed_svg_map
result["statVars"] = processed_sv
return Response(json.dumps(result), 200, mimetype='application/json')
@bp.route('/search_statvar_hierarchy')
@cache.cached(timeout=3600 * 24, query_string=True)
def search_statvar_hierarchy():
"""Gets the statvars and statvar groups that match the tokens in the query
"""
query = request.args.get("query").lower()
query = query.replace(",", " ")
tokens = query.split()
result = svh_search.get_search_result(tokens)
return Response(json.dumps(list(result)), 200, mimetype='application/json')
@cache.memoize(timeout=3600 * 24) # Cache for one day.
@bp.route('/num_stat_vars/<path:dcid>')
def get_num_statvars(dcid):
"""Returns number of stat vars for a dcid
"""
statsvars = place_api.statsvars(dcid)
num_statvars = len(statsvars)
return Response(json.dumps(num_statvars), 200, mimetype='application/json')
|
StarcoderdataPython
|
5010758
|
# elif myclass == "Barbarian":
# #Barbarian strength build
#Human Bonus Feat) Toughness
#1) Power Attack
#3) Improved Initiative
#5) Endurance
#7) Diehard
#9) Improved Critical (main weapon)
#11) Sickening Critical
#13) Staggering Critical
#15) Blinding Critical
#17) Stunning Critical
#19) Heroic Defiance
#TO ADD
#Barbarian archer build
#Barbarian mounted build
#Totem barbarian build
#Invulnerable rager barbarian build (uses Stalwart, tank build)
# elif myclass == "Bard":
#TO ADD
#Bard controller build
#Bard archer build
#Bard melee build
# #Bard casting build
#Bard social build
# elif myclass == "Cleric":
#Cleric bruiser build
#Cleric caster build
# elif myclass == "Druid":
# #Druid bruiser build
#Druid archetypes?
# elif myclass == "Fighter":
# #Fighter 2h build
#Fighter archery build
#Fighter mounted build
#Fighter OHF build
#Fighter THF build
#Fighter sword and board build
# elif myclass == "Monk":
# #Monk unarmed build
#Monk armed build
#Monk ranged build
#Monk switch hitter build
# elif myclass == "Paladin":
# #Paladin social build
#Paladin tank build
#Paladin melee smite build
#Paladin ranged smite build
# elif myclass == "Ranger":
# #Ranger archer build
#Ranger TWF build
#Ranger switch hitter build
# elif myclass == "Rogue":
# #Rogue TWF build
#Rogue social build
#Rogue THF build
#Rogue archery build
# elif myclass == "Sorcerer":
# #Sorcerer utility build
#Sorcerer blast build
#Sorcerer mixed build
# elif myclass == "Wizard":
# #Wizard control build
#Wizard debuff build
#Wizard buff build
#Wizard save or die build
#Wizard blast build
#Wizard summoning build
|
StarcoderdataPython
|
8067554
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from django.core.management import call_command
from django.utils.six.moves import input # noqa
class Command(BaseCommand):
    help = 'Pulls, makes locales and pushes (in that order) the .po files to transifex'
requires_system_checks = False
def handle(self, *args, **options):
# todo: test!
# Requires ``pip install transifex-client``
call_command('spiritmakelocales', stdout=self.stdout, stderr=self.stderr)
call_command('spirittxpush', stdout=self.stdout, stderr=self.stderr)
call_command('spirittxpull', stdout=self.stdout, stderr=self.stderr)
call_command('spiritmakelocales', stdout=self.stdout, stderr=self.stderr)
self.stdout.write('ok')
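# Hypothetical usage (the actual command name is this module's filename under
# management/commands/, which is not shown here):
#
#     python manage.py <this_command>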
|
StarcoderdataPython
|
8184399
|
<gh_stars>1-10
import unittest
unittest.defaultTestLoader.testMethodPrefix = 'should'
import textwrap
from clckwrkbdgr import tui
from clckwrkbdgr.tui import widgets
class TestKey(unittest.TestCase):
def should_create_key(self):
self.assertEqual(tui.Key(65).value, 65)
self.assertEqual(tui.Key('A').value, 65)
self.assertEqual(tui.Key(tui.Key('A')).value, 65)
def should_compare_keys(self):
self.assertTrue(tui.Key('A') == 65)
self.assertTrue(65 == tui.Key('A'))
self.assertTrue('A' == tui.Key('A'))
self.assertTrue(tui.Key('A') == 'A')
self.assertTrue(tui.Key('A') == tui.Key('A'))
self.assertTrue(tui.Key('A') != 64)
self.assertTrue(tui.Key('A') < 'B')
self.assertTrue(tui.Key('A') <= 'A')
self.assertTrue(tui.Key('A') > ' ')
self.assertTrue(tui.Key('A') in ['A'])
self.assertTrue(tui.Key('A') in {tui.Key('A')})
class TestUtils(unittest.TestCase):
def should_fit_text_into_screen(self):
lorem_ipsum = textwrap.dedent("""\
Lorem ipsum dolor sit amet, consectetur adipiscing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna
aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit
esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
occaecat cupidatat non proident, sunt in culpa qui officia
deserunt mollit anim id est laborum.
""")
expected = [
'Lorem ipsum dolor sit amet',
'sed do eiusmod tempor inci',
'aliqua. Ut enim ad minim v',
'[...]',
'esse cillum dolore eu fugi',
'occaecat cupidatat non pro',
'deserunt mollit anim id es',
]
self.assertEqual(tui.ExceptionScreen._fit_into_bounds(lorem_ipsum, len(expected[0]), len(expected)), expected)
lorem_ipsum = textwrap.dedent("""\
Lorem ipsum dolor sit amet, consectetur adipiscing elit,
sed do eiusmod tempor incididunt ut labore et dolore magna
aliqua. Ut enim ad minim veniam, quis nostrud exercitation
ullamco laboris nisi ut aliquip ex ea commodo consequat.
Duis aute irure dolor in reprehenderit in voluptate velit
esse cillum dolore eu fugiat nulla pariatur. Excepteur sint
(CUT to make odd number of lines)
""")
expected = [
'Lorem ipsum dolor sit amet',
'sed do eiusmod tempor inci',
'[...]',
'Duis aute irure dolor in r',
'esse cillum dolore eu fugi',
'(CUT to make odd numb',
]
self.assertEqual(tui.ExceptionScreen._fit_into_bounds(lorem_ipsum, len(expected[0]), len(expected)), expected)
def should_prepare_prompt_from_choices(self):
self.assertEqual("", widgets.Prompt._prompt_from_choices([]))
self.assertEqual("a", widgets.Prompt._prompt_from_choices(['a']))
self.assertEqual("a-b", widgets.Prompt._prompt_from_choices(['a', 'b']))
self.assertEqual("y,n", widgets.Prompt._prompt_from_choices(['y', 'n']))
self.assertEqual("a-c", widgets.Prompt._prompt_from_choices(['a', 'b', 'c']))
self.assertEqual("a-c,x-z", widgets.Prompt._prompt_from_choices(['a', 'b', 'c', 'x', 'y', 'z']))
self.assertEqual("*,a-b", widgets.Prompt._prompt_from_choices(['a', 'b', '*']))
|
StarcoderdataPython
|
1782886
|
<filename>python/test_result.py
# Containing whole result of prediction
class TestResultClass(object):
# Class Name (ex: xxxx)
name = None
# Class Label (ex: 4)
label = None
# Class Score
score = 0
    # Class Box coordinate ([xmin, ymin, xmax, ymax])
box = None
class TestResultImage(object):
# Image name
name = None
# Classes in this image
classes = None
# Filepath (absolute path)
filepath = None
def sort_classes(self):
if self.classes:
self.classes.sort(key=lambda r: r.label)
class TestResultGroup(object):
# Group Name (ex: Card)
name = None
# Required class labels (ex: 0, 3, 5)
required_classes = None
# Images in this group
images = None
class TestResult(object):
passed_count = 0
tested_count = 0
group = None
def is_passed(self):
return self.passed_count != 0 and (self.passed_count == self.tested_count)
|
StarcoderdataPython
|
3292104
|
from __future__ import annotations
from typing import Sequence
from .typing import StyleOptions
from .utils import is_oneliner
# Codes can be combined, e.g.:
# - "3;33": italic yellow
# - "3;4;33": italic underlined yellow
#
# Some terminals support a 256-color extended color set:
# - ansi pattern: "\033[38;5;{color}m{content}\033[0m"
#
# For more info: https://en.wikipedia.org/wiki/ANSI_escape_code
fg_colors = {
# text/fg colors:
"black": "30",
"blue_l1": "38;5;117", # iso "34"
"blue": "38;5;75",
"blue_d1": "38;5;32",
"blue_d2": "38;5;25",
"cyan_l1": "38;5;123", # iso "36"
"cyan": "38;5;44",
"cyan_d1": "38;5;37",
"cyan_d2": "38;5;30",
"green_l1": "38;5;46", # iso "32"
"green": "38;5;34",
"green_d1": "38;5;28",
"green_d2": "38;5;22",
"magenta_l1": "38;5;207", # iso "35"
"magenta": "38;5;201",
"magenta_d1": "38;5;127",
"magenta_d2": "38;5;90",
"pink_l1": "38;5;219",
"pink": "38;5;213",
"pink_d1": "38;5;170",
"pink_d2": "38;5;133",
"orange_l1": "38;5;214",
"orange": "38;5;208",
"orange_d1": "38;5;202",
"orange_d2": "38;5;130",
"red_l1": "38;5;210", # iso "31"
"red": "38;5;203",
"red_d1": "38;5;196",
"red_d2": "38;5;124",
"yellow_l1": "38;5;229", # iso "33"
"yellow": "38;5;227",
"yellow_d1": "38;5;184",
"yellow_d2": "38;5;142",
"white": "37",
"grey": "38;5;244",
"grey_0": "38;5;232", # == black
"grey_1": "38;5;236",
"grey_2": "38;5;240",
"grey_3": "38;5;244",
"grey_4": "38;5;248",
"grey_5": "38;5;252",
}
bg_colors = {
"black": "40",
"blue": "48;5;20",
"blue_d1": "48;5;19",
"blue_d2": "48;5;18",
"blue_d3": "48;5;17",
"cyan": "48;5;37", # iso: "46"
"cyan_d1": "48;5;30",
"cyan_d2": "48;5;23",
"default": "49",
"green": "48;5;34", # iso: "42"
"green_d1": "48;5;28",
"green_d2": "48;5;22",
"grey": "48;5;243",
"grey_d1": "48;5;241",
"grey_d2": "48;5;239",
"grey_d3": "48;5;237",
"grey_d4": "48;5;235",
"magenta": "48;5;164",
"magenta_d1": "48;5;127",
"magenta_d2": "48;5;90",
"magenta_d3": "48;5;53",
"orange": "48;5;202",
"orange_d1": "48;5;166",
"orange_d2": "48;5;130",
"orange_d3": "48;5;94",
"red": "48;5;160", # iso "41"
"red_d1": "48;5;124",
"red_d2": "48;5;88",
"red_d3": "48;5;52",
"yellow": "48;5;142", # iso "43"
"yellow_d1": "48;5;100",
"yellow_d2": "48;5;58",
"white": "47",
}
ansi_codes = {
"reset": "0",
"bold": "1",
"dim": "2",
"italic": "3",
"underline": "4",
"blink": "5",
"inverse": "7",
"invert": "7",
"strike": "9",
"default": "10",
"normal": "22", # normal color intensity - neither "bold" nor "dim"
"blink_end": "25",
"inverse_end": "27",
"invert_end": "27",
"strike_end": "29",
**fg_colors,
**{f"b_{color}": f"1;{code}" for color, code in fg_colors.items()},
**{f"bg_{color}": f"0;38;5;15;{code}"
for color, code in bg_colors.items()},
**{f"bg_b_{color}": f"0;1;38;5;15;{code}"
for color, code in bg_colors.items()},
}
ansi_pattern = "\x1b[{}m{}\x1b[0m"
def apply_style(content: str, style: StyleOptions = None) -> str:
if style is None:
return content
elif is_oneliner(content):
return ansi_pattern.format(_ansi_code(style), content)
else:
return "\n".join(ansi_pattern.format(_ansi_code(style), line)
for line in content.splitlines())
def _ansi_code(style: StyleOptions) -> str:
if isinstance(style, str):
if style in ansi_codes:
return ansi_codes[style]
else:
return style
elif isinstance(style, int):
return f"38;5;{style}"
elif isinstance(style, (bytes, bytearray)):
raise TypeError("Unsupported style.")
elif isinstance(style, Sequence):
return ";".join(_ansi_code(item) for item in style)
else:
raise TypeError(f"Unsupported style '{style}'.")
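# A few illustrative calls (not part of the original module; outputs follow the
# color tables and helpers above):
#
#     apply_style("error", "b_red")            # -> "\x1b[1;38;5;203merror\x1b[0m"
#     apply_style("note", ["italic", "grey"])  # -> "\x1b[3;38;5;244mnote\x1b[0m"
#     apply_style("custom", 208)               # -> "\x1b[38;5;208mcustom\x1b[0m"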
|
StarcoderdataPython
|
91422
|
<reponame>ThebiggunSeeoil/VIS-MASTER
from django.core import serializers
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404, redirect
from django.template import loader
from django.http import HttpResponse
from django.http import JsonResponse
from django import template
import json
import datetime
from django.utils import timezone
from dateutil.relativedelta import relativedelta, SA, TH
from app.models import Team,Site,Nozzle,Status,Status_Error_logger,VIS_ip_address ,Setup_Config
from django.db.models import OuterRef, Subquery, Count, Min
from linebot.creating_flex_messages import *
class connect_data_to_db ():
def prepare_nozzle (GET_VIS_DATA,GET_VIS_DATA_ALL,NOZZLE) :
        vis_check = []  # keeps name_id values already seen so the same site is not recorded twice
        vis_result = []
        # Take each name_id obtained from the site table and add a 'Unit_log_address': []
        # field that will later hold the nozzle data for that site.
        for data in GET_VIS_DATA:
            # print(data)
            if data['name_id'] not in vis_check:  # only process a name_id the first time it appears; later loop iterations skip it
                vis_check.append(data['name_id'])  # remember this name_id in vis_check
data = {'name_id': data['name_id'],
'log_address_check': [],
'pump_log_address_check': [],
'nozzle_data_check': [],
'log_address_count': [],
'pump_log_address_count': [],
'nozzle_data_count': [],
'site_name':data['site__station_name'],
'station_ip':data['site__station_ip'],
'station_monitor_device': data['site__station_monitor_device'],
'MWGT_status':data['MWGT_status'],
'VIS_status':data['VIS_status'],
'NOZZLE_status_check':data['NOZZLE_status_check'],
'BATTERY_status_check':data['BATTERY_status_check'],
'VIS_last_time':data['VIS_last_time'],
                        'Unit_log_address': []}  # record assembled here before being stored in vis_result
                vis_result.append(data)  # keep the record in vis_result for use by the other functions
# for vis_1 in vis_result :
# print('vis 1 ',vis_1)
for name_id in vis_result:
for data in NOZZLE:
if data['site_id'] == name_id['name_id']:
name_id['nozzle_data_check'].append(data['nozzle_num'])
if data['pump_log_address'] not in name_id['pump_log_address_check']:
name_id['pump_log_address_check'].append(data['pump_log_address'])
if data['log_address'] not in name_id['log_address_check']:
name_id['log_address_check'].append(data['log_address'])
for count in vis_result:
count_log = len(count['pump_log_address_check'])
count_num = len(count['nozzle_data_check'])
count_log_main = len(count['log_address_check'])
count['pump_log_address_count'] = count_log
count['nozzle_data_count'] = count_num
count['log_address_count'] = count_log_main
        GET_VIS_DATA_ALL_CHECK_STORE = []  # keeps Unit_log_address values already seen so they are not recorded twice
for Unit_check in vis_result :
for GET_VIS_DATA_ALL_CHECK in GET_VIS_DATA_ALL :
log_check = str(GET_VIS_DATA_ALL_CHECK['name_id']) + str(GET_VIS_DATA_ALL_CHECK['Unit_log_address'])
if GET_VIS_DATA_ALL_CHECK['name_id'] == Unit_check['name_id']:
if log_check not in GET_VIS_DATA_ALL_CHECK_STORE:
GET_VIS_DATA_ALL_CHECK_STORE.append(log_check)
value = {'Unit_log_address': GET_VIS_DATA_ALL_CHECK['Unit_log_address'],'DataUnitMap_IP': GET_VIS_DATA_ALL_CHECK['DataUnitMap_IP'] ,'nozzle':[]}
Unit_check['Unit_log_address'].append(value)
        GET_NOZZLE_CHECK_STORE = []  # keeps Unit_log_address values already seen so they are not recorded twice
for nozzle_check in vis_result :
for GET_VIS_DATA_ALL_CHECK in GET_VIS_DATA_ALL:
if GET_VIS_DATA_ALL_CHECK['name_id'] == nozzle_check['name_id']:
log_check = str(GET_VIS_DATA_ALL_CHECK['name_id']) + str(GET_VIS_DATA_ALL_CHECK['Unit_log_address'])
value = {'Unit_log_address': GET_VIS_DATA_ALL_CHECK['Unit_log_address'] ,'nozzle':[]}
for nozzle_loop in nozzle_check['Unit_log_address'] :
if nozzle_loop['Unit_log_address'] == GET_VIS_DATA_ALL_CHECK['Unit_log_address']:
nozzle_loop['nozzle'].append(GET_VIS_DATA_ALL_CHECK)
# print(vis_result)
return (vis_result)
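    # Sketch of the structure returned by prepare_nozzle (field names follow the
    # code above; the values are illustrative only):
    #
    #     [{'name_id': 1, 'site_name': '...', 'station_ip': '...',
    #       'log_address_count': 2, 'pump_log_address_count': 4, 'nozzle_data_count': 8,
    #       'Unit_log_address': [{'Unit_log_address': 1, 'DataUnitMap_IP': '...',
    #                             'nozzle': [<status rows>]}],
    #       ...}]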
def RequestDataDBByUserRequestByIpAddress(user_type,ip_address_request):
data = []
data_site_name_id = Status.objects.values('name_id', 'site__station_name','site__station_ip','site__station_monitor_device' ,'MWGT_status','VIS_status','NOZZLE_status_check','BATTERY_status_check','VIS_last_time','Unit_log_address').annotate(dcount=Count('Unit_log_address')).filter(site__station_active=True,site__station_ip=ip_address_request).order_by('name_id')
data_status = Status.objects.values().filter(site__station_active=True,site__station_ip=ip_address_request)
nozzle_count = Nozzle.objects.values().filter(site__station_active=True,site__station_ip=ip_address_request)
results = connect_data_to_db.prepare_nozzle(data_site_name_id, data_status,nozzle_count)
return creating_flex_messages.CreateFormDetailByIpAddress(results)
def different_time_calculate(TimeZone,TimeCalculate):
# print(TimeCalculate)
# TimeCalculateDetail = TimeCalculate[1].MWGT_last_time
# print('TimeCalculateDetail',TimeCalculate)
        different_time = relativedelta(TimeZone, TimeCalculate)  # difference between now and MWGT_last_time
        day_loss = different_time.days  # days component of the difference
        hours_loss = different_time.hours  # hours component of the difference
        minutes_loss = different_time.minutes  # minutes component of the difference
        hours_count = TimeZone - TimeCalculate
        hours_def = hours_count.total_seconds()
        hours_deff = (hours_def / 60) / 60  # total time difference expressed in hours
        # print (hours_deff)
        # datetime_now = datetime.datetime.now().strftime("%d-%m-%y %H:%M")
        # MWGT_last_time = TimeCalculate.strftime("%d-%m-%y %H:%M")  # convert the datetime to a string
# print('TimeCalculateDetail',TimeCalculate)
# print('different_time',different_time)
# print('day_loss',day_loss)
# print('hours_loss',hours_loss)
# print('minutes_loss',minutes_loss)
# print('datetime_now',datetime_now)
# print('MWGT_last_time',MWGT_last_time)
return day_loss , hours_loss , minutes_loss , hours_deff
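    # Illustration (not from the original file): a gap of 1 day and 90 minutes
    # between TimeCalculate and TimeZone returns roughly (1, 1, 30, 25.5):
    # 1 day, 1 hour and 30 minutes as relativedelta components, and 25.5 as the
    # total difference expressed in hours.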
def RequestDataDBForMGR():
        dt = datetime.datetime.now().strftime("%d-%m-%y %H:%M")
VIS_SUM_OFFLINE = Status.objects.filter(VIS_status='offline',site__station_active=True).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
MWGT_SUM_OFFLINE = Status.objects.filter(MWGT_status='offline',site__station_active=True).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
TOTAL_SITE_ACTIVE = Site.objects.filter(station_active=True).values('station_ip').annotate(dcount=Count('station_ip')).count()
NOZZLE_OFFLINE = Status.objects.filter(NOZZLE_status_check='offline',site__station_active=True).count()
BATTERY_OFFLINE = Status.objects.filter(BATTERY_status_check='low', site__station_active=True).count()
return creating_flex_messages.CreateFormAllStatusForMGR(dt,VIS_SUM_OFFLINE,MWGT_SUM_OFFLINE,NOZZLE_OFFLINE,BATTERY_OFFLINE,TOTAL_SITE_ACTIVE)
def RequestAllDataForTechnician(user_type,message):
        dt = datetime.datetime.now().strftime("%d-%m-%y %H:%M")
VIS_SUM_OFFLINE = Status.objects.filter(VIS_status='offline',site__station_active=True,site__team_support=user_type.if_technician).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
MWGT_SUM_OFFLINE = Status.objects.filter(MWGT_status='offline',site__station_active=True,site__team_support=user_type.if_technician).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
TOTAL_SITE_ACTIVE = Site.objects.filter(station_active=True,team_support=user_type.if_technician).values('station_ip').annotate(dcount=Count('station_ip')).count()
NOZZLE_OFFLINE = Status.objects.filter(NOZZLE_status_check='offline',site__station_active=True,site__team_support=user_type.if_technician).count()
BATTERY_OFFLINE = Status.objects.filter(BATTERY_status_check='low', site__station_active=True,site__team_support=user_type.if_technician).count()
return creating_flex_messages.CreateFormAllStatusForFirstLevel(dt,VIS_SUM_OFFLINE,MWGT_SUM_OFFLINE,NOZZLE_OFFLINE,BATTERY_OFFLINE,TOTAL_SITE_ACTIVE,user_type)
def RequestAllDataForAllUser(user_type,message):
        dt = datetime.datetime.now().strftime("%d-%m-%y %H:%M")
VIS_SUM_OFFLINE = Status.objects.filter(VIS_status='offline',site__station_active=True).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
MWGT_SUM_OFFLINE = Status.objects.filter(MWGT_status='offline',site__station_active=True).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
TOTAL_SITE_ACTIVE = Site.objects.filter(station_active=True).values('station_ip').annotate(dcount=Count('station_ip')).count()
NOZZLE_OFFLINE = Status.objects.filter(NOZZLE_status_check='offline',site__station_active=True).count()
BATTERY_OFFLINE = Status.objects.filter(BATTERY_status_check='low', site__station_active=True).count()
return creating_flex_messages.CreateFormAllStatusForFirstLevel(dt,VIS_SUM_OFFLINE,MWGT_SUM_OFFLINE,NOZZLE_OFFLINE,BATTERY_OFFLINE,TOTAL_SITE_ACTIVE,user_type)
def RequestDataDBForTechnician(user_type,message):
VIS_SUM_OFFLINE = Status.objects.filter(VIS_status='offline',site__station_active=True,site__team_support=user_type.if_technician).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
MWGT_SUM_OFFLINE = Status.objects.filter(MWGT_status='offline',site__station_active=True,site__team_support=user_type.if_technician).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
if message in ('nozzle_status','battery_status') :
TOTAL_SITE_ACTIVE = Nozzle.objects.filter(site__station_active=True,active_nozzle=True,site__team_support=user_type.if_technician).values('id').count()
if message not in ('nozzle_status','battery_status') :
TOTAL_SITE_ACTIVE = Site.objects.filter(station_active=True,team_support=user_type.if_technician).values('station_ip').annotate(dcount=Count('station_ip')).count()
# MWGT_LAST_OFFLINE = Status.objects.filter(MWGT_status='offline',site__station_active=True).latest('Timestramp')
NOZZLE_OFFLINE = Status.objects.filter(NOZZLE_status_check='offline',site__station_active=True,site__team_support=user_type.if_technician).count()
# NOZZLE_LAST_OFFLINE = Status.objects.filter(NOZZLE_status_check='offline',site__station_active=True).latest('Timestramp')
BATTERY_OFFLINE = Status.objects.filter(BATTERY_status_check='low', site__station_active=True,site__team_support=user_type.if_technician).count()
# BATTERY_LAST_OFFLINE = Status.objects.filter(BATTERY_status_check='low', site__station_active=True).latest('Timestramp')
GET_VIS_DATA = Status.objects.select_related('site').filter(VIS_status='offline',site__station_active=True,site__team_support=user_type.if_technician)
GET_MWGT_DATA = Status.objects.select_related('site').filter(MWGT_status='offline', site__station_active=True,site__team_support=user_type.if_technician)
GET_NOZZLE_DATA = Status.objects.select_related('site').filter(NOZZLE_status_check='offline', site__station_active=True,site__team_support=user_type.if_technician)
GET_BATTERY_DATA = Status.objects.select_related('site').filter(BATTERY_status_check='low',site__station_active=True,site__team_support=user_type.if_technician)
STATUS_CONFIG = Setup_Config.objects.values()
for setup_config in STATUS_CONFIG :
time_alert_alarm_hours = setup_config['time_alert_alarm_hours']
time_alert_warning_hours = setup_config['time_alert_warning_hours']
battery_level_alarm_volt = setup_config['battery_level_alarm_volt']
battery_level_low_volt = setup_config['battery_level_low_volt']
battery_level_failed_volt = setup_config['battery_level_failed_volt']
data_store = []
vis_check = []
mwgt_check = []
vis_result = []
mwgt_result = []
nozzle_result = []
battery_result = []
for data in GET_VIS_DATA:
if data.DataUnitMap_IP not in vis_check:
vis_check.append(data.DataUnitMap_IP)
# vis_check2.append(data)
time_def_check = connect_data_to_db.different_time_calculate(timezone.now(),data.VIS_last_time)
vis_result.append({'name':data.site,'ip_address':data.site.station_ip,'type':'VIS',
'NOZZLE_Last_conn':data.NOZZLE_Last_conn,'time_dif':{'day':time_def_check[0],'hour':time_def_check[1],'minutes':time_def_check[2],'hours_deff':time_def_check[3]},
'NOZZLE_Battery_Status':data.NOZZLE_Battery_Status_Volts ,
'TEAM_ID':data.site.team_support.team ,
'TEAM_NAME': data.site.team_support.team_name , 'VIS_last_time':data.VIS_last_time,
'TIME_UPDATE':timezone.now()})
for data in GET_MWGT_DATA:
if data.DataUnitMap_IP not in mwgt_check:
mwgt_check.append(data.DataUnitMap_IP)
# vis_check2.append(data)
time_def_check = connect_data_to_db.different_time_calculate(timezone.now(),data.MWGT_last_time)
mwgt_result.append({'name':data.site,'ip_address':data.site.station_ip,'type':'MWGT',
'NOZZLE_Last_conn':data.NOZZLE_Last_conn,'time_dif':{'day':time_def_check[0],'hour':time_def_check[1],'minutes':time_def_check[2],'hours_deff':time_def_check[3]},
'NOZZLE_Battery_Status':data.NOZZLE_Battery_Status_Volts ,'MWGT_last_time':data.MWGT_last_time,
'TEAM_ID':data.site.team_support.team ,
'TEAM_NAME': data.site.team_support.team_name , 'DataUnitMap_IP':data.DataUnitMap_IP,
'MWGT_last_time':data.MWGT_last_time,'TIME_UPDATE':timezone.now()})
# print('mwgt_result',mwgt_result)
for data in GET_NOZZLE_DATA:
time_def_check = connect_data_to_db.different_time_calculate(timezone.now(),data.MWGT_last_time)
# print('time_def_check',time_def_check)
# print('time',data.MWGT_last_time)
nozzle_result.append({'name':data.site,'ip_address':data.site.station_ip,'type':'NOZZLE',
'NOZZLE_Last_conn':data.NOZZLE_Last_conn,'time_dif':{'day':time_def_check[0],'hour':time_def_check[1],'minutes':time_def_check[2],'hours_deff':time_def_check[3]},
'NOZZLE_Battery_Status':data.NOZZLE_Battery_Status_Volts ,
'TEAM_ID':data.site.team_support.team ,'VIS_last_time':data.VIS_last_time,'Unit_log_address':data.Unit_log_address,
'TEAM_NAME': data.site.team_support.team_name , 'NOZZLE_pump_log_address':data.NOZZLE_pump_log_address , 'NOZZLE_num':data.NOZZLE_num , 'TIME_UPDATE':timezone.now()})
# print('mwgt_result',nozzle_result)
for data in GET_BATTERY_DATA:
time_def_check = connect_data_to_db.different_time_calculate(timezone.now(),data.MWGT_last_time)
battery_result.append({'name':data.site,'ip_address':data.site.station_ip,'type':'BATT',
'NOZZLE_Last_conn':data.NOZZLE_Last_conn,'time_dif':{'day':time_def_check[0],'hour':time_def_check[1],'minutes':time_def_check[2],'hours_deff':time_def_check[3]},
'NOZZLE_Battery_Status':data.NOZZLE_Battery_Status_Volts ,
'TEAM_ID':data.site.team_support.team ,'BATTERY_status_check':data.BATTERY_status_check,'NOZZLE_SN':data.NOZZLE_SN,
'NOZZLE_Battery_Status_Volts':data.NOZZLE_Battery_Status_Volts,'TEAM_NAME': data.site.team_support.team_name , 'NOZZLE_pump_log_address':data.NOZZLE_pump_log_address , 'NOZZLE_num':data.NOZZLE_num , 'TIME_UPDATE':timezone.now()})
# print('mwgt_result',battery_result)
data = {'user_type':user_type,'TIME_UPDATE':timezone.now(),'VIS_SUM_OFFLINE':VIS_SUM_OFFLINE,'MWGT_SUM_OFFLINE':MWGT_SUM_OFFLINE,
'TOTAL_SITE_ACTIVE':TOTAL_SITE_ACTIVE,'NOZZLE_OFFLINE':NOZZLE_OFFLINE,
'BATTERY_OFFLINE':BATTERY_OFFLINE,
'VIS_DETAIL':vis_result ,'MWTG_DETAIL':mwgt_result ,'NOZZLE_DETAIL':nozzle_result ,'BATTERY_DETAIL':battery_result,
'time_alert_alarm_hours':time_alert_alarm_hours,'time_alert_warning_hours':time_alert_warning_hours,'battery_level_alarm_volt':battery_level_alarm_volt,
'battery_level_low_volt':battery_level_low_volt,'battery_level_failed_volt':battery_level_failed_volt}
if message == 'vis_status' :
return creating_flex_messages.CreateFormVisFlexMessageDetail(data,user_type)
elif message == 'mwgt_status' :
return creating_flex_messages.CreateFormMwgtFlexMessageDetail(data,user_type)
elif message == 'nozzle_status':
return creating_flex_messages.CreateFormNozzleFlexMessageDetail(data,user_type)
elif message == 'battery_status':
return creating_flex_messages.CreateFormBatteryFlexMessageDetail(data,user_type)
    def RequestLastVisStatusRecord(name_id):  # look up the latest VIS status so it can be sent back for the VIS device to continue
        # for vis_check in (payload): # Loop each nozzle for update into database
        name_id = name_id['events'][0]['name_id']
        try :
            vis_last_status = Status.objects.filter(name_id=name_id).values('VIS_status').distinct().first()
            if vis_last_status != None :
                return vis_last_status['VIS_status']  # a previous record exists for this station
            else :
                vis_last_status = 'not_found'
                return vis_last_status  # no record found, e.g. a new station that has never sent data in
            # for i in vis_last_status :
            # print (i)
            # return vis_last_status['VIS_status']  # latest VIS status returned so the VIS device can continue
        except Status.DoesNotExist:
            print ('Cannot send battery status back to device')
def RequestDataDBForAllUser(user_type,message):
VIS_SUM_OFFLINE = Status.objects.filter(VIS_status='offline',site__station_active=True).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
MWGT_SUM_OFFLINE = Status.objects.filter(MWGT_status='offline',site__station_active=True).values('DataUnitMap_IP').annotate(dcount=Count('DataUnitMap_IP')).count()
if message in ('nozzle_status','battery_status') :
TOTAL_SITE_ACTIVE = Nozzle.objects.filter(site__station_active=True,active_nozzle=True,).values('id').count()
if message not in ('nozzle_status','battery_status') :
TOTAL_SITE_ACTIVE = Site.objects.filter(station_active=True).values('station_ip').annotate(dcount=Count('station_ip')).count()
# MWGT_LAST_OFFLINE = Status.objects.filter(MWGT_status='offline',site__station_active=True).latest('Timestramp')
# MWGT_LAST_OFFLINE = Status.objects.filter(MWGT_status='offline',site__station_active=True).latest('Timestramp')
NOZZLE_OFFLINE = Status.objects.filter(NOZZLE_status_check='offline',site__station_active=True).count()
# NOZZLE_LAST_OFFLINE = Status.objects.filter(NOZZLE_status_check='offline',site__station_active=True).latest('Timestramp')
BATTERY_OFFLINE = Status.objects.filter(BATTERY_status_check='low', site__station_active=True).count()
# BATTERY_LAST_OFFLINE = Status.objects.filter(BATTERY_status_check='low', site__station_active=True).latest('Timestramp')
GET_VIS_DATA = Status.objects.select_related('site').filter(VIS_status='offline',site__station_active=True)
GET_MWGT_DATA = Status.objects.select_related('site').filter(MWGT_status='offline', site__station_active=True)
GET_NOZZLE_DATA = Status.objects.select_related('site').filter(NOZZLE_status_check='offline', site__station_active=True)
GET_BATTERY_DATA = Status.objects.select_related('site').filter(BATTERY_status_check='low',site__station_active=True)
STATUS_CONFIG = Setup_Config.objects.values()
for setup_config in STATUS_CONFIG :
time_alert_alarm_hours = setup_config['time_alert_alarm_hours']
time_alert_warning_hours = setup_config['time_alert_warning_hours']
battery_level_alarm_volt = setup_config['battery_level_alarm_volt']
battery_level_low_volt = setup_config['battery_level_low_volt']
battery_level_failed_volt = setup_config['battery_level_failed_volt']
data_store = []
vis_check = []
mwgt_check = []
vis_result = []
mwgt_result = []
nozzle_result = []
battery_result = []
for data in GET_VIS_DATA:
if data.DataUnitMap_IP not in vis_check:
vis_check.append(data.DataUnitMap_IP)
# vis_check2.append(data)
time_def_check = connect_data_to_db.different_time_calculate(timezone.now(),data.VIS_last_time)
vis_result.append({'name':data.site,'ip_address':data.site.station_ip,'type':'VIS',
'NOZZLE_Last_conn':data.NOZZLE_Last_conn,'time_dif':{'day':time_def_check[0],'hour':time_def_check[1],'minutes':time_def_check[2],'hours_deff':time_def_check[3]},
'NOZZLE_Battery_Status':data.NOZZLE_Battery_Status_Volts ,
'TEAM_ID':data.site.team_support.team ,
'TEAM_NAME': data.site.team_support.team_name , 'VIS_last_time':data.VIS_last_time,
'TIME_UPDATE':timezone.now()})
for data in GET_MWGT_DATA:
if data.DataUnitMap_IP not in mwgt_check:
mwgt_check.append(data.DataUnitMap_IP)
# vis_check2.append(data)
time_def_check = connect_data_to_db.different_time_calculate(timezone.now(),data.MWGT_last_time)
mwgt_result.append({'name':data.site,'ip_address':data.site.station_ip,'type':'MWGT',
'NOZZLE_Last_conn':data.NOZZLE_Last_conn,'time_dif':{'day':time_def_check[0],'hour':time_def_check[1],'minutes':time_def_check[2],'hours_deff':time_def_check[3]},
'NOZZLE_Battery_Status':data.NOZZLE_Battery_Status_Volts ,
'TEAM_ID':data.site.team_support.team ,'DataUnitMap_IP':data.DataUnitMap_IP,'MWGT_last_time':data.MWGT_last_time,
'TEAM_NAME': data.site.team_support.team_name , 'TIME_UPDATE':timezone.now()})
# print('mwgt_result',mwgt_result)
for data in GET_NOZZLE_DATA:
time_def_check = connect_data_to_db.different_time_calculate(timezone.now(),data.MWGT_last_time)
# print('time_def_check',time_def_check)
# print('time',data.MWGT_last_time)
nozzle_result.append({'name':data.site,'ip_address':data.site.station_ip,'type':'NOZZLE',
'NOZZLE_Last_conn':data.NOZZLE_Last_conn,'time_dif':{'day':time_def_check[0],'hour':time_def_check[1],'minutes':time_def_check[2],'hours_deff':time_def_check[3]},
'NOZZLE_Battery_Status':data.NOZZLE_Battery_Status_Volts ,
'TEAM_ID':data.site.team_support.team ,'VIS_last_time':data.VIS_last_time,'Unit_log_address':data.Unit_log_address,
'TEAM_NAME': data.site.team_support.team_name , 'NOZZLE_pump_log_address':data.NOZZLE_pump_log_address , 'NOZZLE_num':data.NOZZLE_num , 'TIME_UPDATE':timezone.now()})
# print('mwgt_result',nozzle_result)
for data in GET_BATTERY_DATA:
time_def_check = connect_data_to_db.different_time_calculate(timezone.now(),data.MWGT_last_time)
battery_result.append({'name':data.site,'ip_address':data.site.station_ip,'type':'BATT',
'NOZZLE_Last_conn':data.NOZZLE_Last_conn,'time_dif':{'day':time_def_check[0],'hour':time_def_check[1],'minutes':time_def_check[2],'hours_deff':time_def_check[3]},
'NOZZLE_Battery_Status':data.NOZZLE_Battery_Status_Volts ,'NOZZLE_SN':data.NOZZLE_SN,
'TEAM_ID':data.site.team_support.team , 'BATTERY_status_check':data.BATTERY_status_check,'NOZZLE_Battery_Status_Volts':data.NOZZLE_Battery_Status_Volts,
'TEAM_NAME': data.site.team_support.team_name , 'NOZZLE_pump_log_address':data.NOZZLE_pump_log_address , 'NOZZLE_num':data.NOZZLE_num , 'TIME_UPDATE':timezone.now()})
# print('mwgt_result',battery_result)
data = {'user_type':user_type,'TIME_UPDATE':timezone.now(),'VIS_SUM_OFFLINE':VIS_SUM_OFFLINE,'MWGT_SUM_OFFLINE':MWGT_SUM_OFFLINE,
'TOTAL_SITE_ACTIVE':TOTAL_SITE_ACTIVE,'NOZZLE_OFFLINE':NOZZLE_OFFLINE,
'BATTERY_OFFLINE':BATTERY_OFFLINE,
'VIS_DETAIL':vis_result ,'MWTG_DETAIL':mwgt_result ,'NOZZLE_DETAIL':nozzle_result ,'BATTERY_DETAIL':battery_result,
'time_alert_alarm_hours':time_alert_alarm_hours,'time_alert_warning_hours':time_alert_warning_hours,'battery_level_alarm_volt':battery_level_alarm_volt,
'battery_level_low_volt':battery_level_low_volt,'battery_level_failed_volt':battery_level_failed_volt}
if message == 'vis_status' :
return creating_flex_messages.CreateFormVisFlexMessageDetail(data,user_type)
elif message == 'mwgt_status' :
return creating_flex_messages.CreateFormMwgtFlexMessageDetail(data,user_type)
elif message == 'nozzle_status':
return creating_flex_messages.CreateFormNozzleFlexMessageDetail(data,user_type)
elif message == 'battery_status':
return creating_flex_messages.CreateFormBatteryFlexMessageDetail(data,user_type)
|
StarcoderdataPython
|
3237221
|
<filename>cert_issuer/revoker.py<gh_stars>1-10
"""
Base class for building blockchain transactions to issue Blockchain Certificates.
"""
import logging
import json
from pycoin.serialize import h2b
from cert_issuer.errors import BroadcastError
MAX_TX_RETRIES = 5
def ensure_string(value):
if isinstance(value, str):
return value
return value.decode('utf-8')
def get_revocation_hashes(app_config):
revocation_list_file = app_config.revocation_list_file
with open(revocation_list_file, "r") as f:
data = f.read()
revocations = json.loads(data)
hashes = revocations["hashes_to_be_revoked"]
return hashes
def remove_from_revocations_list(app_config, hash):
revocation_list_file = app_config.revocation_list_file
with open(revocation_list_file, "r") as f:
data = f.read()
revocations = json.loads(data)
revocations["hashes_to_be_revoked"].remove(hash)
with open(revocation_list_file, "w+") as f:
        json.dump(revocations, f, indent=4)
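# Expected shape of revocation_list_file, inferred from the helpers above
# (the hash value is a placeholder):
#
#     {
#         "hashes_to_be_revoked": ["9f86d081884c7d659a2feaa0c55ad015..."]
#     }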
class Revoker:
def __init__(self, transaction_handler, max_retry=MAX_TX_RETRIES):
self.transaction_handler = transaction_handler
self.max_retry = max_retry
def revoke(self, app_config):
"""
Revoke certificates or batches on the blockchain listed in revocation_list_file.
Multiple transactions will be executed.
:return:
"""
hashes = get_revocation_hashes(app_config)
tx_ids = []
if hashes == []:
logging.info('No hashes to revoke. Check your revocation_list_file if you meant to revoke hashes.')
return None
else:
logging.info('Revoking the following hashes: %s', hashes)
while len(hashes) > 0:
hash = hashes.pop()
# ensure balance before every transaction
self.transaction_handler.ensure_balance()
# transform to hex
blockchain_bytes = h2b(ensure_string(hash))
try:
txid = self.transaction_handler.revoke_transaction(blockchain_bytes, app_config)
logging.info('Broadcast revocation of hash %s in tx with txid %s', hash, txid)
tx_ids.append(txid)
remove_from_revocations_list(app_config, hash)
except BroadcastError:
logging.warning('Failed broadcast of transaction.')
return tx_ids
|
StarcoderdataPython
|
6642130
|
# coding:utf-8
import os
import unittest
import subprocess
import json
import socket
import time
from six.moves import queue
from captain_comeback.restart.engine import restart
from captain_comeback.restart.adapter import (docker, docker_wipe_fs, null)
from captain_comeback.cgroup import Cgroup
from captain_comeback.restart.messages import RestartCompleteMessage
from captain_comeback.activity.messages import (RestartCgroupMessage,
RestartTimeoutMessage)
from captain_comeback.test.queue_assertion_helper import (
QueueAssertionHelper)
CG_DOCKER_ROOT_DIR = "/sys/fs/cgroup/memory/docker/"
EXITS_WITH_TERM_1 = ["krallin/ubuntu-tini", "sleep", "100"]
EXITS_WITH_TERM_ALL = ["ubuntu", "sh", "-c", "sleep 100"]
EXITS_IF_FILE = [
"ubuntu", "sh", "-c",
"if test -f foo; then exit 1; else touch foo && sleep 100; fi"
]
NEVER_EXITS = ["ubuntu", "sleep", "100"] # No default sighanders as PID 1
def docker_json(cg):
j = subprocess.check_output(["docker", "inspect", cg.name()])
j = j.decode("utf-8")
return json.loads(j)[0]
def random_free_port():
sock = socket.socket()
sock.bind(('', 0))
port = sock.getsockname()[1]
sock.close()
return port
class RestartTestIntegration(unittest.TestCase, QueueAssertionHelper):
def _launch_container(self, options):
cmd = ["docker", "run", "-d"] + options
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate()
if p.returncode:
m = "{0} failed with status {1}:\n{2}\n{3}".format(cmd,
p.returncode,
out, err)
self.fail(m)
cid = out.decode("utf-8").strip()
self._cids.append(cid)
return Cgroup("/".join([CG_DOCKER_ROOT_DIR, cid]))
def _wait_pids(self, cgroup, count):
for _ in range(100):
time.sleep(0.05)
if len(cgroup.pids()) >= count:
return
self.fail("{0} never had {1} pids", cgroup.name(), count)
def setUp(self):
self._cids = []
def tearDown(self):
for cid in self._cids:
subprocess.check_output(["docker", "rm", "-f", cid])
def test_notifies_queues(self):
cg = self._launch_container(EXITS_WITH_TERM_1)
self._wait_pids(cg, 2)
job_q = queue.Queue()
activity_q = queue.Queue()
restart(docker, 10, cg, job_q, activity_q)
self.assertHasMessageForCg(job_q, RestartCompleteMessage, cg.path)
self.assertHasMessageForCg(activity_q, RestartCgroupMessage, cg.path)
self.assertHasNoMessages(activity_q)
def test_notifies_queues_timeout(self):
cg = self._launch_container(NEVER_EXITS)
self._wait_pids(cg, 1)
job_q = queue.Queue()
activity_q = queue.Queue()
restart(docker, 3, cg, job_q, activity_q)
self.assertHasMessageForCg(job_q, RestartCompleteMessage, cg.path)
self.assertHasMessageForCg(activity_q, RestartCgroupMessage, cg.path)
self.assertHasMessageForCg(activity_q, RestartTimeoutMessage, cg.path,
grace_period=3)
def test_restart_container_with_term_1(self):
cg = self._launch_container(EXITS_WITH_TERM_1)
self._wait_pids(cg, 2)
pid_before = docker_json(cg)["State"]["Pid"]
time_before = time.time()
q = queue.Queue()
restart(docker, 10, cg, q, q)
time_after = time.time()
pid_after = docker_json(cg)["State"]["Pid"]
self.assertNotEqual(pid_before, pid_after)
self.assertLess(time_after - time_before, 5)
def test_restart_container_with_term_all(self):
cg = self._launch_container(EXITS_WITH_TERM_ALL)
self._wait_pids(cg, 2)
pid_before = docker_json(cg)["State"]["Pid"]
time_before = time.time()
q = queue.Queue()
restart(docker, 10, cg, q, q)
time_after = time.time()
pid_after = docker_json(cg)["State"]["Pid"]
self.assertNotEqual(pid_before, pid_after)
self.assertLess(time_after - time_before, 5)
def test_restarts_misbehaved_container(self):
cg = self._launch_container(NEVER_EXITS)
self._wait_pids(cg, 1)
pid_before = docker_json(cg)["State"]["Pid"]
time_before = time.time()
q = queue.Queue()
restart(docker, 3, cg, q, q)
time_after = time.time()
pid_after = docker_json(cg)["State"]["Pid"]
self.assertNotEqual(pid_before, pid_after)
self.assertGreater(time_after - time_before, 2)
def test_restarts_with_ports(self):
host_port = random_free_port()
options = ["-p", "{0}:80".format(host_port)] + EXITS_WITH_TERM_1
cg = self._launch_container(options)
q = queue.Queue()
restart(docker, 10, cg, q, q)
binding = docker_json(cg)["NetworkSettings"]["Ports"]["80/tcp"][0]
port = int(binding["HostPort"])
self.assertEqual(host_port, port)
def test_restart_does_not_wipe_fs(self):
q = queue.Queue()
cg = self._launch_container(EXITS_IF_FILE)
time.sleep(2)
restart(docker, 1, cg, q, q)
time.sleep(2)
self.assertFalse(docker_json(cg)["State"]["Running"])
def test_restart_kills_processes(self):
q = queue.Queue()
cg = self._launch_container(NEVER_EXITS)
time.sleep(2)
restart(null, 1, cg, q, q)
time.sleep(2)
self.assertFalse(docker_json(cg)["State"]["Running"])
@unittest.skipUnless(os.geteuid() == 0, "requires root")
def test_restart_wipes_fs(self):
q = queue.Queue()
cg = self._launch_container(EXITS_IF_FILE)
time.sleep(2)
restart(docker_wipe_fs, 1, cg, q, q)
time.sleep(2)
self.assertTrue(docker_json(cg)["State"]["Running"])
@unittest.skipUnless(os.geteuid() == 0, "requires root")
def test_restart_with_memory_limit(self):
options = ["--memory", "10mb"] + EXITS_WITH_TERM_1
cg = self._launch_container(options)
q = queue.Queue()
restart(docker, 10, cg, q, q)
|
StarcoderdataPython
|
3201236
|
from .entity import Entity
from .entity_sets.log_record_set import LogRecordSet
from .entity_providers.model_providers.log_record_provider import LogRecordProvider
from .entity_fields import EntityField, RelatedEntityField, ReadOnlyField, ManagedEntityField, CurrentTimeManager
from .entity_exceptions import EntityOperationNotPermitted
class LogRecord(Entity):
"""
Defines record that can be attached to every log. Any corefacility module can add one or many records
for a single log
"""
_entity_set_class = LogRecordSet
_entity_provider_list = [
LogRecordProvider()
]
_required_fields = ["log", "record_time", "level", "message"]
_public_field_description = {
"log": RelatedEntityField("core.entity.log.Log", description="Log to this entity relates to"),
"record_time": ManagedEntityField(CurrentTimeManager, description="Log record time"),
"level": EntityField(str, min_length=3, max_length=3, description="Log level identifier"),
"message": EntityField(str, min_length=1, max_length=1024, description="Entity message"),
}
def update(self):
"""
        Throws an exception because log records can't be modified
:return:
"""
raise EntityOperationNotPermitted()
def delete(self):
"""
Throws an exception because log records can't be removed
:return:
"""
raise EntityOperationNotPermitted()
|
StarcoderdataPython
|
11254988
|
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
class for handling configuration data files
Reads a .conf file and obtains its metadata
"""
# Copyright (C) 2003, 2004 <NAME>
# Copyright (C) 2003, 2004 <NAME>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re, bb.data, os, sys
from bb.parse import ParseError, resolve_file, ast
#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
__include_regexp__ = re.compile( r"include\s+(.+)" )
__require_regexp__ = re.compile( r"require\s+(.+)" )
__export_regexp__ = re.compile( r"export\s+(.+)" )
def init(data):
topdir = bb.data.getVar('TOPDIR', data)
if not topdir:
bb.data.setVar('TOPDIR', os.getcwd(), data)
def supports(fn, d):
return fn[-5:] == ".conf"
def include(oldfn, fn, data, error_out):
"""
error_out If True, a ParseError will be raised if the file to be included cannot be found
"""
if oldfn == fn: # prevent infinite recursion
return None
import bb
fn = bb.data.expand(fn, data)
oldfn = bb.data.expand(oldfn, data)
if not os.path.isabs(fn):
dname = os.path.dirname(oldfn)
bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1))
abs_fn = bb.which(bbpath, fn)
if abs_fn:
fn = abs_fn
from bb.parse import handle
try:
ret = handle(fn, data, True)
except IOError:
if error_out:
raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
bb.msg.debug(2, bb.msg.domain.Parsing, "CONF file '%s' not found" % fn)
def handle(fn, data, include):
init(data)
if include == 0:
oldfile = None
else:
oldfile = bb.data.getVar('FILE', data)
abs_fn = resolve_file(fn, data)
f = open(abs_fn, 'r')
if include:
bb.parse.mark_dependency(data, abs_fn)
statements = ast.StatementGroup()
lineno = 0
while 1:
lineno = lineno + 1
s = f.readline()
if not s: break
w = s.strip()
if not w: continue # skip empty lines
s = s.rstrip()
if s[0] == '#': continue # skip comments
while s[-1] == '\\':
s2 = f.readline()[:-1].strip()
lineno = lineno + 1
s = s[:-1] + s2
feeder(lineno, s, fn, statements)
# DONE WITH PARSING... time to evaluate
bb.data.setVar('FILE', fn, data)
statements.eval(data)
if oldfile:
bb.data.setVar('FILE', oldfile, data)
return data
def feeder(lineno, s, fn, statements):
m = __config_regexp__.match(s)
if m:
groupd = m.groupdict()
ast.handleData(statements, groupd)
return
m = __include_regexp__.match(s)
if m:
ast.handleInclude(statements, m, fn, lineno, False)
return
m = __require_regexp__.match(s)
if m:
ast.handleInclude(statements, m, fn, lineno, True)
return
m = __export_regexp__.match(s)
if m:
ast.handleExport(statements, m)
return
raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s));
# Add us to the handlers list
from bb.parse import handlers
handlers.append({'supports': supports, 'handle': handle, 'init': init})
del handlers
|
StarcoderdataPython
|
86334
|
from ray.rllib.algorithms.apex_ddpg import ( # noqa
ApexDDPG as ApexDDPGTrainer,
APEX_DDPG_DEFAULT_CONFIG,
)
|
StarcoderdataPython
|
6514231
|
<filename>addons/io_scene_gltf2/io/com/gltf2_io_color_management.py
# Copyright 2019 The glTF-Blender-IO authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
def color_srgb_to_scene_linear(c):
"""
Convert from sRGB to scene linear color space.
Source: Cycles addon implementation, node_color.h.
"""
if c < 0.04045:
return 0.0 if c < 0.0 else c * (1.0 / 12.92)
else:
return pow((c + 0.055) * (1.0 / 1.055), 2.4)
def color_linear_to_srgb(c):
"""
Convert from linear to sRGB color space.
Source: Cycles addon implementation, node_color.h.
c may be a single color value or an array.
If c's last dimension is 4, it's assumed to be RGBA and the
alpha channel is not converted.
"""
if type(c) in (list, np.ndarray):
colors = np.array(c, np.float32) if type(c) == list else c
if colors.ndim > 1 and colors.shape[-1] == 4:
colors_noa = colors[..., 0:3] # only process RGB for speed
else:
colors_noa = colors
not_small = colors_noa >= 0.0031308
small_result = np.where(colors_noa < 0.0, 0.0, colors_noa * 12.92)
large_result = 1.055 * np.power(colors_noa, 1.0 / 2.4, where=not_small) - 0.055
result = np.where(not_small, large_result, small_result)
if colors.ndim > 1 and colors.shape[-1] == 4:
# copy alpha from original
result = np.concatenate((result, colors[..., 3, np.newaxis]), axis=-1)
return result
else:
if c < 0.0031308:
return 0.0 if c < 0.0 else c * 12.92
else:
return 1.055 * pow(c, 1.0 / 2.4) - 0.055
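# Hedged usage sketch (added for illustration; not part of the original glTF-Blender-IO
# module): round-trip a scalar through the two conversions and convert a small RGBA
# array, where the alpha channel is expected to pass through unchanged.
if __name__ == "__main__":
    lin = color_srgb_to_scene_linear(0.5)
    print(color_linear_to_srgb(lin))  # should print roughly 0.5 again
    rgba = np.array([[0.2, 0.5, 1.0, 0.75]], dtype=np.float32)
    print(color_linear_to_srgb(rgba))  # alpha column stays 0.75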
|
StarcoderdataPython
|
9613559
|
<gh_stars>0
#
# Copyright 2022- IBM Inc. All rights reserved
# SPDX-License-Identifier: Apache2.0
#
# Taken from https://github.com/icoz69/CEC-CVPR2021/blob/main/models/resnet20_cifar.py
import torch.nn as nn
import math
import torch.utils.model_zoo as model_zoo
import numpy as np
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None, last=False):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes)
self.downsample = downsample
self.stride = stride
self.last = last
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class ResNet20(nn.Module):
def __init__(self, block=BasicBlock, layers=[3,3,3], num_classes=512):
self.inplanes = 16
super(ResNet20, self).__init__()
self.conv_embedding = nn.Sequential(
nn.Conv2d(3, 16, kernel_size=3, stride=1, padding=1,
bias=False),
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
self._make_layer(block, 16, layers[0]),
self._make_layer(block, 32, layers[1], stride=2),
self._make_layer(block, 64, layers[2], stride=2, last_phase=True),
nn.AdaptiveAvgPool2d(1),
nn.Flatten()
)
self.n_interm_feat = 64
self.fc = nn.Linear(self.n_interm_feat,num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
elif isinstance(m, nn.BatchNorm2d):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, nn.Linear):
nn.init.xavier_uniform_(m.weight)
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1, last_phase=False):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
if last_phase:
for i in range(1, blocks-1):
layers.append(block(self.inplanes, planes))
layers.append(block(self.inplanes, planes, last=True))
else:
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv_embedding(x)
x = self.fc(x)
return x
def forward_conv(self,x):
x = self.conv_embedding(x)
return x
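# Hedged usage sketch (added for illustration; not part of the original CEC-CVPR2021 code):
# instantiate the backbone and push a random CIFAR-sized batch through both forward paths.
if __name__ == "__main__":
    import torch
    model = ResNet20()
    dummy = torch.randn(2, 3, 32, 32)
    print(model(dummy).shape)               # expected: torch.Size([2, 512])
    print(model.forward_conv(dummy).shape)  # expected: torch.Size([2, 64])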
|
StarcoderdataPython
|
11379305
|
<filename>generate_readme_rst.py
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
f = open('README.txt','w+')
f.write(long_description)
f.close()
|
StarcoderdataPython
|
8120676
|
from lintreview.review import Problems
from lintreview.review import Comment
from lintreview.tools.flake8 import Flake8
from unittest import TestCase
from nose.tools import eq_
class TestFlake8(TestCase):
fixtures = [
'tests/fixtures/pep8/no_errors.py',
'tests/fixtures/pep8/has_errors.py',
]
def setUp(self):
self.problems = Problems()
self.tool = Flake8(self.problems)
def test_match_file(self):
self.assertFalse(self.tool.match_file('test.php'))
self.assertFalse(self.tool.match_file('test.js'))
self.assertFalse(self.tool.match_file('dir/name/test.js'))
self.assertTrue(self.tool.match_file('test.py'))
self.assertTrue(self.tool.match_file('dir/name/test.py'))
def test_process_files__one_file_pass(self):
self.tool.process_files([self.fixtures[0]])
eq_([], self.problems.all(self.fixtures[0]))
def test_process_files__one_file_fail(self):
self.tool.process_files([self.fixtures[1]])
problems = self.problems.all(self.fixtures[1])
eq_(8, len(problems))
fname = self.fixtures[1]
expected = Comment(fname, 2, 2, "W402 're' imported but unused")
eq_(expected, problems[0])
expected = Comment(fname, 11, 11, "W603 '<>' is deprecated, use '!='")
eq_(expected, problems[7])
def test_process_files_two_files(self):
self.tool.process_files(self.fixtures)
eq_([], self.problems.all(self.fixtures[0]))
problems = self.problems.all(self.fixtures[1])
eq_(8, len(problems))
fname = self.fixtures[1]
expected = Comment(fname, 2, 2, "W402 're' imported but unused")
eq_(expected, problems[0])
expected = Comment(fname, 11, 11, "W603 '<>' is deprecated, use '!='")
eq_(expected, problems[7])
def test_config_options_and_process_file(self):
options = {
'ignore': 'E2,W603'
}
self.tool = Flake8(self.problems, options)
self.tool.process_files([self.fixtures[1]])
problems = self.problems.all(self.fixtures[1])
eq_(6, len(problems))
for p in problems:
self.assertFalse('E2' in p)
self.assertFalse('W603' in p)
|
StarcoderdataPython
|
395393
|
<gh_stars>0
# -*- coding:utf-8 -*-
from os import path
from netCDF4 import Dataset, num2date
from scipy.io import loadmat
from yaml import full_load
from RBR.ctd import convert2nc as conv2nc_rbr
from RDI.util import gen_time
from util import detect_brand
def ctd_ref_data(adcp_path, time_offset, adcp_hgt):
ext = adcp_path.split('.')[-1]
if ext.upper() == 'NC':
data = Dataset(adcp_path)
ref_dep = data['dep'][:] + adcp_hgt
ref_time = data['time']
return ref_dep, num2date(ref_time[:], ref_time.units, ref_time.calendar)
elif ext.upper() == 'MAT':
data = loadmat(adcp_path)
ref_dep = data['AnDepthmm'] / 1000
ref_time = gen_time(data, time_offset)
return ref_dep, ref_time
def ctd2nc(prefix, ctd_paths, rslt_path, time_units, calendar, ctd_info_all,
adcp_info_all, sta_info_all, zlib, complevel, author, email):
this_file_dir = path.split(path.abspath(__file__))[0]
conf = full_load(open('/'.join([this_file_dir, 'config.yml'])))
valid_brands = [x.upper() for x in conf['ValidBrands']['ctd']]
for key, val in ctd_paths.items():
ctd_path = ''.join([prefix, val])
save_path = ''.join([prefix, rslt_path[key]])
sta_info = sta_info_all[key]
bin_size = ctd_info_all['bin_size']
ref_data_path = ctd_info_all['ref_data'][key]
if 'time_offset' in ctd_info_all:
time_offset = ctd_info_all['time_offset']
else:
time_offset = None
adcp_time_offset = adcp_info_all['time_offset']
adcp_hgt = adcp_info_all['adcp_hgt']
ref_dep, ref_time = ctd_ref_data(''.join([prefix, ref_data_path]),
adcp_time_offset, adcp_hgt)
brands = detect_brand(sta_info['observe_instrument'], 'CTD')
for brand in brands:
tmp_brand = brand.upper()
if tmp_brand not in valid_brands:
raise NotImplementedError(
'Invalid brand. You can update the config file and add some'
' method to process the related dataset and export them.')
elif tmp_brand == 'RBR':
conv2nc_rbr(ctd_path, save_path, time_units, calendar, sta_info,
bin_size, ref_dep, ref_time, time_offset, zlib,
complevel, author, email)
else:
raise NotImplementedError('Unrealized functions.')
return True, None
|
StarcoderdataPython
|
1695484
|
<reponame>p2o-lab/planteye<filename>src/planteye_vision/shell/rest_api_shell.py
from flask import Flask, request, jsonify
import logging
import threading
from planteye_vision.shell.shell import Shell
from planteye_vision.configuration.shell_configuration import RestAPIShellConfiguration
from planteye_vision.configuration.planteye_configuration import PlantEyeConfiguration
class RestAPIShell(Shell):
"""
This class describes an REST API shell
"""
def __init__(self, config: RestAPIShellConfiguration):
self.config = config
self.webserver = None
self.webserver_thread = None
self.response_callback = None
self.planteye_config = None
def apply_configuration(self):
host = self.config.parameters['host']
port = self.config.parameters['port']
self.webserver = RestAPIWebserver('PlantEye', host, port)
endpoint = self.config.parameters['endpoint']
endpoint_name = 'PlantEye REST API Shell'
self.webserver.add_url_rule(endpoint, endpoint_name, self.response_callback, ['GET'])
self.webserver.add_url_rule('/upload_config', 'configuration update', self.upload_configuration_callback, ['POST'])
self.webserver.add_url_rule('/get_config', 'configuration', self.download_configuration_callback, ['GET'])
self.webserver.add_url_rule('/', 'homepage', self.homepage_callback, ['GET'])
self.connect()
def attach_planteye_configuration(self, config: PlantEyeConfiguration):
self.planteye_config = config
def attach_callback(self, callback: callable):
self.response_callback = callback
def homepage_callback(self):
welcome_str = 'Welcome to PlantEye API. Available endpoint is %s' % self.config.parameters['endpoint']
return welcome_str
def download_configuration_callback(self):
return jsonify(self.planteye_config.cfg_dict)
def upload_configuration_callback(self):
if not hasattr(self, 'pipeline_executor'):
return
content_type = request.headers.get('Content-Type')
if content_type == 'application/json':
uploaded_cfg = request.json
print(uploaded_cfg)
self.pipeline_executor.config.update(uploaded_cfg)
self.pipeline_executor.update_configuration()
return 'Configuration applied'
else:
return 'Content-Type not supported! Only json application/json is supported!'
def enable_configuration_update_via_restapi(self, pipeline_executor):
self.pipeline_executor = pipeline_executor
def connect(self):
self.webserver_thread = threading.Thread(target=self.webserver.run)
self.webserver_thread.start()
def disconnect(self):
pass
class RestAPIWebserver:
def __init__(self, name: str, host: str, port: int):
self.name = name
self.host = host
self.port = port
self.endpoint_flask_app = Flask(name)
def add_url_rule(self, endpoint: str, name: str, rule: callable, methods):
self.endpoint_flask_app.add_url_rule(endpoint, name, rule, methods=methods)
def run(self):
try:
self.endpoint_flask_app.run(host=self.host, port=self.port)
except PermissionError:
logging.error('Cannot start flask server with given configuration')
|
StarcoderdataPython
|
303255
|
<filename>src/fastapi_aad_auth/_base/state.py
"""Authentication State Handler."""
from enum import Enum
import importlib
import json
from typing import List, Optional
import uuid
from itsdangerous import URLSafeSerializer
from itsdangerous.exc import BadSignature
from pydantic import Field, root_validator, validator
from starlette.authentication import AuthCredentials, SimpleUser, UnauthenticatedUser
from fastapi_aad_auth.errors import AuthenticationError
from fastapi_aad_auth.mixins import LoggingMixin
from fastapi_aad_auth.utilities import InheritableBaseModel, InheritablePropertyBaseModel
SESSION_STORE_KEY = 'auth'
class AuthenticationOptions(Enum):
"""Authentication Options."""
unauthenticated = 0
not_allowed = -1
authenticated = 1
class User(InheritablePropertyBaseModel):
"""User Model."""
name: str = Field(..., description='Full name')
email: str = Field(..., description='User email')
username: str = Field(..., description='Username')
roles: Optional[List[str]] = Field(None, description='Any roles provided')
groups: Optional[List[str]] = Field(None, description='Any groups provided')
scopes: Optional[List[str]] = Field(None, description='Token scopes provided')
@property
def permissions(self):
"""User Permissions."""
permissions = []
if self.scopes:
for scope in self.scopes:
if not scope.startswith('.'):
permissions.append(scope)
return permissions[:]
@property
def klass(self):
"""Return the user klass information for loading from a session."""
return f'{self.__class__.__module__}:{self.__class__.__name__}'
@validator('scopes', always=True, pre=True)
def _validate_scopes(cls, value):
if isinstance(value, str):
value = value.split(' ')
return value
class AuthenticationState(LoggingMixin, InheritableBaseModel):
"""Authentication State."""
_logger = None
session_state: str = str(uuid.uuid4())
state: AuthenticationOptions = AuthenticationOptions.unauthenticated
user: Optional[User] = None
class Config: # noqa: D106
underscore_attrs_are_private = True
@validator('user', always=True, pre=True)
def _validate_user_klass(cls, value):
if isinstance(value, dict):
klass = value.get('klass', None)
if klass:
module, name = klass.split(':')
mod = importlib.import_module(module)
klass = getattr(mod, name)
else:
klass = User
value = klass(**value)
return value
@root_validator(pre=True)
def _validate_user(cls, values):
if values.get('user', None) is None:
values['state'] = AuthenticationOptions.unauthenticated
return values
def check_session_state(self, session_state):
"""Check state against session state."""
if session_state != self.session_state:
raise AuthenticationError("Session states do not match")
return True
def store(self, serializer):
"""Store in serializer."""
return serializer.dumps(self.json())
@classmethod
def load(cls, serializer: URLSafeSerializer, encoded_state: Optional[str] = None):
"""Load from encoded state.
Args:
serializer: Serializer object containing the en/decoding secrets
Keyword Args:
encoded_state: The encoded state to be decoded
"""
if encoded_state:
try:
state = json.loads(serializer.loads(encoded_state))
loaded_state = cls(**state)
except BadSignature:
loaded_state = cls()
else:
loaded_state = cls()
return loaded_state
@classmethod
def logout(cls, serializer: URLSafeSerializer, session):
"""Clear the sessions state."""
state = cls.load_from_session(serializer, session)
state.user = None
state.state = AuthenticationOptions.unauthenticated
session[SESSION_STORE_KEY] = state.store(serializer)
@classmethod
def load_from_session(cls, serializer: URLSafeSerializer, session):
"""Load from a session."""
return cls.load(serializer, session.get(SESSION_STORE_KEY, None))
def save_to_session(self, serializer: URLSafeSerializer, session):
"""Save to a session."""
session[SESSION_STORE_KEY] = self.store(serializer)
return session
def is_authenticated(self):
"""Check if the state is authenticated."""
return self.user is not None and self.state == AuthenticationOptions.authenticated
@property
def authenticated_user(self):
"""Get the authenticated user."""
if self.is_authenticated() and self.user:
if isinstance(self.user, User):
return SimpleUser(self.user.email)
return UnauthenticatedUser()
@property
def credentials(self):
"""Get the credentials object."""
if self.user and self.is_authenticated():
return AuthCredentials(['authenticated'] + self.user.permissions)
else:
return AuthCredentials()
@classmethod
def authenticate_as(cls, user, serializer, session):
"""Store the authenticated user."""
state = cls(user=user, state=AuthenticationOptions.authenticated)
if serializer is not None and session is not None:
state.save_to_session(serializer, session)
return state
@classmethod
def as_unauthenticated(cls, serializer, session):
"""Store as an un-authenticated user."""
return cls.authenticate_as(None, serializer, session)
|
StarcoderdataPython
|
3374356
|
import hashlib
from enum import Enum
from django.core.cache import cache
from django.db.models import Q
from .models import Student
class response_msg(Enum):
MSG_ERROR = "请正确填写信息"
MSG_NOT_FOUND = "没有查到你的信息"
MSG_SYSTEM_ERROR = "系统错误请联系精弘客服"
class index_type(Enum):
Dorm = "寝室"
Sid = "学号"
class index_link(Enum):
Dorm = "/dorm/info"
Sid = "/sid/info"
img_type = ['pf_scenery', 'zh_scenery', 'pf_canteen', 'zh_canteen', 'pf_doom', 'zh_doom']
img_folder = {'pf_scenery': '屏峰风光', 'zh_scenery': '朝晖风光', 'pf_canteen': '屏峰食堂',
'zh_canteen': '朝晖食堂', 'pf_doom': '屏峰寝室', 'zh_doom': '朝晖寝室'}
def get_student(uf):
stu_name = uf.cleaned_data['sname']
sid = uf.cleaned_data['sid'].upper().replace("•", "·").replace(".", "·").replace("。", "·").replace(" ", '')
sha = hashlib.md5()
sha.update(sid.encode('utf8'))
sid = sha.hexdigest()
stu_cache = cache.get('GetID_' + stu_name + sid)
if stu_cache is None:
stu = Student.objects.filter(sname=stu_name, sid=sid)
if not stu:
return None
stu = stu[0]
cache.set('GetID_' + stu.sname + stu.sid, stu)
else:
stu = stu_cache
return stu
def get_roommates(stu):
if stu.shouse is None or stu.sroom is None:
return None
room_cache = cache.get('GetRoom_' + stu.shouse + stu.sroom)
if room_cache is None:
roommates = Student.objects.filter(sroom=stu.sroom, shouse=stu.shouse)
cache.set('GetRoom_' + stu.shouse + stu.sroom, roommates)
else:
roommates = room_cache
return roommates.filter(~Q(sid=stu.sid))
|
StarcoderdataPython
|
8120271
|
"""
1.1
Implement an algorithm to determine if a string has all unique characters.
What if you cannot use additional data structures?
Examples
--------
input: 'string'
output: True
input: 'unique'
output: False
"""
# SOLUTION 1 - HASHTABLE
# Efficiency
# space: O(n) time: O(n)
def unique_chars1(string):
d = {}
for char in string:
if char in d:
return False
else:
d[char] = True
return True
# SOLUTION 2 - IN-PLACE SORT
# Efficiency
# space: O(1) time: O(2n log n)
def unique_chars2(string):
l = list(string)
l.sort()
for i in range(len(l)-1):
if l[i] == l[i + 1]:
return False
return True
"""
1.2
Given 2 strings, determine if one is a permutation of the other
Examples
--------
input: 'listen' 'silent'
output: True
input: 'loot' 'tool'
output: True
input: 'listens' 'silent'
output: False
input: 'loot', 'look'
output: False
"""
# SOLUTION 1 - HASHTABLE
# Efficiency
# space: O(n) time: O(2n)
def permutation_ht(str1, str2):
if len(str1) != len(str2):
return False
d = {}
for char in str1:
if char in d:
d[char] += 1
else:
d[char] = 1
for char in str2:
if char in d:
if d[char] > 0:
d[char] -= 1
else:
return False
else:
return False
return True
# SOLUTION 2 - SORT & COMPARE
# Efficiency
# space: O(1) time: O(6n log n)
def permutation_sort(str1, str2):
if len(str1) != len(str2):
return False
l1 = list(str1)
l2 = list(str2)
l1.sort()
l2.sort()
for i in range(len(str1)):
if l1[i] != l2[i]:
return False
return True
"""
1.3
Write a method to replace all spaces in a string with '%20'.
Assume that the string has sufficient space at the end to hold the additional characters,
and that you are given the "true" length of the string.
Examples
--------
input: '<NAME> ' '13'
output: 'Mr%20John%20Smith%20'
"""
# SOLUTION - SHIFT & REPLACE FROM END
# Efficiency
# space: O(n) time: O(n)
def urlify(string, num):
string = list(string)
r = len(string) - 1
for i in range(num -1, -1, -1):
if string[i] != ' ':
string[r] = string[i]
r -= 1
else:
string[r] = '0'
string[r-1] = '2'
string[r-2] = '%'
r -= 3
return ''.join(string)
# if __name__ == "__main__":
# Tests 1.3
# phrase = '<NAME> '
# phrase = 'a b c '
# length = 5
# print(urlify(phrase, length))
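# Hedged example calls (added for illustration; they simply echo the docstring examples above):
if __name__ == "__main__":
    print(unique_chars1('string'), unique_chars2('unique'))  # True False
    print(urlify('a b c    ', 5))                            # 'a%20b%20c'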
|
StarcoderdataPython
|
1741848
|
<reponame>district10/snippet-manager<filename>snippets/rstrip.py
from builtins import float
class FormattedFloat(float):
def __str__(self):
return "{:.10f}".format(self).rstrip('0')
|
StarcoderdataPython
|
8030656
|
<reponame>AndrejOrsula/ecard<filename>ecard/launch/manipulation.launch.py
import os
import yaml
from launch import LaunchDescription
from launch_ros.actions import Node
from ament_index_python.packages import get_package_share_directory
def load_file(package_name, file_path):
package_path = get_package_share_directory(package_name)
absolute_file_path = os.path.join(package_path, file_path)
try:
with open(absolute_file_path, 'r') as file:
return file.read()
except EnvironmentError:
return None
def load_yaml(package_name, file_path):
package_path = get_package_share_directory(package_name)
absolute_file_path = os.path.join(package_path, file_path)
try:
with open(absolute_file_path, 'r') as file:
return yaml.load(file)
except EnvironmentError:
return None
def generate_launch_description():
moveit_cpp_yaml_file_name = get_package_share_directory(
'ecard') + "/config/moveit/moveit_cpp.yaml"
robot_description_config = load_file(
'ecard', 'config/moveit/panda_urdf/panda.urdf')
robot_description = {'robot_description': robot_description_config}
robot_description_semantic_config = load_file(
'ecard', 'config/moveit/panda_moveit_config/panda.srdf')
robot_description_semantic = {
'robot_description_semantic': robot_description_semantic_config}
kinematics_yaml = load_yaml(
'ecard', 'config/moveit/panda_moveit_config/kinematics.yaml')
robot_description_kinematics = {
'robot_description_kinematics': kinematics_yaml}
controllers_yaml = load_yaml('ecard', 'config/moveit/controllers.yaml')
moveit_controllers = {'moveit_simple_controller_manager': controllers_yaml}
ompl_planning_pipeline_config = {'ompl': {
'planning_plugin': 'ompl_interface/OMPLPlanner',
'request_adapters': """default_planner_request_adapters/AddTimeOptimalParameterization default_planner_request_adapters/FixWorkspaceBounds default_planner_request_adapters/FixStartStateBounds default_planner_request_adapters/FixStartStateCollision default_planner_request_adapters/FixStartStatePathConstraints""",
'start_state_max_bounds_error': 0.1}}
ompl_planning_yaml = load_yaml(
'ecard', 'config/moveit/panda_moveit_config/ompl_planning.yaml')
ompl_planning_pipeline_config['ompl'].update(ompl_planning_yaml)
rviz_config_file = get_package_share_directory(
'ecard') + "/config/moveit/rviz2.rviz"
return LaunchDescription([
Node(package='tf2_ros',
node_executable='static_transform_publisher',
node_name='static_transform_publisher',
output='log',
arguments=['0.0', '0.0', '0.0', '0.0', '0.0', '0.0', 'world', 'panda_link0']),
Node(package='fake_joint_driver',
node_executable='fake_joint_driver_node',
parameters=[os.path.join(get_package_share_directory("ecard"), "config", "moveit", "panda_controllers.yaml"),
os.path.join(get_package_share_directory(
"ecard"), "config", "moveit", "start_positions.yaml"),
robot_description,
robot_description_kinematics]
),
Node(node_name='ecard',
package='ecard',
node_executable='ecard',
output='screen',
parameters=[moveit_cpp_yaml_file_name,
robot_description,
robot_description_semantic,
kinematics_yaml,
ompl_planning_pipeline_config,
moveit_controllers],
remappings=[('object_of_interest', 'ecard/gaze_correlation/object_of_interest'),
('point_of_gaze', 'ecard/gaze_correlation/point_of_gaze')],
),
Node(package='rviz2',
node_executable='rviz2',
node_name='rviz2',
output='log',
arguments=['-d', rviz_config_file],
parameters=[robot_description]),
])
|
StarcoderdataPython
|
1639516
|
<gh_stars>0
# -*- coding: utf-8 -*-
""" Tests for Bearer authentication class. """
import json
import httpretty
import mock
from django.contrib.auth import get_user_model
from django.test import RequestFactory, TestCase, override_settings
from requests import RequestException
from rest_framework.exceptions import AuthenticationFailed
from edx_rest_framework_extensions.auth.bearer.authentication import (
BearerAuthentication,
)
from edx_rest_framework_extensions.tests import factories
OAUTH2_USER_INFO_URL = 'http://example.com/oauth2/user_info/'
USER_INFO = {
'username': 'jdoe',
'first_name': 'Jane',
'last_name': 'Doê',
'email': '<EMAIL>',
}
User = get_user_model()
class AccessTokenMixin:
""" Test mixin for dealing with OAuth2 access tokens. """
DEFAULT_TOKEN = 'abc123'
def mock_user_info_response(self, status=200, username=None):
""" Mock the user info endpoint response of the OAuth2 provider. """
username = username or USER_INFO['username']
data = {
'family_name': USER_INFO['last_name'],
'preferred_username': username,
'given_name': USER_INFO['first_name'],
'email': USER_INFO['email'],
}
httpretty.register_uri(
httpretty.GET,
OAUTH2_USER_INFO_URL,
body=json.dumps(data),
content_type='application/json',
status=status
)
@override_settings(EDX_DRF_EXTENSIONS={'OAUTH2_USER_INFO_URL': OAUTH2_USER_INFO_URL})
class BearerAuthenticationTests(AccessTokenMixin, TestCase):
""" Tests for the BearerAuthentication class. """
TOKEN_NAME = 'Bearer'
def setUp(self):
super(BearerAuthenticationTests, self).setUp()
self.auth = BearerAuthentication()
self.factory = RequestFactory()
def create_authenticated_request(self, token=AccessTokenMixin.DEFAULT_TOKEN, token_name=TOKEN_NAME):
""" Returns a Request with the authorization set using the specified values. """
auth_header = '{token_name} {token}'.format(token_name=token_name, token=token)
request = self.factory.get('/', HTTP_AUTHORIZATION=auth_header)
return request
def assert_user_authenticated(self):
""" Assert a user can be authenticated with a bearer token. """
user = factories.UserFactory()
self.mock_user_info_response(username=user.username)
request = self.create_authenticated_request()
self.assertEqual(self.auth.authenticate(request), (user, self.DEFAULT_TOKEN))
def assert_authentication_failed(self, token=AccessTokenMixin.DEFAULT_TOKEN, token_name=TOKEN_NAME):
""" Assert authentication fails for a generated request. """
request = self.create_authenticated_request(token=token, token_name=token_name)
self.assertRaises(AuthenticationFailed, self.auth.authenticate, request)
def test_authenticate_header(self):
""" The method should return the string Bearer. """
self.assertEqual(self.auth.authenticate_header(self.create_authenticated_request()), 'Bearer')
@override_settings(EDX_DRF_EXTENSIONS={'OAUTH2_USER_INFO_URL': None})
def test_authenticate_no_user_info_url(self):
""" If the setting OAUTH2_USER_INFO_URL is not set, the method returns None. """
# Empty value
self.assertIsNone(self.auth.authenticate(self.create_authenticated_request()))
# Missing value
with override_settings(EDX_DRF_EXTENSIONS={}):
self.assertIsNone(self.auth.authenticate(self.create_authenticated_request()))
def test_authenticate_invalid_token(self):
""" If no token is supplied, or if the token contains spaces, the method should raise an exception. """
# Missing token
self.assert_authentication_failed(token='')
# Token with spaces
self.assert_authentication_failed(token='<KEY>')
def test_authenticate_invalid_token_name(self):
""" If the token name is not Bearer, the method should return None. """
request = self.create_authenticated_request(token_name='foobar')
self.assertIsNone(self.auth.authenticate(request))
@httpretty.activate
def test_authenticate_inactive_user(self):
""" If the user matching the access token is inactive, the method should raise an exception. """
user = factories.UserFactory(is_active=False)
self.mock_user_info_response(username=user.username)
self.assert_authentication_failed()
@httpretty.activate
def test_authenticate_invalid_token_response(self):
""" If the user info endpoint does not return HTTP 200, the method should return raise an exception. """
self.mock_user_info_response(status=400)
self.assert_authentication_failed()
@httpretty.activate
def test_authenticate(self):
""" If the access token is valid, the user exists, and is active, a tuple containing
the user and token should be returned.
"""
self.assert_user_authenticated()
@httpretty.activate
def test_authenticate_as_new_user(self):
""" Verify a new user is created. """
self.mock_user_info_response()
request = self.create_authenticated_request()
actual_user, actual_token = self.auth.authenticate(request)
self.assertEqual(actual_token, self.DEFAULT_TOKEN)
self.assertEqual(actual_user, User.objects.get(username=USER_INFO['username']))
@httpretty.activate
def test_authenticate_user_creation_with_existing_user(self):
""" Verify an existing user is returned, if the user already exists. """
user = factories.UserFactory(username=USER_INFO['username'])
self.mock_user_info_response()
request = self.create_authenticated_request()
actual_user, actual_token = self.auth.authenticate(request)
self.assertEqual(actual_token, self.DEFAULT_TOKEN)
self.assertEqual(actual_user, user)
@httpretty.activate
def test_authenticate_user_creation_with_request_status_failure(self):
""" Verify authentication fails if the request to retrieve user info returns a non-200 status. """
original_user_count = User.objects.all().count()
self.mock_user_info_response(status=401)
request = self.create_authenticated_request()
self.assertRaises(AuthenticationFailed, self.auth.authenticate, request)
self.assertEqual(User.objects.all().count(), original_user_count)
def test_authenticate_user_creation_with_request_exception(self):
""" Verify authentication fails if the request to retrieve user info raises an exception. """
original_user_count = User.objects.all().count()
request = self.create_authenticated_request()
with mock.patch('requests.get', mock.Mock(side_effect=RequestException)):
self.assertRaises(AuthenticationFailed, self.auth.authenticate, request)
self.assertEqual(User.objects.all().count(), original_user_count)
|
StarcoderdataPython
|
188548
|
<gh_stars>10-100
class Crc8():
"""
Implements the 1-wire CRC8 checksum.
(The polynomial should be X^8 + X^5 + X^4 + X^0)
"""
R1 = [0x00, 0x5e, 0xbc, 0xe2, 0x61, 0x3f, 0xdd, 0x83,
0xc2, 0x9c, 0x7e, 0x20, 0xa3, 0xfd, 0x1f, 0x41]
R2 = [0x00, 0x9d, 0x23, 0xbe, 0x46, 0xdb, 0x65, 0xf8,
0x8c, 0x11, 0xaf, 0x32, 0xca, 0x57, 0xe9, 0x74]
def __init__(self, data = None):
self.crc = 0
if data:
self.add_bytes(data)
def add_bytes(self, data):
for byte in data:
x = (byte ^ self.crc) & 0xFF
self.crc = (self.R1[x & 0xF] ^ self.R2[(x >> 4) & 0xF]) & 0xFF
return self.crc
def get_crc(self):
return self.crc
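# Hedged usage sketch (added for illustration; the byte values below are arbitrary,
# not a reference test vector): feed a few bytes in one go or incrementally.
if __name__ == "__main__":
    print(hex(Crc8(b"\x28\xff\x4b").get_crc()))
    crc = Crc8()
    crc.add_bytes(b"\x28\xff")
    crc.add_bytes(b"\x4b")
    print(hex(crc.get_crc()))  # same value as above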
|
StarcoderdataPython
|
5177080
|
import copy
import re
from .common_func import modify_dict_result
from .common_func import remove_root_duplicate
from .key_monad import key_monad
NFS_REGEX = r"^nfs://"
NFS_REGEX_C = re.compile(NFS_REGEX, flags=re.IGNORECASE)
NO_NFS_REGEX = r"^(?!nfs://).*$"
NO_NFS_REGEX_C = re.compile(NO_NFS_REGEX, flags=re.IGNORECASE)
EMPTY_REGEX = r"^$"
EMPTY_REGEX_C = re.compile(EMPTY_REGEX)
def __add_nfs_protocol(matchgroup):
return "nfs://" + matchgroup.group(0)
def aggregate_map(current_keymonad):
def aggregate(assigned_keymonad):
final_result = copy.deepcopy(current_keymonad.result)
final_result.update(assigned_keymonad.result)
aggregate_keys = list(
set(current_keymonad.absolute_key +
assigned_keymonad.absolute_key))
return key_monad(aggregate_keys,
final_result,
current_keymonad.key_separator)
return aggregate
def default_root_user_value_map(current_keymonad):
ROOTUSER_KEYS = ("rootuser",)
modified_result = modify_dict_result(
current_keymonad.result,
ROOTUSER_KEYS,
EMPTY_REGEX_C,
r"/root/root")
return key_monad(
current_keymonad.absolute_key,
modified_result,
current_keymonad.key_separator)
def remove_nfs_protocol_map(current_keymonad):
CHECK_NFS_KEYS = ("nfs_location", "uri")
modified_result = modify_dict_result(
current_keymonad.result,
CHECK_NFS_KEYS,
NFS_REGEX_C,
"")
return key_monad(
current_keymonad.absolute_key,
modified_result,
current_keymonad.key_separator)
def add_nfs_protocol_map(current_keymonad):
CHECK_NFS_KEYS = ("nfs_location", "uri")
modified_result = modify_dict_result(
current_keymonad.result,
CHECK_NFS_KEYS,
NO_NFS_REGEX_C,
__add_nfs_protocol)
return key_monad(
current_keymonad.absolute_key,
modified_result,
current_keymonad.key_separator)
def default_site_type_map(current_keymonad):
""" Setup default site_type to 'dev' """
SITE_TYPE_KEYS = ("site_type",)
modified_result = modify_dict_result(
current_keymonad.result,
SITE_TYPE_KEYS,
EMPTY_REGEX_C,
r"prod")
return key_monad(
current_keymonad.absolute_key,
modified_result,
current_keymonad.key_separator)
def remove_duplicate_entry_map(current_keymonad):
modified_result = remove_root_duplicate(
current_keymonad.result)
return key_monad(
current_keymonad.absolute_key,
modified_result,
current_keymonad.key_separator)
|
StarcoderdataPython
|
5042133
|
<gh_stars>0
import random
import itertools
import numpy
def permute_training_ex(training_ex):
"""
Takes an array of shape (num_channels, 12, 12), randomly shuffles each set of 3 rows
and columns in each channel, and returns the resulting (num_channels, 12, 12) array.
Params:
training_ex: an array of shape (num_channels, 12, 12), representing 44 channels of
a 12x12 matrix
Returns:
an array of shape (num_channels, 12, 12)
"""
perms = list(itertools.permutations([1, 2, 3, 4]))
# random.seed(0)
perm = random.choice(perms)
new_training_ex = []
for channel in training_ex:
temp_channel = numpy.zeros([12, 12])
temp_channel[0:3,:] = channel[(perm[0]-1)*3:((perm[0]-1)*3)+3,:]
temp_channel[3:6,:] = channel[(perm[1]-1)*3:((perm[1]-1)*3)+3,:]
temp_channel[6:9,:] = channel[(perm[2]-1)*3:((perm[2]-1)*3)+3,:]
temp_channel[9:12,:] = channel[(perm[3]-1)*3:((perm[3]-1)*3)+3,:]
new_channel = numpy.zeros([12, 12])
new_channel[:,0:3] = temp_channel[:,(perm[0]-1)*3:((perm[0]-1)*3)+3]
new_channel[:,3:6] = temp_channel[:,(perm[1]-1)*3:((perm[1]-1)*3)+3]
new_channel[:,6:9] = temp_channel[:,(perm[2]-1)*3:((perm[2]-1)*3)+3]
new_channel[:,9:12] = temp_channel[:,(perm[3]-1)*3:((perm[3]-1)*3)+3]
new_training_ex.append(new_channel)
return numpy.array(new_training_ex)
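# Hedged usage sketch (added for illustration): permute a random 2-channel example and
# check that the shape and the multiset of values are preserved.
if __name__ == "__main__":
    example = numpy.random.rand(2, 12, 12)
    permuted = permute_training_ex(example)
    print(permuted.shape)  # (2, 12, 12)
    print(numpy.allclose(numpy.sort(example.ravel()), numpy.sort(permuted.ravel())))  # True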
|
StarcoderdataPython
|
284084
|
from . import operation
from oslo_versionedobjects import fields
from oslo_versionedobjects import base
class MessagingBase(operation.Operation):
# Version 1.0: Initial version
VERSION = "1.0"
fields = {
'server': fields.StringField(nullable=True),
'topic': fields.StringField(),
'namespace': fields.StringField(nullable=True),
'version': fields.StringField(),
'method': fields.StringField(),
'params': fields.DictOfStringsField(),
}
@base.VersionedObjectRegistry.register
class MessagingCast(MessagingBase):
pass
@base.VersionedObjectRegistry.register
class MessagingCall(MessagingBase):
pass
@base.VersionedObjectRegistry.register
class MessagingDispatch(MessagingBase):
pass
|
StarcoderdataPython
|
3350215
|
<filename>molfunc-reaction/A200-sync-go-chebi-rels.py
import os, json, argparse, sys, datetime, time
import pronto, six
"""
grep ^in.*CHEBI /home/ralf/go-ontology/src/ontology/go-edit.obo |sed 's+ CHEB.*++g' |sort|uniq
The relevant data to sync is in these lines in the Gene Ontology:
intersection_of: PROPERTY CHEBI:
where PROPERTY is one of:
has_part --> has part
has_input
has_intermediate
has_output
has_participant
has_primary_input
has_primary_input_or_output
has_primary_output
process_has_causal_agent
regulates_levels_of
exports
imports
transports_or_maintains_localization_of
"""
wdeecnt = 0
def wdee(j):
global wdeecnt
wdeecnt = wdeecnt + 1
# if wdeecnt > 10:
# exit()
f = open('t.json', 'w')
f.write(json.dumps(j))
f.close()
print(json.dumps(j), flush=True)
ret = os.popen('wd ee t.json')
print(ret.read())
if ret.close() is not None:
print('ERROR')
def ensure(subit, obit, role, prop):
print('ensure {} {}'.format(subit, obit))
GOREF = "Q93741199"
ss = stmts.get(subit)
qprop = 'P3831'
if prop == 'P361':
qprop = 'P2868'
if ss is not None:
os = ss.get(obit)
if os is not None and os[0] == prop:
if ((os[2] is not None) or
(role is None and (os[3] is not None or os[4] is not None))):
return
if role is None:
# add ref to stmt
j = {"id": subit, "claims": { prop: [{ "id": os[1],
"value": obit,
"references": { "P248": GOREF }}] } }
wdee(j)
return
# add ref to stmt
j = {"id": subit, "claims": { prop: [{ "id": os[1],
"value": obit,
"qualifiers": { qprop: role },
"references": { "P248": GOREF }}] } }
wdee(j)
return
# create stmt
if role is None:
j = {"id": subit, "claims": { prop: [{ "value": obit,
"references": { "P248": GOREF }}] } }
else:
j = {"id": subit, "claims": { prop: [{ "value": obit,
"qualifiers": { qprop: role },
"references": { "P248": GOREF }}] } }
wdee(j)
# Initiate the parser
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--output_qs", help="output to QS",
action="store_true")
parser.add_argument("-q", "--query", help="perform SPARQL query",
action="store_true")
# Read arguments from the command line
args = parser.parse_args()
# Check for --version or -V
QS = args.output_qs
dontquery = not args.query
script = os.path.basename(sys.argv[0])[:-3]
if dontquery is False:
print('performing query...')
ret = os.popen('wd sparql {}.rq >{}.json'.format(script, script))
if ret.close() is not None:
raise
file = open('{}.json'.format(script))
s = file.read()
jol = json.loads(s)
chits = {}
goits = {}
stmts = {}
for d in jol:
item = d.get('item')
chid = d.get('chid')
goid = d.get('goid')
if (chid is None) == (goid is None):
print('CANT HAPPEN: {}'.format(chid))
exit()
if chid is not None:
chits[chid] = item
else:
goits[goid] = item
prop = d.get('prop')
if prop is not None:
prop = prop[prop.rfind('/')+1:]
s = stmts.get(item)
obj = d.get('obj')
tup = (prop, d.get('stmt'), d.get('ref'),
d.get('srole'), d.get('orole'))
if s is not None:
if s.get(obj) is not None:
if s.get(obj)[1] == tup[1]:
continue
print('CANT HAPPEN: {} {}'.format(item, obj))
exit()
s[obj] = tup
else:
stmts[item] = { obj : tup }
print('Reading GO')
ont = pronto.Ontology('/home/ralf/go-ontology/src/ontology/go-edit.obo')
for term in ont.terms():
goid = term.id
if not goid.startswith('GO:'):
continue
goit = goits.get(goid)
if goit is None:
continue
try:
for i in term.intersection_of:
if (not type(i) is tuple) or (not type(i[0]) is pronto.Relationship):
continue
chid = i[1].id
if not chid.startswith('CHEBI:'):
continue
chid = chid[6:]
chit = chits.get(chid)
if chit is None:
continue
type_ = i[0].name
print(goid, chit, type_)
if (type_ == 'has output'
or type_ == 'has primary output'):
ensure(goit, chit, 'Q542929', 'P527')
ensure(chit, goit, 'Q542929', 'P361')
if (type_ == 'has input'
or type_ == 'has primary input'):
ensure(goit, chit, 'Q45342565', 'P527')
ensure(chit, goit, 'Q45342565', 'P361')
if type_ == 'has intermediate':
ensure(goit, chit, 'Q7458208', 'P527')
ensure(chit, goit, 'Q7458208', 'P361')
if (type_ == 'has participant'
or type_ == 'has primary input or output'):
ensure(goit, chit, 'Q75232720', 'P527')
ensure(chit, goit, 'Q75232720', 'P361')
if (type_ == 'transports or maintains localization_of'
or type_ == 'exports'
or type_ == 'imports'):
ensure(goit, chit, 'Q75152245', 'P527')
ensure(chit, goit, 'Q75152245', 'P361')
if type_ == 'regulates levels of':
ensure(goit, chit, 'Q7247312', 'P527')
ensure(chit, goit, 'Q7247312', 'P361')
if type_ == 'process has causal agent':
ensure(goit, chit, 'Q2574811', 'P527')
ensure(chit, goit, 'Q2574811', 'P361')
if type_ == 'has part':
ensure(goit, chit, None, 'P527')
ensure(chit, goit, None, 'P361')
except KeyError:
pass
|
StarcoderdataPython
|
3426363
|
<filename>codewar/Going to the cinema -7kyu/Going to the cinema.py
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from math import pow, ceil
debug = 1
#ceil AC
# movie(500, 15, 0.9), 43
# movie(100, 10, 0.95), 24
def debug_print(flag, out):
if debug:
print("temp" + str(flag) + ":" + str(out))
def movie(card, ticket, perc):
sum_a = card
sum_b = 0.0
i = 1
while 1:
sum_a += (ticket * pow(perc, i))
sum_b += ticket
if sum_b > ceil(sum_a):
return i
else:
i += 1
if __name__ == "__main__":
print(movie(100, 10, 0.95))
print(movie(500, 15, 0.9))
|
StarcoderdataPython
|
11359946
|
<reponame>rookuu/AdventOfCode-2015<gh_stars>0
#!/usr/bin/env python
"""
Solution to Day 3 - Puzzle 1 of the Advent Of Code 2015 series of challenges.
--- Day 3: Perfectly Spherical Houses in a Vacuum ---
<^v> determines what coordinate the pointer moves to. Count the number of houses that the pointer visits at least once.
-----------------------------
Author: Luke "rookuu" Roberts
"""
coordinate = [0,0]
visitedCoords = set()
visitedCoords.add((coordinate[0],coordinate[1]))
inputFile = open('input.txt')
dataFromFile = inputFile.read()
for characters in dataFromFile:
if characters == "<":
coordinate[0] += -1
elif characters == ">":
coordinate[0] += 1
elif characters == "^":
coordinate[1] += 1
elif characters == "v":
coordinate[1] += -1
visitedCoords.add((coordinate[0],coordinate[1]))
print "The number of unique houses visited is: " + str(len(visitedCoords))
|
StarcoderdataPython
|
1716052
|
import xarray as _xr
import copy as _copy
import xgcm as _xgcm
import numpy as _np
import warnings as _warnings
import sys as _sys
from . import compute as _compute
from . import plot as _plot
from . import animate as _animate
from . import utils as _utils
from . subsample import _subsampleMethdos
from . compute import _computeMethdos
from . plot import _plotMethdos
from . animate import _animateMethdos
try:
import cartopy.crs as _ccrs
except ImportError:
pass
try:
from scipy import spatial as _spatial
except ImportError:
pass
try:
from dask.diagnostics import ProgressBar as _ProgressBar
except ImportError:
pass
# TODO: add more xgcm options. E.g., default boundary method.
# TODO: add attributes to new coordinates (XU, XV, ...)
# TODO: implement xgcm autogenerate in _set_coords, set_grid_coords, set_coords when released
# TODO: _create_grid will be useless with the future release of xgcm. We will pass dictionary in xgcm.Grid,
# and we can have the option of using comodo attributes (currently cleaned up so switched off)
class OceanDataset:
"""
OceanDataset combines a xarray.Dataset with other objects used by OceanSpy (e.g., xgcm.Grid).
Additional objects are attached to the xarray.Dataset as global attributes.
OceanDataset adds, reads, and decodes dataset global attributes.
"""
def __init__(self,
dataset):
"""
Parameters
----------
dataset: xarray.Dataset
The multi-dimensional, in memory, array database.
References
----------
http://xarray.pydata.org/en/stable/generated/xarray.Dataset.html
"""
# Check parameters
if not isinstance(dataset, _xr.Dataset):
raise TypeError("`dataset` must be a xarray.Dataset")
# Initialize dataset
self._ds = dataset.copy()
# Apply aliases
self = self._apply_aliases()
def __copy__(self):
"""
Shallow copy
"""
return OceanDataset(dataset = self.dataset.copy())
def __deepcopy__(self):
"""
Deep copy
"""
return OceanDataset(dataset = self.dataset.copy(deep=True))
def __repr__(self):
main_info = ['<oceanspy.OceanDataset>']
main_info.append('\nMain attributes:')
if self.dataset is not None:
main_info.append(" .dataset: %s" % self.dataset.__repr__()[self.dataset.__repr__().find('<'):
self.dataset.__repr__().find('>')+1])
if self.grid is not None:
main_info.append(" .grid: %s" % self.grid.__repr__()[self.grid.__repr__().find('<'):
self.grid.__repr__().find('>')+1])
if self.projection is not None:
main_info.append(" .projection: %s" % self.projection.__repr__()[self.projection.__repr__().find('<'):
self.projection.__repr__().find('>')+1])
more_info = ['\n\nMore attributes:']
if self.name:
more_info.append(" .name: %s" % self.name)
if self.description:
more_info.append(" .description: %s" % self.description)
if self.parameters:
more_info.append(" .parameters: %s" % type(self.parameters))
if self.aliases:
more_info.append(" .aliases: %s" % type(self.aliases))
if self.grid_coords:
more_info.append(" .grid_coords: %s" % type(self.grid_coords))
if self.grid_periodic:
more_info.append(" .grid_periodic: %s" % type(self.grid_periodic))
info = '\n'.join(main_info)
if len(more_info)>1:
info = info+'\n'.join(more_info)
return info
# ==================================
# IMPORT (used by open_oceandataset)
# ==================================
def _shift_averages(self):
"""
Shift average variables to time_midp.
Average variables must have attribute original_output = 'average'.
"""
for var in self._ds.data_vars:
original_output = self._ds[var].attrs.pop('original_output', None)
if original_output == 'average':
self._ds[var] = self._ds[var].drop('time').isel(time=slice(1, None)).rename({'time': 'time_midp'})
if original_output is not None:
self._ds[var].attrs['original_output'] = original_output
return self
def _set_coords(self, fillna=False, coords1Dfrom2D=False, coords2Dfrom1D=False, coordsUVfromG=False):
"""
Set dataset coordinates: dimensions + 2D horizontal coordinates.
Parameters
----------
fillna: bool
If True, fill NaNs in 2D coordinates propagating backward and forward.
coords1Dfrom2D: bool
If True, compute 1D coordinates from 2D coordinates (means).
Use with rectilinear grid only!
coords2Dfrom1D: bool
If True, compute 2D coordinates from 1D coordinates (broadcast).
coordsUVfromG: bool
If True, compute missing coords (U and V points) from G points.
"""
# Check parameters
if not isinstance(fillna, bool):
raise TypeError('`fillna` must be bool')
if not isinstance(coords1Dfrom2D, bool):
raise TypeError('`coords1Dfrom2D` must be bool')
if not isinstance(coordsUVfromG, bool):
raise TypeError('`coordsUVfromG` must be bool')
if coords1Dfrom2D and coords2Dfrom1D:
raise TypeError('`coords1Dfrom2D` and `coords2Dfrom1D` can not be both True')
# Copy because the dataset will change
self = _copy.copy(self)
# Coordinates are dimensions only
self._ds = self._ds.reset_coords()
# Fill nans (e.g., because of exch2)
if fillna:
coords = ['YC', 'XC', 'YG', 'XG', 'YU', 'XU', 'YV', 'XV']
dims = ['X', 'Y', 'Xp1', 'Yp1', 'Xp1', 'Y', 'X', 'Yp1']
for i, (coord, dim) in enumerate(zip(coords, dims)):
if coord in self._ds.variables:
self._ds[coord] = self._ds[coord].ffill(dim).bfill(dim).persist()
# Get U and V by rolling G
if coordsUVfromG:
for i, (point_pos, dim2roll) in enumerate(zip(['U', 'V'], ['Yp1', 'Xp1'])):
for dim in ['Y', 'X']:
coord = self._ds[dim+'G'].rolling(**{dim2roll: 2}).mean().dropna(dim2roll)
coord = coord.drop(coord.coords).rename({dim2roll: dim2roll[0]})
self._ds[dim+point_pos] = coord
if 'units' in self._ds[dim+'G'].attrs:
self._ds[dim+point_pos].attrs['units'] = self._ds[dim+'G'].attrs['units']
# For cartesian grid we can use 1D coordinates
if coords1Dfrom2D:
# Take mean
self._ds['Y'] = self._ds['YC'].mean('X', keep_attrs=True).persist()
self._ds['X'] = self._ds['XC'].mean('Y', keep_attrs=True).persist()
self._ds['Yp1'] = self._ds['YG'].mean('Xp1', keep_attrs=True).persist()
self._ds['Xp1'] = self._ds['XG'].mean('Yp1', keep_attrs=True).persist()
# Get 2D coordinates broadcasting 1D
if coords2Dfrom1D:
# Broadcast
self._ds['YC'], self._ds['XC'] = _xr.broadcast(self._ds['Y'], self._ds['X'])
self._ds['YG'], self._ds['XG'] = _xr.broadcast(self._ds['Yp1'], self._ds['Xp1'])
self._ds['YU'], self._ds['XU'] = _xr.broadcast(self._ds['Y'], self._ds['Xp1'])
self._ds['YV'], self._ds['XV'] = _xr.broadcast(self._ds['Yp1'], self._ds['X'])
# Add units
for i, (D2, D1) in enumerate(zip(['YC', 'XC', 'YG', 'XG', 'YU', 'XU', 'YV', 'XV'],
['Y', 'X', 'Yp1', 'Xp1', 'Y', 'Xp1', 'Yp1', 'X'])):
if 'units' in self._ds[D1].attrs: self._ds[D2].attrs['units'] = self._ds[D1].attrs['units']
# Set 2D coordinates
self._ds = self._ds.set_coords(['YC', 'XC',
'YG', 'XG',
'YU', 'XU',
'YV', 'XV'])
return self
def import_MITgcm_rect_nc(self, shift_averages = True):
"""
Set coordinates of a dataset from a MITgcm run with rectilinear grid and data stored in NetCDF format.
Open and concatenate the dataset before running this function.
Parameters
----------
shift_averages: bool
If True, shift average variable to time_midp.
Average variables must have attribute original_output = 'average'
"""
# Check parameters
if not isinstance(shift_averages, bool):
raise TypeError('`shift_averages` must be bool')
# Shift averages
if shift_averages is True:
self = self._shift_averages()
# Set coordinates
self = self._set_coords(fillna=True, coords1Dfrom2D=True)
grid_coords = {'Y' : {'Y': None, 'Yp1': 0.5},
'X' : {'X': None, 'Xp1': 0.5},
'Z' : {'Z': None, 'Zp1': 0.5, 'Zu': 0.5, 'Zl': -0.5},
'time' : {'time': -0.5}}
self = self.set_grid_coords(grid_coords = grid_coords, add_midp=True)
return self
def import_MITgcm_rect_bin(self, shift_averages = True):
"""
Set coordinates of a dataset from a MITgcm run with rectilinear grid and data stored in bin format.
Open and concatenate the dataset before running this function.
Parameters
----------
shift_averages: bool
If True, shift average variable to time_midp.
Average variables must have attribute original_output = 'average'
"""
# Check parameters
if not isinstance(shift_averages, bool):
raise TypeError('`shift_averages` must be bool')
# Shift averages
if shift_averages is True:
self = self._shift_averages()
# Set coordinates
self = self._set_coords(coords2Dfrom1D=True)
grid_coords = {'Y' : {'Y': None, 'Yp1': 0.5},
'X' : {'X': None, 'Xp1': 0.5},
'Z' : {'Z': None, 'Zp1': 0.5, 'Zu': 0.5, 'Zl': -0.5},
'time' : {'time': -0.5}}
self = self.set_grid_coords(grid_coords = grid_coords, add_midp=True)
return self
def import_MITgcm_curv_nc(self, shift_averages = True):
"""
Set coordinates of a dataset from a MITgcm run with curvilinear grid and data stored in NetCDF format.
Open and concatenate the dataset before running this function.
Parameters
----------
shift_averages: bool
If True, shift average variable to time_midp.
Average variables must have attribute original_output = 'average'
"""
# Check parameters
if not isinstance(shift_averages, bool):
raise TypeError('`shift_averages` must be bool')
# Shift averages
if shift_averages is True:
self = self._shift_averages()
# Set coordinates
self = self._set_coords(coordsUVfromG=True)
grid_coords = {'Y' : {'Y': None, 'Yp1': 0.5},
'X' : {'X': None, 'Xp1': 0.5},
'Z' : {'Z': None, 'Zp1': 0.5, 'Zu': 0.5, 'Zl': -0.5},
'time' : {'time': -0.5}}
self = self.set_grid_coords(grid_coords = grid_coords, add_midp=True)
return self
# ===========
# ATTRIBUTES
# ===========
# -------------------
# name
# -------------------
@property
def name(self):
"""
Name of the OceanDataset
"""
name = self._read_from_global_attr('name')
return name
@name.setter
def name(self, name):
"""
Inhibit setter
"""
raise AttributeError(_setter_error_message('name'))
def set_name(self, name, overwrite=None):
"""
Set name of the OceanDataset.
Parameters
----------
name: str
Name of the OceanDataset
overwrite: bool or None
If None, raise error if name has been previously set.
If True, overwrite previous name.
If False, combine with previous name.
"""
# Check parameters
if not isinstance(name, str):
raise TypeError("`name` must be str")
# Set name
self = self._store_as_global_attr(name = 'name',
attr = name,
overwrite = overwrite)
return self
# -------------------
# description
# -------------------
@property
def description(self):
"""
Description of the OceanDataset
"""
description = self._read_from_global_attr('description')
return description
@description.setter
def description(self, description):
"""
Inhibit setter
"""
raise AttributeError(_setter_error_message('description'))
def set_description(self, description, overwrite=None):
"""
Set description of the OceanDataset.
Parameters
----------
description: str
Description of the OceanDataset
overwrite: bool or None
If None, raise error if description has been previously set.
If True, overwrite previous description.
If False, combine with previous description.
"""
# Check parameters
if not isinstance(description, str):
raise TypeError("`description` must be str")
# Set description
self = self._store_as_global_attr(name = 'description',
attr = description,
overwrite = overwrite)
return self
# -------------------
# dataset
# -------------------
@property
def dataset(self):
"""
xarray.Dataset: A multi-dimensional, in memory, array database.
References
----------
http://xarray.pydata.org/en/stable/generated/xarray.Dataset.html
"""
# Show _ds with renamed variables.
dataset = self._ds.copy()
if self.aliases:
aliases = {ospy: custom for ospy, custom in self.aliases.items()
if ospy in self._ds
or ospy in self._ds.dims}
dataset = dataset.rename(aliases)
return dataset
@dataset.setter
def dataset(self, dataset):
"""
Inhibit setter
"""
raise AttributeError("Set a new dataset using `oceanspy.OceanDataset(dataset)`")
# -------------------
# aliases
# -------------------
@property
def aliases(self):
"""
A dictionary to connect custom variable names to OceanSpy reference names.
Keys are OceanSpy names, values are custom names: {'ospy_name': 'custom_name'}
"""
aliases = self._read_from_global_attr('aliases')
return aliases
@property
def _aliases_flipped(self):
"""
Flip aliases: keys are custom names, values are OceanSpy names: {'custom_name': 'ospy_name'}
"""
if self.aliases:
aliases_flipped = {custom: ospy for ospy, custom in self.aliases.items()}
else: return self.aliases
return aliases_flipped
@aliases.setter
def aliases(self, aliases):
"""
Inhibit setter
"""
raise AttributeError(_setter_error_message('aliases'))
def set_aliases(self, aliases, overwrite=None):
"""
Set aliases to connect custom variables names to OceanSpy reference names.
Parameters
----------
aliases: dict
Keys are OceanSpy names, values are custom names: {'ospy_name': 'custom_name'}
overwrite: bool or None
If None, raise error if aliases has been previously set.
If True, overwrite previous aliases.
If False, combine with previous aliases.
"""
# Check parameters
if not isinstance(aliases, dict):
raise TypeError("`aliases` must be dict")
# Set aliases
self = self._store_as_global_attr(name = 'aliases',
attr = aliases,
overwrite = overwrite)
# Apply aliases
self = self._apply_aliases()
return self
def _apply_aliases(self):
"""
Check if there are variables with custom name in _ds, and rename to ospy name
"""
if self._aliases_flipped:
aliases = {custom: ospy for custom, ospy in self._aliases_flipped.items()
if custom in self._ds.variables
or custom in self._ds.dims}
self._ds = self._ds.rename(aliases)
return self
# -------------------
# parameters
# -------------------
@property
def parameters(self):
"""
A dictionary defining model parameters that are used by OceanSpy.
{'parameter_name': parameter value}
If a parameter has not been set, its default value is used.
"""
from oceanspy import DEFAULT_PARAMETERS
parameters = self._read_from_global_attr('parameters')
if parameters is None:
parameters = DEFAULT_PARAMETERS
else:
parameters = {**DEFAULT_PARAMETERS, **parameters}
return parameters
@parameters.setter
def parameters(self, parameters):
"""
Inhibit setter
"""
raise AttributeError(_setter_error_message('parameters'))
def set_parameters(self, parameters):
"""
Set model parameters used by OceanSpy (see oceanspy.DEFAULT_PARAMETERS)
Parameters
----------
parameters: dict
{'parameter_name': parameter_value}
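Examples
--------
A minimal sketch; 'rSphere' is a parameter used elsewhere in this class, and its value here is illustrative:
>>> od = od.set_parameters({'rSphere': 6.371e3})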
"""
from oceanspy import DEFAULT_PARAMETERS, AVAILABLE_PARAMETERS, TYPE_PARAMETERS
# Check parameters
if not isinstance(parameters, dict):
raise TypeError("`parameters` must be dict")
# Check parameters
warn_params = []
for key, value in parameters.items():
if key not in DEFAULT_PARAMETERS.keys(): warn_params = warn_params + [key]
else:
if not isinstance(value, TYPE_PARAMETERS[key]):
raise TypeError("Invalid [{}]. Check oceanspy.TYPE_PARAMETERS".format(key))
if key in AVAILABLE_PARAMETERS.keys() and value not in AVAILABLE_PARAMETERS[key]:
raise ValueError("Requested [{}] not available. Check oceanspy.AVAILABLE_PARAMETERS".format(key))
if len(warn_params)!=0:
_warnings.warn(("{} are not OceanSpy parameters").format(warn_params), stacklevel=2)
# Set parameters
self = self._store_as_global_attr(name = 'parameters',
attr = parameters,
overwrite = True)
return self
# -------------------
# grid_coords
# -------------------
@property
def grid_coords(self):
"""
Grid coordinates used by xgcm.Grid
References
----------
https://xgcm.readthedocs.io/en/stable/grids.html#Grid-Metadata
"""
grid_coords = self._read_from_global_attr('grid_coords')
return grid_coords
@grid_coords.setter
def grid_coords(self, grid_coords):
"""
Inhibit setter
"""
raise AttributeError(_setter_error_message('grid_coords'))
def set_grid_coords(self, grid_coords, add_midp=False, overwrite=None):
"""
Set grid coordinates used by xgcm.Grid (see oceanspy.OCEANSPY_AXES).
Parameters
----------
grid_coords: dict
Grid coordinates used by xgcm.Grid.
Keys are axes, and values are dicts with key=dim and value=c_grid_axis_shift.
Available c_grid_axis_shift values are {0.5, None, -0.5}
add_midp: bool
If True, add an inner dimension (mid points) to axes that only have an outer dimension.
The new dimension will be named after the outer dimension + '_midp'
overwrite: bool or None
If None, raise error if grid_coords has been previously set.
If True, overwrite previous grid_coords.
If False, combine with previous grid_coords.
References
----------
https://xgcm.readthedocs.io/en/stable/grids.html#Grid-Metadata
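Examples
--------
A minimal sketch based on the example used in the error message below; dimension names are illustrative:
>>> od = od.set_grid_coords({'Y': {'Y': None, 'Yp1': 0.5}}, overwrite=True)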
"""
# Check parameters
if not isinstance(grid_coords, dict):
raise TypeError("`grid_coords` must be dict")
if not isinstance(add_midp, (bool, type(None))):
raise TypeError("`add_midp` must be bool")
# Check axes
_check_oceanspy_axes(list(grid_coords.keys()))
# Check shifts
list_shift = [0.5, None, -0.5]
for axis in grid_coords:
if grid_coords[axis] is None: continue
elif not isinstance(grid_coords[axis], dict):
example_grid_coords = {'Y' : {'Y': None, 'Yp1': 0.5}}
raise TypeError("Invalid grid_coords. grid_coords example: {}".format(example_grid_coords))
else:
for dim in grid_coords[axis]:
if grid_coords[axis][dim] not in list_shift:
raise ValueError("[{}] not a valid c_grid_axis_shift."
" Available options are {}".format(grid_coords[axis][dim],
list_shift))
# Set grid_coords
self = self._store_as_global_attr(name = 'grid_coords',
attr = grid_coords,
overwrite = overwrite)
if add_midp:
grid_coords = {}
for axis in self.grid_coords:
if len(self.grid_coords[axis])==1 and list(self.grid_coords[axis].values())[0] is not None:
# Deal with aliases
dim = list(self.grid_coords[axis].keys())[0]
if self._aliases_flipped and dim in self._aliases_flipped:
_dim = self._aliases_flipped[dim]
self = self.set_aliases({_dim+'_midp': dim+'_midp'}, overwrite=False)
else: _dim = dim
# Midpoints are averages of adjacent outer points
midp = (self._ds[_dim].values[:-1]+self._ds[_dim].diff(_dim)/2).rename({_dim: _dim+'_midp'})
self._ds[_dim+'_midp'] = _xr.DataArray(midp,
dims=(_dim+'_midp'))
if 'units' in self._ds[_dim].attrs:
self._ds[_dim+'_midp'].attrs['units'] = self._ds[_dim].attrs['units']
# Update grid_coords
grid_coords[axis] = {**self.grid_coords[axis], dim+'_midp': None}
self = self._store_as_global_attr(name = 'grid_coords',
attr = grid_coords,
overwrite = False)
return self
# -------------------
# grid_periodic
# -------------------
@property
def grid_periodic(self):
"""
List of xgcm.Grid axes that are periodic
"""
grid_periodic = self._read_from_global_attr('grid_periodic')
if not grid_periodic:
grid_periodic = []
return grid_periodic
@grid_periodic.setter
def grid_periodic(self, grid_periodic):
"""
Inhibit setter
"""
raise AttributeError(_setter_error_message('grid_periodic'))
def set_grid_periodic(self, grid_periodic, overwrite=None):
"""
Set grid axes that need to be treated as periodic by xgcm.Grid.
Axes that are not listed here are treated as non-periodic by default.
Note that this is the opposite of xgcm, which uses periodic=True by default.
Parameters
----------
grid_periodic: list
List of periodic axes.
Available axes are {'X', 'Y', 'Z', 'time'}.
overwrite: bool or None
If None, raise error if grid_periodic has been previously set.
If True, overwrite previous grid_periodic.
If False, combine with previous grid_periodic.
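Examples
--------
A minimal sketch marking the X axis as periodic:
>>> od = od.set_grid_periodic(['X'], overwrite=True)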
"""
# Check parameters
if not isinstance(grid_periodic, list):
raise TypeError("`grid_periodic` must be list")
# Check axes
_check_oceanspy_axes(grid_periodic)
# Set grid_periodic
self = self._store_as_global_attr(name = 'grid_periodic',
attr = grid_periodic,
overwrite = overwrite)
return self
# -------------------
# grid
# -------------------
@property
def grid(self):
"""
xgcm.Grid: A collection of axes, where each axis is a group of coordinates that all lie along the same physical dimension but describe different positions relative to a grid cell.
References
----------
https://xgcm.readthedocs.io/en/stable/api.html#Grid
"""
dataset = self.dataset.copy()
coords = self.grid_coords
periodic = self.grid_periodic
grid = _create_grid(dataset, coords, periodic)
return grid
@property
def _grid(self):
"""
xgcm.Grid built on the internal dataset, with grid_coords translated from custom to OceanSpy names
"""
aliases = self.aliases
coords = self.grid_coords
if aliases and coords:
# Flip aliases
aliases = {custom: ospy for ospy, custom in aliases.items()}
# Rename coords
for axis in coords:
for dim in coords[axis]:
if dim in aliases:
coords[axis][aliases[dim]] = coords[axis].pop(dim)
dataset = self._ds.copy()
periodic = self.grid_periodic
grid = _create_grid(dataset, coords, periodic)
return grid
@grid.setter
def grid(self, grid):
"""
Inhibit setter
"""
raise AttributeError("Set a new grid using .set_grid_coords and .set_periodic")
@_grid.setter
def _grid(self, grid):
"""
Inhibit setter
"""
raise AttributeError("Set a new _grid using .set_grid_coords and .set_periodic")
# -------------------
# projection
# -------------------
@property
def projection(self):
"""
Projection of the OceanDataset.
"""
projection = self._read_from_global_attr('projection')
if projection:
if projection=='None':
projection = None
else:
if 'cartopy' not in _sys.modules:
_warnings.warn(("cartopy is not available, so projection is None").format(da.name), stacklevel=2)
projection = None
else:
projection = eval('_ccrs.{}'.format(projection))
return projection
@projection.setter
def projection(self, projection):
"""
Inhibit setter
"""
raise AttributeError(_setter_error_message('projection'))
def set_projection(self, projection, **kwargs):
"""
Set the projection of the OceanDataset.
Parameters
----------
projection: str
cartopy projection of the OceanDataset
**kwargs:
Keyword arguments used by cartopy
E.g., central_longitude=0.0 for PlateCarree
References
----------
https://scitools.org.uk/cartopy/docs/latest/crs/projections.html
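Examples
--------
A minimal sketch using the PlateCarree example mentioned above:
>>> od = od.set_projection('PlateCarree', central_longitude=0.0)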
"""
# Check parameters
if not isinstance(projection, (type(None), str)):
raise TypeError("`projection` must be str or None")
if projection is not None:
if not hasattr(_ccrs, projection):
raise TypeError("{} is not a cartopy projection".format(projection))
projection = '{}(**{})'.format(projection, kwargs)
else:
projection = str(projection)
# Set projection
self = self._store_as_global_attr(name = 'projection',
attr = projection,
overwrite = True)
return self
# ===========
# METHODS
# ===========
def create_tree(self, grid_pos = 'C'):
"""
Create a scipy.spatial.cKDTree for quick nearest-neighbor lookup.
Parameters
-----------
grid_pos: str
Grid position. Options: {'C', 'G', 'U', 'V'}
Reference grid: https://mitgcm.readthedocs.io/en/latest/algorithm/horiz-grid.html
Returns
-------
tree: scipy.spatial.cKDTree
Return tree that can be used to query a point.
References
----------
https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.cKDTree.html
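Examples
--------
A minimal sketch; the query point (cartesian x, y, z) is illustrative:
>>> tree = od.create_tree(grid_pos='C')
>>> distance, index = tree.query([0, 0, 0])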
"""
if 'scipy' not in _sys.modules:
raise ImportError("cKDTree can not be created because scipy is not installed")
# Check parameters
if not isinstance(grid_pos, str):
raise TypeError('`grid_pos` must be str')
grid_pos_list = ['C', 'G', 'U', 'V']
if grid_pos not in grid_pos_list:
raise ValueError(("`grid_pos` must be on of {}:"
"\nhttps://mitgcm.readthedocs.io/en/latest/algorithm/horiz-grid.html").format(grid_pos_list))
# Convert if is not cartesian
Y = self._ds['Y'+grid_pos]
X = self._ds['X'+grid_pos]
R = self.parameters['rSphere']
if R: x, y, z = _utils.spherical2cartesian(Y = Y, X = X, R = R)
else: x = X; y = Y; z = _xr.zeros_like(Y)
# Stack
x_stack = x.stack(points=x.dims).values
y_stack = y.stack(points=y.dims).values
z_stack = z.stack(points=z.dims).values
# Construct KD-tree
tree = _spatial.cKDTree(_np.column_stack((x_stack, y_stack, z_stack)))
return tree
def merge_into_oceandataset(self, obj, overwrite=False):
"""
Merge a dataset or DataArray into the oceandataset
Parameters
----------
obj: xarray.DataArray or xarray.Dataset
xarray object to merge
overwrite: bool
If True, overwrite existing DataArrays with the same name.
If False, skip variables that already exist in the oceandataset (a warning is raised).
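Examples
--------
A minimal sketch; assumes xarray is imported as xr, and the new variable name is illustrative:
>>> da = xr.zeros_like(od.dataset['Temp']).rename('Temp_anomaly')
>>> od = od.merge_into_oceandataset(da)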
"""
# Check and make dataset
if not isinstance(obj, (_xr.DataArray, _xr.Dataset)):
raise TypeError('`obj` must be xarray.DataArray or xarray.Dataset')
obj = obj.drop(obj.coords)
if isinstance(obj, _xr.DataArray):
if obj.name is None:
raise ValueError("xarray.DataArray doesn't have a name. Set it using da.rename()")
else:
obj = obj.to_dataset()
if not isinstance(overwrite, bool):
raise TypeError("`overwrite` must be bool")
# Merge
dataset = self.dataset
var2drop = [var for var in obj.variables if var in dataset]
if overwrite is False:
obj = obj.drop(var2drop)
if len(var2drop)!=0: _warnings.warn('{} will not be merged.'
'\nSet `overwrite=True` if you wish otherwise.'.format(var2drop), stacklevel=2)
else:
if len(var2drop)!=0: _warnings.warn('{} will be overwritten.'.format(var2drop), stacklevel=2)
for var in obj.data_vars:
dataset[var] = obj[var]
return OceanDataset(dataset)
def set_coords(self, fillna=False, coords1Dfrom2D=False, coords2Dfrom1D=False, coordsUVfromG=False):
"""
Set dataset coordinates: dimensions + 2D horizontal coordinates.
Parameters
----------
fillna: bool
If True, fill NaNs in 2D coordinates propagating backward and forward.
coords1Dfrom2D: bool
If True, compute 1D coordinates from 2D coordinates (means).
Use with rectilinear grid only!
coords2Dfrom1D: bool
If True, compute 2D coordinates from 1D coordinates (broadcast).
coordsUVfromG: bool
If True, compute missing coords (U and V points) from G points.
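Examples
--------
A minimal sketch for a rectilinear grid that only provides 2D horizontal coordinates:
>>> od = od.set_coords(fillna=True, coords1Dfrom2D=True)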
"""
# Check parameters
if not isinstance(fillna, bool):
raise TypeError('`fillna` must be bool')
if not isinstance(coords1Dfrom2D, bool):
raise TypeError('`coords1Dfrom2D` must be bool')
if not isinstance(coords2Dfrom1D, bool):
raise TypeError('`coords2Dfrom1D` must be bool')
if not isinstance(coordsUVfromG, bool):
raise TypeError('`coordsUVfromG` must be bool')
if coords1Dfrom2D and coords2Dfrom1D:
raise TypeError('`coords1Dfrom2D` and `coords2Dfrom1D` can not be both True')
# Copy because the dataset will change
self = _copy.copy(self)
# Coordinates are dimensions only
self._ds = self._ds.reset_coords()
# Fill nans (e.g., because of exch2)
if fillna:
coords = ['YC', 'XC', 'YG', 'XG', 'YU', 'XU', 'YV', 'XV']
dims = ['X', 'Y', 'Xp1', 'Yp1', 'Xp1', 'Y', 'X', 'Yp1']
for i, (coord, dim) in enumerate(zip(coords, dims)):
if coord in self._ds.variables:
self._ds[coord] = self._ds[coord].ffill(dim).bfill(dim).persist()
# Get U and V by rolling G
if coordsUVfromG:
for i, (point_pos, dim2roll) in enumerate(zip(['U', 'V'], ['Yp1', 'Xp1'])):
for dim in ['Y', 'X']:
coord = self._ds[dim+'G'].rolling(**{dim2roll: 2}).mean().dropna(dim2roll)
coord = coord.drop(coord.coords).rename({dim2roll: dim2roll[0]})
self._ds[dim+point_pos] = coord
if 'units' in self._ds[dim+'G'].attrs:
self._ds[dim+point_pos].attrs['units'] = self._ds[dim+'G'].attrs['units']
# For cartesian grid we can use 1D coordinates
if coords1Dfrom2D:
# Take mean
self._ds['Y'] = self._ds['YC'].mean('X', keep_attrs=True).persist()
self._ds['X'] = self._ds['XC'].mean('Y', keep_attrs=True).persist()
self._ds['Yp1'] = self._ds['YG'].mean('Xp1', keep_attrs=True).persist()
self._ds['Xp1'] = self._ds['XG'].mean('Yp1', keep_attrs=True).persist()
# Get 2D coordinates broadcasting 1D
if coords2Dfrom1D:
# Broadcast
self._ds['YC'], self._ds['XC'] = _xr.broadcast(self._ds['Y'], self._ds['X'])
self._ds['YG'], self._ds['XG'] = _xr.broadcast(self._ds['Yp1'], self._ds['Xp1'])
self._ds['YU'], self._ds['XU'] = _xr.broadcast(self._ds['Y'], self._ds['Xp1'])
self._ds['YV'], self._ds['XV'] = _xr.broadcast(self._ds['Yp1'], self._ds['X'])
# Add units
for i, (D2, D1) in enumerate(zip(['YC', 'XC', 'YG', 'XG', 'YU', 'XU', 'YV', 'XV'],
['Y', 'X', 'Yp1', 'Xp1', 'Y', 'Xp1', 'Yp1', 'X'])):
if 'units' in self._ds[D1].attrs: self._ds[D2].attrs['units'] = self._ds[D1].attrs['units']
# Set 2D coordinates
self._ds = self._ds.set_coords(['YC', 'XC',
'YG', 'XG',
'YU', 'XU',
'YV', 'XV'])
return self
def to_netcdf(self, path, **kwargs):
"""
Write dataset contents to a netCDF file.
Parameters
----------
path: str
Path to which to save this dataset.
**kwargs:
Keyword arguments for xarray.Dataset.to_netcdf()
References
----------
http://xarray.pydata.org/en/stable/generated/xarray.Dataset.to_netcdf.html
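Examples
--------
A minimal sketch; the output path is illustrative:
>>> od.to_netcdf('oceandataset.nc')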
"""
# Check parameters
if not isinstance(path, str):
raise TypeError('`path` must be str')
# to_netcdf doesn't like coordinates attribute
dataset = self.dataset
for var in dataset.variables:
attrs = dataset[var].attrs
coordinates = attrs.pop('coordinates', None)
dataset[var].attrs = attrs
if coordinates is not None: dataset[var].attrs['_coordinates'] = coordinates
compute = kwargs.pop('compute', None)
print('Writing dataset to [{}].'.format(path))
if compute is None or compute is False:
delayed_obj = dataset.to_netcdf(path, compute=False, **kwargs)
with _ProgressBar():
results = delayed_obj.compute()
else:
dataset.to_netcdf(path, compute=compute, **kwargs)
def _store_as_global_attr(self, name, attr, overwrite):
"""
Store an OceanSpy attribute as dataset global attribute.
Parameters
----------
name: str
Name of the attribute. Attribute will be stored as 'OceanSpy_'+name.
attr: str, dict, or list
Attribute to store
overwrite: bool or None
If None, raise error if attr has been previously set.
If True, overwrite previous attributes.
If False, combine with previous attributes.
"""
# Check parameters
if not isinstance(name, str):
raise TypeError("`name` must be str")
if not isinstance(attr, (str, dict, list)):
raise TypeError("`attr` must be str, dict, or list")
if not isinstance(overwrite, (bool, type(None))):
raise TypeError("`overwrite` must be bool or None")
# Attribute name
name = 'OceanSpy_'+name
if overwrite is None and name in self._ds.attrs:
raise ValueError("[{}] has been previously set: "
"`overwrite` must be bool".format(name.replace("OceanSpy_", "")))
# Copy because attributes are added to _ds
self = _copy.copy(self)
# Store
if not overwrite and name in self._ds.attrs:
prev_attr = self._ds.attrs[name]
if prev_attr[0] == "{" and prev_attr[-1] == "}":
attr = {**eval(prev_attr), **attr}
elif prev_attr[0] == "[" and prev_attr[-1] == "]":
attr = list(set(eval(prev_attr) + attr))
else:
attr = prev_attr + '_' + attr
self._ds.attrs[name] = str(attr)
return self
def _read_from_global_attr(self, name):
"""
Read an OceanSpy attribute stored as dataset global attribute.
Parameters
----------
name: str
Name of the attribute. Attribute will be read from 'OceanSpy_'+name.
Returns
-------
attr: str, dict, or list
Attribute that has been read
"""
if not isinstance(name, str):
raise TypeError("`name` must be str")
# Attribute name
name = 'OceanSpy_'+name
# Check if attributes exists
if name not in self._ds.attrs:
return None
# Read attribute
attr = self._ds.attrs[name]
if (attr[0]=='{' and attr[-1]=='}') or (attr[0]=='[' and attr[-1]==']'):
attr = eval(attr)
return attr
# ===========
# SHORTCUTS
# ===========
@property
def subsample(self):
"""
Access subsampling functions.
Examples
--------
>>> od = ospy.open_oceandataset.get_started()
>>> od.subsample.cutout(ZRange=[0, -100], varList=['Temp'])
"""
return _subsampleMethdos(self)
@property
def compute(self):
"""
Access computing functions, and merge the computed dataset into the oceandataset.
Set overwrite=True to overwrite DataArrays already existing in the oceandataset.
Examples
--------
>>> od = ospy.open_oceandataset.get_started()
>>> od.compute.gradient(varNameList='Temp', overwrite=True)
"""
return _computeMethdos(self)
@property
def plot(self):
"""
Access plotting functions.
Examples
--------
>>> od = ospy.open_oceandataset.get_started()
>>> od.plot.TS_diagram(meanAxes=['time', 'Z'], cutout_kwargs={'ZRange': [0, -100]})
"""
return _plotMethdos(self)
@property
def animate(self):
"""
Access animating functions.
Examples
--------
>>> od = ospy.open_oceandataset.get_started()
>>> od.animate.TS_diagram(meanAxes=['time', 'Z'], cutout_kwargs={'ZRange': [0, -100]})
"""
return _animateMethdos(self)
# ERROR HANDLING
def _check_oceanspy_axes(axes2check):
"""
Check axes
"""
from oceanspy import OCEANSPY_AXES
for axis in axes2check:
if axis not in OCEANSPY_AXES:
raise ValueError(_wrong_axes_error_message(axes2check))
def _wrong_axes_error_message(axes2check):
from oceanspy import OCEANSPY_AXES
return ("{} contains non-valid axes."
" OceanSpy axes are: {}").format(axes2check, OCEANSPY_AXES)
def _setter_error_message(attribute_name):
"""
Use the same error message for attributes
"""
return "Set new `{}` using .set_{}".format(attribute_name, attribute_name)
# USEFUL FUNCTIONS
def _create_grid(dataset, coords, periodic):
# Clean up comodo attributes (currently the user is forced to specify axes using set_grid_coords).
for dim in dataset.dims:
dataset[dim].attrs.pop('axis', None)
dataset[dim].attrs.pop('c_grid_axis_shift', None)
# Add comodo attributes.
# We won't need this step in the future, because future versions of xgcm will allow passing coords directly to Grid.
warn_dims = []
if coords:
for axis in coords:
for dim in coords[axis]:
shift = coords[axis][dim]
if dim in dataset.dims:
dataset[dim].attrs['axis'] = axis
if shift:
dataset[dim].attrs['c_grid_axis_shift'] = str(shift)
else:
warn_dims = warn_dims + [dim]
if len(warn_dims)!=0:
_warnings.warn('{} are not dimensions of the dataset and will be omitted'.format(warn_dims), stacklevel=2)
# Create grid
grid = _xgcm.Grid(dataset, periodic = periodic)
if len(grid.axes)==0:
grid = None
return grid