ext: stringclasses (9 values)
sha: stringlengths (40 to 40)
content: stringlengths (3 to 1.04M)
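The three columns above describe one row per source file: its extension class, a 40-character blob SHA, and the raw file content. A minimal sketch of iterating such rows with the Hugging Face datasets library follows; the dataset path "org/source-files" is a hypothetical placeholder, not the real identifier.

# Sketch only: "org/source-files" is a hypothetical placeholder path.
from datasets import load_dataset

ds = load_dataset("org/source-files", split="train", streaming=True)
for row in ds:
    # Each row carries an extension class, a 40-character SHA, and raw file
    # content ranging from 3 bytes up to about 1.04 MB.
    print(row["ext"], row["sha"], len(row["content"]))
    break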
py
1a5b36f6a26232c461c78f6aa4d923e4dc10f47b
#!/usr/bin/env python
from setuptools import setup


def load_requirements(*requirements_paths):
    """
    Load all requirements from the specified requirements files.
    Returns a list of requirement strings.
    """
    requirements = set()
    for path in requirements_paths:
        with open(path) as reqs:
            requirements.update(
                line.split('#')[0].strip() for line in reqs
                if is_requirement(line.strip())
            )
    return list(requirements)


def is_requirement(line):
    """
    Return True if the requirement line is a package requirement;
    that is, it is not blank, a comment, a URL, or an included file.
    """
    return line and not line.startswith(('-r', '#', '-e', 'git+', '-c'))


setup(
    name='edx-i18n-tools',
    version='0.5.3',
    description='edX Internationalization Tools',
    author='edX',
    author_email='[email protected]',
    url='https://github.com/edx/i18n-tools',
    packages=[
        'i18n',
    ],
    install_requires=load_requirements('requirements/base.in'),
    entry_points={
        'console_scripts': [
            'i18n_tool = i18n.main:main',
        ],
    },
    license='Apache License 2.0',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.8',
        'Framework :: Django',
        'Framework :: Django :: 2.2',
    ],
)
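To make the filtering in load_requirements and is_requirement concrete, here is a small illustration; the sample lines are invented for the example, and is_requirement is assumed to be the function defined in the setup.py above.

# Illustration only: the sample lines below are invented.
lines = [
    "Django>=2.2        # web framework",
    "# a comment",
    "-r common.in",
    "-e git+https://github.com/edx/example.git#egg=example",
    "path>=13.1",
    "",
]
# Blank lines, comments, and '-r'/'-e'/'-c'/'git+' lines are dropped;
# inline comments are stripped from the lines that are kept.
kept = [line.split('#')[0].strip() for line in lines if is_requirement(line.strip())]
print(kept)  # ['Django>=2.2', 'path>=13.1']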
py
1a5b38203187c0dabe6a7d026b7f193575e7f7d4
from detectors.mmdetection.mmdet.apis.inference import init_detector, inference_detector
from detectors.base_detector import Base_detector
from utilities.preprocessing import non_max_suppression
from utilities.helper import many_xyxy2xywh
import numpy as np
from utilities.helper import bboxes_round_int
import os
import mmcv


class Mmdetection_detector(Base_detector):

    def __init__(self, cfg):
        # Initialize the detector
        Base_detector.__init__(self, cfg)
        print("Mmdetection_detector init called")

        mmdetection_checkpoint_file = os.path.join(
            self.cfg.general.repository_root,
            self.cfg.detector.mmdetection_checkpoint_file)
        mmdetection_config = os.path.join(
            self.cfg.general.repository_root,
            self.cfg.detector.mmdetection_config)

        self.detector_model = init_detector(mmdetection_config,
                                            mmdetection_checkpoint_file,
                                            device=self.cfg.detector.device)

    def detect(self, img):
        # Run person detector
        result = inference_detector(self.detector_model, img)
        bboxes_with_score = result[0]

        # Only take those detections with a higher confidence than the min confidence
        bboxes_with_score = np.array(
            [bbox_with_s for bbox_with_s in bboxes_with_score
             if bbox_with_s[4] >= self.cfg.detector.min_confidence])

        if len(bboxes_with_score) == 0:
            return (np.array([]), np.array([]))

        # Columns 0 to 3 are bbox
        bboxes = bboxes_with_score[:, 0:4]
        bboxes = many_xyxy2xywh(bboxes)

        # Run non max suppression
        scores = bboxes_with_score[:, 4]
        indices = non_max_suppression(bboxes, max_bbox_overlap=0.8, scores=scores)
        bboxes = [bboxes[i, :] for i in indices]
        bboxes = bboxes_round_int(bboxes)

        return bboxes, scores
py
1a5b388493139561c8382011760ab6f436748a69
from __future__ import division
from __future__ import unicode_literals

import sys
import os
import zlib
import base64
import xdrlib

import test1_encoded

# Our modules
import vespa.common.constants as constants
import vespa.common.util.xml_ as util_xml
import vespa.common.util.fileio as util_fileio

# We encode numeric lists (also numpy arrays) in a three step process.
# First is XDR (http://en.wikipedia.org/wiki/External_Data_Representation),
# second is zlib to save space, third is base64 to make the output of
# zlib palatable to XML.
NUMERIC_LIST_ENCODING = "xdr zlib base64"


def read_fake_bitmap(fname='test1.bin'):
    bitmap = []
    try:
        bytes_read = open(fname, "rb").read()
        for item in bytes_read:
            bitmap.append(ord(item))
    except:
        e = "bitmap error" + str(sys.exc_info()[0])
        print "======> bjs sample.py python got here 3 - except = " + str(e)
    return bitmap


def encode_fake_bitmap(bitmap):
    data = bitmap
    p = xdrlib.Packer()
    p.pack_farray(len(data), data, p.pack_int)
    data = p.get_buffer()
    data = zlib.compress(data, 9)
    data = base64.b64encode(data)
    return data


def write_fake_bitmap_to_file(bitmap, fname='temp.txt'):
    textfile = open(fname, 'w')
    textfile.write(bitmap)
    textfile.close()


def decode_fake_bitmap(bitmap):
    data = bitmap
    data = base64.b64decode(data)
    data = zlib.decompress(data)
    p = xdrlib.Unpacker(data)
    # 4 since we expect INT32 here
    data = p.unpack_farray(len(data) // 4, p.unpack_int)
    return data


if __name__ == '__main__':
    bitmap0 = read_fake_bitmap()
    bitmap1 = encode_fake_bitmap(bitmap0)
    bitmap2 = decode_fake_bitmap(bitmap1)
    bitmap3 = decode_fake_bitmap(test1_encoded.test1)

    if bitmap2 == bitmap0:
        print 'bitmap2 == bitmap0'
    if bitmap3 == bitmap0:
        print 'bitmap3 == bitmap0'

    bob = 10
    bob += 1
py
1a5b39668ca3f50c17aaf6f12704825e2be2a824
from django.conf import settings
from django.db.backends.postgresql.creation import DatabaseCreation

from psycopg2_extension.utils import init_database


def _patch_execute_create_test_db(self, cursor, parameters, keepdb=False):
    if keepdb and self._database_exists(cursor, parameters['dbname']):
        return

    self._tmp_execute_create_test_db(cursor, parameters, keepdb)
    if not parameters['suffix']:
        self.log(f'Init test database with alias {self.connection.settings_dict["NAME"]}')
        origin_name = self.connection.settings_dict['NAME']
        self.set_as_test_mirror({'NAME': parameters['dbname'][1:-1]})
        init_database(self.connection, self.log)
        self.set_as_test_mirror({'NAME': origin_name})


DatabaseCreation._tmp_execute_create_test_db = DatabaseCreation._execute_create_test_db
DatabaseCreation._execute_create_test_db = _patch_execute_create_test_db
py
1a5b39ce44825df9a4d6490efdb87f24d55e008e
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 25 23:05:30 2018

@author: paulo
"""

# Standard imports
import cv2
import numpy as np

# Read image
im = cv2.imread('C:/Users/PauloRenato/Desktop/img3.jpg', cv2.IMREAD_GRAYSCALE)
im = cv2.GaussianBlur(im, (3, 3), 1)
im = cv2.Canny(im.copy(), 10, 80)
# im = 255 - im

# Setup SimpleBlobDetector parameters.
params = cv2.SimpleBlobDetector_Params()

# Change thresholds
params.minThreshold = im.min()
params.maxThreshold = im.max()
params.thresholdStep = 100

# Filter by Area.
# params.filterByArea = True
# params.minArea = 1500

# Filter by Circularity
params.filterByCircularity = True
# params.minCircularity = 0.500
params.minCircularity = 0.7

# Filter by Convexity
# params.filterByConvexity = True
# params.minConvexity = 0.87

# Filter by Inertia
# params.filterByInertia = True
# params.minInertiaRatio = 0.01

# Create a detector with the parameters
detector = cv2.SimpleBlobDetector_create(params)

# Detect blobs.
keypoints = detector.detect(im)

# Draw detected blobs as red circles.
# cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS ensures
# the size of the circle corresponds to the size of blob
im_with_keypoints = cv2.drawKeypoints(im, keypoints, np.array([]), (0, 0, 255),
                                      cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)

# Show blobs
cv2.imshow("Keypoints", im_with_keypoints)
cv2.waitKey(0)
cv2.destroyAllWindows()
py
1a5b3a6fc515f57c7dbc97ca8b5541f7455dda92
"""Support for HDMI CEC devices as media players.""" from __future__ import annotations import logging from pycec.commands import CecCommand, KeyPressCommand, KeyReleaseCommand from pycec.const import ( KEY_BACKWARD, KEY_FORWARD, KEY_MUTE_TOGGLE, KEY_PAUSE, KEY_PLAY, KEY_STOP, KEY_VOLUME_DOWN, KEY_VOLUME_UP, POWER_OFF, POWER_ON, STATUS_PLAY, STATUS_STILL, STATUS_STOP, TYPE_AUDIO, TYPE_PLAYBACK, TYPE_RECORDER, TYPE_TUNER, ) from homeassistant.components.media_player import MediaPlayerEntity from homeassistant.components.media_player.const import ( DOMAIN, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_STOP, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_STEP, ) from homeassistant.const import ( STATE_IDLE, STATE_OFF, STATE_ON, STATE_PAUSED, STATE_PLAYING, ) from . import ATTR_NEW, CecEntity _LOGGER = logging.getLogger(__name__) ENTITY_ID_FORMAT = DOMAIN + ".{}" def setup_platform(hass, config, add_entities, discovery_info=None): """Find and return HDMI devices as +switches.""" if ATTR_NEW in discovery_info: _LOGGER.debug("Setting up HDMI devices %s", discovery_info[ATTR_NEW]) entities = [] for device in discovery_info[ATTR_NEW]: hdmi_device = hass.data.get(device) entities.append(CecPlayerEntity(hdmi_device, hdmi_device.logical_address)) add_entities(entities, True) class CecPlayerEntity(CecEntity, MediaPlayerEntity): """Representation of a HDMI device as a Media player.""" def __init__(self, device, logical) -> None: """Initialize the HDMI device.""" CecEntity.__init__(self, device, logical) self.entity_id = f"{DOMAIN}.hdmi_{hex(self._logical_address)[2:]}" def send_keypress(self, key): """Send keypress to CEC adapter.""" _LOGGER.debug( "Sending keypress %s to device %s", hex(key), hex(self._logical_address) ) self._device.send_command(KeyPressCommand(key, dst=self._logical_address)) self._device.send_command(KeyReleaseCommand(dst=self._logical_address)) def send_playback(self, key): """Send playback status to CEC adapter.""" self._device.async_send_command(CecCommand(key, dst=self._logical_address)) def mute_volume(self, mute): """Mute volume.""" self.send_keypress(KEY_MUTE_TOGGLE) def media_previous_track(self): """Go to previous track.""" self.send_keypress(KEY_BACKWARD) def turn_on(self): """Turn device on.""" self._device.turn_on() self._state = STATE_ON def clear_playlist(self): """Clear players playlist.""" raise NotImplementedError() def turn_off(self): """Turn device off.""" self._device.turn_off() self._state = STATE_OFF def media_stop(self): """Stop playback.""" self.send_keypress(KEY_STOP) self._state = STATE_IDLE def play_media(self, media_type, media_id, **kwargs): """Not supported.""" raise NotImplementedError() def media_next_track(self): """Skip to next track.""" self.send_keypress(KEY_FORWARD) def media_seek(self, position): """Not supported.""" raise NotImplementedError() def set_volume_level(self, volume): """Set volume level, range 0..1.""" raise NotImplementedError() def media_pause(self): """Pause playback.""" self.send_keypress(KEY_PAUSE) self._state = STATE_PAUSED def select_source(self, source): """Not supported.""" raise NotImplementedError() def media_play(self): """Start playback.""" self.send_keypress(KEY_PLAY) self._state = STATE_PLAYING def volume_up(self): """Increase volume.""" _LOGGER.debug("%s: volume up", self._logical_address) self.send_keypress(KEY_VOLUME_UP) def volume_down(self): """Decrease volume.""" _LOGGER.debug("%s: volume down", self._logical_address) 
self.send_keypress(KEY_VOLUME_DOWN) @property def state(self) -> str | None: """Cache state of device.""" return self._state def update(self): """Update device status.""" device = self._device if device.power_status in [POWER_OFF, 3]: self._state = STATE_OFF elif not self.support_pause: if device.power_status in [POWER_ON, 4]: self._state = STATE_ON elif device.status == STATUS_PLAY: self._state = STATE_PLAYING elif device.status == STATUS_STOP: self._state = STATE_IDLE elif device.status == STATUS_STILL: self._state = STATE_PAUSED else: _LOGGER.warning("Unknown state: %s", device.status) @property def supported_features(self): """Flag media player features that are supported.""" if self.type_id == TYPE_RECORDER or self.type == TYPE_PLAYBACK: return ( SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PLAY_MEDIA | SUPPORT_PAUSE | SUPPORT_STOP | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK ) if self.type == TYPE_TUNER: return ( SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_PLAY_MEDIA | SUPPORT_PAUSE | SUPPORT_STOP ) if self.type_id == TYPE_AUDIO: return ( SUPPORT_TURN_ON | SUPPORT_TURN_OFF | SUPPORT_VOLUME_STEP | SUPPORT_VOLUME_MUTE ) return SUPPORT_TURN_ON | SUPPORT_TURN_OFF
py
1a5b3b3a20556e6bc22d7758e8729e19721b47c9
# Copyright 2022 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0

"""
This module accesses the DB table object UserCourseDisplay
"""

import logging
import traceback
from django.db import IntegrityError
from myuw.models import UserCourseDisplay
from myuw.dao.user import get_user_model

TOTAL_COURSE_COLORS = 8
logger = logging.getLogger(__name__)


def set_course_display_pref(request, schedule):
    """
    Add display elements on the sections in the given schedule
    """
    user = get_user_model(request)
    existing_color_dict, colors_taken, pin_on_teaching_2nds =\
        UserCourseDisplay.get_course_display(user,
                                             schedule.term.year,
                                             schedule.term.quarter)
    primary_color_dict = {}  # record primary colors used {section_labels: color_id}
    for section in schedule.sections:
        section_label = section.section_label()

        if section_label in pin_on_teaching_2nds:
            section.pin_on_teaching = True
        else:
            section.pin_on_teaching = False

        if section_label in existing_color_dict:
            # exists in DB table
            existing_color_id = existing_color_dict[section_label]
            color_id = _validated_color(user, primary_color_dict,
                                        section, existing_color_id)
            _record_primary_colors(primary_color_dict, section, color_id)
        else:
            # a section with no color yet
            if not section.is_primary_section:
                primary_label = section.primary_section_label()
            else:
                primary_label = section_label

            if primary_label in primary_color_dict:
                color_id = primary_color_dict[primary_label]
            else:
                color_id, colors_taken = _get_next_color(colors_taken)
                _record_primary_colors(primary_color_dict, section, color_id)

            _save_section_color(user, section, color_id)

        section.color_id = _make_colorid(section, color_id)


def _get_next_color(colors_taken):
    """
    Return the next available color in the eight color list
    """
    times = int(len(colors_taken) / TOTAL_COURSE_COLORS)
    if len(colors_taken) >= TOTAL_COURSE_COLORS:
        colors_taken = colors_taken[TOTAL_COURSE_COLORS * times:]
    for new_color in range(1, TOTAL_COURSE_COLORS + 1, 1):
        if new_color not in colors_taken:
            colors_taken.append(new_color)
            return new_color, colors_taken


def _make_colorid(section, color_id):
    if section.is_primary_section:
        return color_id
    return "{}a".format(color_id)


def _record_primary_colors(primary_color_dict, section, color_id):
    """
    Remember the primary colors we have used for the term
    to be referenced by the follow up secondary sections
    """
    if not section.is_primary_section:
        label = section.primary_section_label()
    else:
        label = section.section_label()

    if label not in primary_color_dict:
        primary_color_dict[label] = color_id


def _save_section_color(user, section, color_id):
    """
    Store the color of the section in DB
    """
    section_label = section.section_label()
    if not UserCourseDisplay.exists_section_display(user, section_label):
        try:
            UserCourseDisplay.objects.create(user=user,
                                             year=section.term.year,
                                             quarter=section.term.quarter,
                                             section_label=section_label,
                                             color_id=color_id)
        except Exception as ex:
            logger.warning({'user': user.uwnetid,
                            'at': "create ({} color_id: {}) in DB".format(
                                section_label, color_id),
                            'err': ex})
            if '1062, "Duplicate entry ' not in str(ex):
                raise


def _update_color(user, section_label, color_id):
    UserCourseDisplay.set_color(user, section_label, color_id)


def _validated_color(user, primary_color_dict,
                     sec_section, existing_color_id):
    primary_section_label = sec_section.primary_section_label()
    primary_color_id = primary_color_dict.get(primary_section_label, None)
    if primary_color_id and primary_color_id != existing_color_id:
        _update_color(user, sec_section.section_label(), primary_color_id)
        return primary_color_id
    return existing_color_id
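One behavior of _get_next_color worth noting: once all eight colors have been handed out, the taken list is reset and the cycle starts over at color 1. A small standalone check, assuming the function above is importable, could look like this:

# Assumes _get_next_color from the module above; prints 1..8 and then wraps.
taken = []
assigned = []
for _ in range(10):
    color, taken = _get_next_color(taken)
    assigned.append(color)
print(assigned)  # [1, 2, 3, 4, 5, 6, 7, 8, 1, 2]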
py
1a5b3c1cb2cc9f4ee3bcb9fcb7a21ac6bf09564c
from django.db import models
from django.contrib.auth.models import User
from markdownx.models import MarkdownxField

from hknweb.utils import get_semester

import hknweb.events.google_calendar_utils as gcal
from hknweb.events.models.event_type import EventType
from hknweb.events.models.google_calendar import GCalAccessLevelMapping
from hknweb.events.models.constants import ACCESS_LEVELS


class Event(models.Model):
    name = models.CharField(max_length=255)
    start_time = models.DateTimeField()
    end_time = models.DateTimeField()
    location = models.CharField(max_length=255)
    event_type = models.ForeignKey(EventType, models.CASCADE)
    description = MarkdownxField()
    rsvp_limit = models.PositiveIntegerField(null=True, blank=True)
    access_level = models.IntegerField(
        choices=ACCESS_LEVELS,
        default=0,
    )

    created_by = models.ForeignKey(User, on_delete=models.CASCADE, default=None)
    created_at = models.DateTimeField(auto_now_add=True)

    google_calendar_event_id = models.CharField(max_length=255, null=True, blank=True)

    @property
    def semester(self):
        """A string representation of the candidate semester of this event.

        Assumes that there are only spring and fall semesters, separated at 07/01.

        Example: "Spring 2020"
        """
        return get_semester(self.start_time)

    def get_absolute_url(self):
        return "/events/{}".format(self.id)

    def __repr__(self):
        return "Event(name={}, location={})".format(self.name, self.location)

    def __str__(self):
        return "{} - {} to {}".format(self.name, self.start_time, self.end_time)

    def admitted_set(self):
        return self.rsvp_set.order_by("created_at")[: self.rsvp_limit]

    def waitlist_set(self):
        if not self.rsvp_limit:
            return self.rsvp_set.none()
        return self.rsvp_set.order_by("created_at")[self.rsvp_limit :]

    def on_waitlist(self, user):
        if not self.rsvp_limit:
            return False
        return (
            list(
                self.rsvp_set.order_by("created_at").values_list("user", flat=True)
            ).index(user.id)
            >= self.rsvp_limit
        )

    def newly_off_waitlist_rsvps(self, old_admitted):
        """old_admitted must be a set, not a QuerySet.
        QuerySets are mutable views into the database."""
        new_admitted = set(self.admitted_set())
        return new_admitted - old_admitted

    def save(self, *args, **kwargs):
        calendar_id = GCalAccessLevelMapping.get_calendar_id(self.access_level)

        if self.google_calendar_event_id is None:
            self.google_calendar_event_id = gcal.create_event(
                self.name,
                self.location,
                self.description,
                self.start_time.isoformat(),
                self.end_time.isoformat(),
                calendar_id=calendar_id,
            )
        else:
            gcal.update_event(
                self.google_calendar_event_id,
                summary=self.name,
                location=self.location,
                description=self.description,
                start=self.start_time.isoformat(),
                end=self.end_time.isoformat(),
                calendar_id=calendar_id,
            )

        for r in self.rsvp_set.all():
            r.save()

        super().save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        calendar_id = GCalAccessLevelMapping.get_calendar_id(self.access_level)
        gcal.delete_event(self.google_calendar_event_id, calendar_id=calendar_id)

        super().delete(*args, **kwargs)
py
1a5b3c4a33aa4e9d743732bfa714f6b1ec892bd4
"""Support for Meteo-France raining forecast sensor.""" from meteofrance_api.helpers import ( get_warning_text_status_from_indice_color, readeable_phenomenoms_dict, ) from homeassistant.components.sensor import SensorEntity from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, ) from homeassistant.util import dt as dt_util from .const import ( ATTR_NEXT_RAIN_1_HOUR_FORECAST, ATTR_NEXT_RAIN_DT_REF, ATTRIBUTION, COORDINATOR_ALERT, COORDINATOR_FORECAST, COORDINATOR_RAIN, DOMAIN, MANUFACTURER, MODEL, SENSOR_TYPES, SENSOR_TYPES_ALERT, SENSOR_TYPES_PROBABILITY, SENSOR_TYPES_RAIN, MeteoFranceSensorEntityDescription, ) async def async_setup_entry( hass: HomeAssistant, entry: ConfigEntry, async_add_entities ) -> None: """Set up the Meteo-France sensor platform.""" coordinator_forecast = hass.data[DOMAIN][entry.entry_id][COORDINATOR_FORECAST] coordinator_rain = hass.data[DOMAIN][entry.entry_id][COORDINATOR_RAIN] coordinator_alert = hass.data[DOMAIN][entry.entry_id][COORDINATOR_ALERT] entities = [ MeteoFranceSensor(coordinator_forecast, description) for description in SENSOR_TYPES ] # Add rain forecast entity only if location support this feature if coordinator_rain: entities.extend( [ MeteoFranceRainSensor(coordinator_rain, description) for description in SENSOR_TYPES_RAIN ] ) # Add weather alert entity only if location support this feature if coordinator_alert: entities.extend( [ MeteoFranceAlertSensor(coordinator_alert, description) for description in SENSOR_TYPES_ALERT ] ) # Add weather probability entities only if location support this feature if coordinator_forecast.data.probability_forecast: entities.extend( [ MeteoFranceSensor(coordinator_forecast, description) for description in SENSOR_TYPES_PROBABILITY ] ) async_add_entities(entities, False) class MeteoFranceSensor(CoordinatorEntity, SensorEntity): """Representation of a Meteo-France sensor.""" entity_description: MeteoFranceSensorEntityDescription def __init__( self, coordinator: DataUpdateCoordinator, description: MeteoFranceSensorEntityDescription, ) -> None: """Initialize the Meteo-France sensor.""" super().__init__(coordinator) self.entity_description = description if hasattr(coordinator.data, "position"): city_name = coordinator.data.position["name"] self._attr_name = f"{city_name} {description.name}" self._attr_unique_id = f"{coordinator.data.position['lat']},{coordinator.data.position['lon']}_{description.key}" self._attr_extra_state_attributes = {ATTR_ATTRIBUTION: ATTRIBUTION} @property def device_info(self): """Return the device info.""" return { "identifiers": {(DOMAIN, self.platform.config_entry.unique_id)}, "name": self.coordinator.name, "manufacturer": MANUFACTURER, "model": MODEL, "entry_type": "service", } @property def native_value(self): """Return the state.""" path = self.entity_description.data_path.split(":") data = getattr(self.coordinator.data, path[0]) # Specific case for probability forecast if path[0] == "probability_forecast": if len(path) == 3: # This is a fix compared to other entitty as first index is always null in API result for unknown reason value = _find_first_probability_forecast_not_null(data, path) else: value = data[0][path[1]] # General case else: if len(path) == 3: value = data[path[1]][path[2]] else: value = data[path[1]] if self.entity_description.key in ("wind_speed", "wind_gust"): # convert API wind speed from 
m/s to km/h value = round(value * 3.6) return value class MeteoFranceRainSensor(MeteoFranceSensor): """Representation of a Meteo-France rain sensor.""" @property def native_value(self): """Return the state.""" # search first cadran with rain next_rain = next( (cadran for cadran in self.coordinator.data.forecast if cadran["rain"] > 1), None, ) return ( dt_util.utc_from_timestamp(next_rain["dt"]).isoformat() if next_rain else None ) @property def extra_state_attributes(self): """Return the state attributes.""" reference_dt = self.coordinator.data.forecast[0]["dt"] return { ATTR_NEXT_RAIN_DT_REF: dt_util.utc_from_timestamp(reference_dt).isoformat(), ATTR_NEXT_RAIN_1_HOUR_FORECAST: { f"{int((item['dt'] - reference_dt) / 60)} min": item["desc"] for item in self.coordinator.data.forecast }, ATTR_ATTRIBUTION: ATTRIBUTION, } class MeteoFranceAlertSensor(MeteoFranceSensor): """Representation of a Meteo-France alert sensor.""" def __init__( self, coordinator: DataUpdateCoordinator, description: MeteoFranceSensorEntityDescription, ) -> None: """Initialize the Meteo-France sensor.""" super().__init__(coordinator, description) dept_code = self.coordinator.data.domain_id self._attr_name = f"{dept_code} {description.name}" self._attr_unique_id = self._attr_name @property def native_value(self): """Return the state.""" return get_warning_text_status_from_indice_color( self.coordinator.data.get_domain_max_color() ) @property def extra_state_attributes(self): """Return the state attributes.""" return { **readeable_phenomenoms_dict(self.coordinator.data.phenomenons_max_colors), ATTR_ATTRIBUTION: ATTRIBUTION, } def _find_first_probability_forecast_not_null( probability_forecast: list, path: list ) -> int: """Search the first not None value in the first forecast elements.""" for forecast in probability_forecast[0:3]: if forecast[path[1]][path[2]] is not None: return forecast[path[1]][path[2]] # Default return value if no value founded return None
py
1a5b3cc3616effeb8c2215b6de19d4817c13c59e
"""The operator module Functional Equivalents to operators In the last lecture we wrote code such as: l = [2, 3, 4] reduce(lambda a, b: a * b, l) We used a lambda expression to create a functional version of the * operator This is something that happens quite often, so the operator module was created This module is a convenience module I can always use my own functions and lambda expressions instead The operator module Arithmetic Functions """ add(a, b) mul(a, b) pow(a, b) mod(a, b) floordiv(a, b) neg(a) # and many more lt(a, b) # less than le(a, b) # less than or equal gt(a, b) # greater than ge(a, b) # greater than or equal to eq(a, b) # equal to ne(a, b) # not equal to is_(a, b) # is is_not(a,b) # is not and_(a, ) # and or_(a, b) # or not_(a, b) # not """ Sequence/mapping operators concat(s1, s2) contains(s, val) countOf(s, val) getitems(s, i) ------------------------ \ setitem(s, i, val) \ | mutable objects ---| variants that use slices delitem(s, i) --------------------------/ Item getters The item getter function returns a callable getitems(s, i) takes two parameters, and returns a value: s[i] s = [1, 2, 3] getitem(s, 1) # 2 itemgetter(i) # returns a callable which takesone parameter: a sequence object f = itemgetter(1) s = [1, 2, 3] s = 'python' f(s) # 2 f(s) # 'y' I can pass more than one index to tem getter: l = [1, 2, 3, 4, 5, 6] s = 'python' f = itemgetter(1, 3, 4) f(l) # (2, 4, 5) f(s) # ('y', 'h', 'o') Attribute Getters The attrgetter function is similarto itemgetter, but is used to retrive object attributes It also returns a callable, that takes thr object as an argument Suppose my_obj is an object with three properties: my_ob.a = 10 my_obj """
py
1a5b3fadebc12cb7510cf84792394a22c66e6207
# Generated by Django 2.2.1 on 2019-06-07 10:12

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('website', '0007_auto_20190607_1011'),
    ]

    operations = [
        migrations.AlterField(
            model_name='verenigingen',
            name='verhaal',
            field=models.CharField(blank=True, max_length=600, null=True),
        ),
    ]
py
1a5b40670890d255d0c102b22bcfa998c697c6eb
# -*- coding: utf-8 -*- """ LaunchDarkly REST API # Overview ## Authentication All REST API resources are authenticated with either [personal or service access tokens](https://docs.launchdarkly.com/home/account-security/api-access-tokens), or session cookies. Other authentication mechanisms are not supported. You can manage personal access tokens on your [Account settings](https://app.launchdarkly.com/settings/tokens) page. LaunchDarkly also has SDK keys, mobile keys, and client-side IDs that are used by our server-side SDKs, mobile SDKs, and client-side SDKs, respectively. **These keys cannot be used to access our REST API**. These keys are environment-specific, and can only perform read-only operations (fetching feature flag settings). | Auth mechanism | Allowed resources | Use cases | | ----------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------- | -------------------------------------------------- | | [Personal access tokens](https://docs.launchdarkly.com/home/account-security/api-access-tokens) | Can be customized on a per-token basis | Building scripts, custom integrations, data export | | SDK keys | Can only access read-only SDK-specific resources and the firehose, restricted to a single environment | Server-side SDKs, Firehose API | | Mobile keys | Can only access read-only mobile SDK-specific resources, restricted to a single environment | Mobile SDKs | | Client-side ID | Single environment, only flags marked available to client-side | Client-side JavaScript | > #### Keep your access tokens and SDK keys private > > Access tokens should _never_ be exposed in untrusted contexts. Never put an access token in client-side JavaScript, or embed it in a mobile application. LaunchDarkly has special mobile keys that you can embed in mobile apps. If you accidentally expose an access token or SDK key, you can reset it from your [Account Settings](https://app.launchdarkly.com/settings#/tokens) page. > > The client-side ID is safe to embed in untrusted contexts. It's designed for use in client-side JavaScript. ### Via request header The preferred way to authenticate with the API is by adding an `Authorization` header containing your access token to your requests. The value of the `Authorization` header must be your access token. Manage personal access tokens from the [Account Settings](https://app.launchdarkly.com/settings/tokens) page. ### Via session cookie For testing purposes, you can make API calls directly from your web browser. If you're logged in to the application, the API will use your existing session to authenticate calls. If you have a [role](https://docs.launchdarkly.com/home/team/built-in-roles) other than Admin, or have a [custom role](https://docs.launchdarkly.com/home/team/custom-roles) defined, you may not have permission to perform some API calls. You will receive a `401` response code in that case. > ### Modifying the Origin header causes an error > > LaunchDarkly validates that the Origin header for any API request authenticated by a session cookie matches the expected Origin header. The expected Origin header is `https://app.launchdarkly.com`. > > If the Origin header does not match what's expected, LaunchDarkly returns an error. This error can prevent the LaunchDarkly app from working correctly. > > Any browser extension that intentionally changes the Origin header can cause this problem. 
For example, the `Allow-Control-Allow-Origin: *` Chrome extension changes the Origin header to `http://evil.com` and causes the app to fail. > > To prevent this error, do not modify your Origin header. > > LaunchDarkly does not require origin matching when authenticating with an access token, so this issue does not affect normal API usage. ## Representations All resources expect and return JSON response bodies. Error responses will also send a JSON body. Read [Errors](#section/Errors) for a more detailed description of the error format used by the API. In practice this means that you always get a response with a `Content-Type` header set to `application/json`. In addition, request bodies for `PUT`, `POST`, `REPORT` and `PATCH` requests must be encoded as JSON with a `Content-Type` header set to `application/json`. ### Summary and detailed representations When you fetch a list of resources, the response includes only the most important attributes of each resource. This is a _summary representation_ of the resource. When you fetch an individual resource (for example, a single feature flag), you receive a _detailed representation_ containing all of the attributes of the resource. The best way to find a detailed representation is to follow links. Every summary representation includes a link to its detailed representation. ### Links and addressability The best way to navigate the API is by following links. These are attributes in representations that link to other resources. The API always uses the same format for links: - Links to other resources within the API are encapsulated in a `_links` object. - If the resource has a corresponding link to HTML content on the site, it is stored in a special `_site` link. Each link has two attributes: an href (the URL) and a type (the content type). For example, a feature resource might return the following: ```json { \"_links\": { \"parent\": { \"href\": \"/api/features\", \"type\": \"application/json\" }, \"self\": { \"href\": \"/api/features/sort.order\", \"type\": \"application/json\" } }, \"_site\": { \"href\": \"/features/sort.order\", \"type\": \"text/html\" } } ``` From this, you can navigate to the parent collection of features by following the `parent` link, or navigate to the site page for the feature by following the `_site` link. Collections are always represented as a JSON object with an `items` attribute containing an array of representations. Like all other representations, collections have `_links` defined at the top level. Paginated collections include `first`, `last`, `next`, and `prev` links containing a URL with the respective set of elements in the collection. ## Updates Resources that accept partial updates use the `PATCH` verb, and support the [JSON Patch](https://datatracker.ietf.org/doc/html/rfc6902) format. Some resources also support the [JSON Merge Patch](https://datatracker.ietf.org/doc/html/rfc7386) format. In addition, some resources support optional comments that can be submitted with updates. Comments appear in outgoing webhooks, the audit log, and other integrations. ### Updates via JSON Patch [JSON Patch](https://datatracker.ietf.org/doc/html/rfc6902) is a way to specify the modifications to perform on a resource. For example, in this feature flag representation: ```json { \"name\": \"New recommendations engine\", \"key\": \"engine.enable\", \"description\": \"This is the description\", ... 
} ``` You can change the feature flag's description with the following patch document: ```json [{ \"op\": \"replace\", \"path\": \"/description\", \"value\": \"This is the new description\" }] ``` JSON Patch documents are always arrays. You can specify multiple modifications to perform in a single request. You can also test that certain preconditions are met before applying the patch: ```json [ { \"op\": \"test\", \"path\": \"/version\", \"value\": 10 }, { \"op\": \"replace\", \"path\": \"/description\", \"value\": \"The new description\" } ] ``` The above patch request tests whether the feature flag's `version` is `10`, and if so, changes the feature flag's description. Attributes that aren't editable, like a resource's `_links`, have names that start with an underscore. ### Updates via JSON Merge Patch The API also supports the [JSON Merge Patch](https://datatracker.ietf.org/doc/html/rfc7386) format, as well as the [Update feature flag](/tag/Feature-flags#operation/patchFeatureFlag) resource. JSON Merge Patch is less expressive than JSON Patch but in many cases, it is simpler to construct a merge patch document. For example, you can change a feature flag's description with the following merge patch document: ```json { \"description\": \"New flag description\" } ``` ### Updates with comments You can submit optional comments with `PATCH` changes. The [Update feature flag](/tag/Feature-flags#operation/patchFeatureFlag) resource supports comments. To submit a comment along with a JSON Patch document, use the following format: ```json { \"comment\": \"This is a comment string\", \"patch\": [{ \"op\": \"replace\", \"path\": \"/description\", \"value\": \"The new description\" }] } ``` To submit a comment along with a JSON Merge Patch document, use the following format: ```json { \"comment\": \"This is a comment string\", \"merge\": { \"description\": \"New flag description\" } } ``` ### Updates via semantic patches The API also supports the Semantic patch format. A semantic `PATCH` is a way to specify the modifications to perform on a resource as a set of executable instructions. JSON Patch uses paths and a limited set of operations to describe how to transform the current state of the resource into a new state. Semantic patch allows you to be explicit about intent using precise, custom instructions. In many cases, semantic patch instructions can also be defined independently of the current state of the resource. This can be useful when defining a change that may be applied at a future date. For example, in this feature flag configuration in environment Production: ```json { \"name\": \"Alternate sort order\", \"kind\": \"boolean\", \"key\": \"sort.order\", ... \"environments\": { \"production\": { \"on\": true, \"archived\": false, \"salt\": \"c29ydC5vcmRlcg==\", \"sel\": \"8de1085cb7354b0ab41c0e778376dfd3\", \"lastModified\": 1469131558260, \"version\": 81, \"targets\": [ { \"values\": [ \"[email protected]\" ], \"variation\": 0 }, { \"values\": [ \"1461797806429-33-861961230\", \"438580d8-02ee-418d-9eec-0085cab2bdf0\" ], \"variation\": 1 } ], \"rules\": [], \"fallthrough\": { \"variation\": 0 }, \"offVariation\": 1, \"prerequisites\": [], \"_site\": { \"href\": \"/default/production/features/sort.order\", \"type\": \"text/html\" } } } } ``` You can add a date you want a user to be removed from the feature flag's user targets. 
For example, “remove user 1461797806429-33-861961230 from the user target for variation 0 on the Alternate sort order flag in the production environment on Wed Jul 08 2020 at 15:27:41 pm”. This is done using the following: ```json { \"comment\": \"update expiring user targets\", \"instructions\": [ { \"kind\": \"removeExpireUserTargetDate\", \"userKey\": \"userKey\", \"variationId\": \"978d53f9-7fe3-4a63-992d-97bcb4535dc8\" }, { \"kind\": \"updateExpireUserTargetDate\", \"userKey\": \"userKey2\", \"variationId\": \"978d53f9-7fe3-4a63-992d-97bcb4535dc8\", \"value\": 1587582000000 }, { \"kind\": \"addExpireUserTargetDate\", \"userKey\": \"userKey3\", \"variationId\": \"978d53f9-7fe3-4a63-992d-97bcb4535dc8\", \"value\": 1594247266386 } ] } ``` Here is another example. In this feature flag configuration: ```json { \"name\": \"New recommendations engine\", \"key\": \"engine.enable\", \"environments\": { \"test\": { \"on\": true } } } ``` You can change the feature flag's description with the following patch document as a set of executable instructions. For example, “add user X to targets for variation Y and remove user A from targets for variation B for test flag”: ```json { \"comment\": \"\", \"instructions\": [ { \"kind\": \"removeUserTargets\", \"values\": [\"438580d8-02ee-418d-9eec-0085cab2bdf0\"], \"variationId\": \"852cb784-54ff-46b9-8c35-5498d2e4f270\" }, { \"kind\": \"addUserTargets\", \"values\": [\"438580d8-02ee-418d-9eec-0085cab2bdf0\"], \"variationId\": \"1bb18465-33b6-49aa-a3bd-eeb6650b33ad\" } ] } ``` > ### Supported semantic patch API endpoints > > - [Update feature flag](/tag/Feature-flags#operation/patchFeatureFlag) > - [Update expiring user targets on feature flag](/tag/Feature-flags#operation/patchExpiringUserTargets) > - [Update expiring user target for flags](/tag/User-settings#operation/patchExpiringFlagsForUser) > - [Update expiring user targets on segment](/tag/Segments#operation/patchExpiringUserTargetsForSegment) ## Errors The API always returns errors in a common format. Here's an example: ```json { \"code\": \"invalid_request\", \"message\": \"A feature with that key already exists\", \"id\": \"30ce6058-87da-11e4-b116-123b93f75cba\" } ``` The general class of error is indicated by the `code`. The `message` is a human-readable explanation of what went wrong. The `id` is a unique identifier. Use it when you're working with LaunchDarkly support to debug a problem with a specific API call. ### HTTP Status - Error Response Codes | Code | Definition | Desc. | Possible Solution | | ---- | ----------------- | ------------------------------------------------------------------------------------------- | ---------------------------------------------------------------- | | 400 | Bad Request | A request that fails may return this HTTP response code. | Ensure JSON syntax in request body is correct. | | 401 | Unauthorized | User doesn't have permission to an API call. | Ensure your SDK key is good. | | 403 | Forbidden | User does not have permission for operation. | Ensure that the user or access token has proper permissions set. | | 409 | Conflict | The API request could not be completed because it conflicted with a concurrent API request. | Retry your request. | | 429 | Too many requests | See [Rate limiting](/#section/Rate-limiting). | Wait and try again later. | ## CORS The LaunchDarkly API supports Cross Origin Resource Sharing (CORS) for AJAX requests from any origin. If an `Origin` header is given in a request, it will be echoed as an explicitly allowed origin. 
Otherwise, a wildcard is returned: `Access-Control-Allow-Origin: *`. For more information on CORS, see the [CORS W3C Recommendation](http://www.w3.org/TR/cors). Example CORS headers might look like: ```http Access-Control-Allow-Headers: Accept, Content-Type, Content-Length, Accept-Encoding, Authorization Access-Control-Allow-Methods: OPTIONS, GET, DELETE, PATCH Access-Control-Allow-Origin: * Access-Control-Max-Age: 300 ``` You can make authenticated CORS calls just as you would make same-origin calls, using either [token or session-based authentication](#section/Authentication). If you’re using session auth, you should set the `withCredentials` property for your `xhr` request to `true`. You should never expose your access tokens to untrusted users. ## Rate limiting We use several rate limiting strategies to ensure the availability of our APIs. Rate-limited calls to our APIs will return a `429` status code. Calls to our APIs will include headers indicating the current rate limit status. The specific headers returned depend on the API route being called. The limits differ based on the route, authentication mechanism, and other factors. Routes that are not rate limited may not contain any of the headers described below. > ### Rate limiting and SDKs > > LaunchDarkly SDKs are never rate limited and do not use the API endpoints defined here. LaunchDarkly uses a different set of approaches, including streaming/server-sent events and a global CDN, to ensure availability to the routes used by LaunchDarkly SDKs. > > The client-side ID is safe to embed in untrusted contexts. It's designed for use in client-side JavaScript. ### Global rate limits Authenticated requests are subject to a global limit. This is the maximum number of calls that can be made to the API per ten seconds. All personal access tokens on the account share this limit, so exceeding the limit with one access token will impact other tokens. Calls that are subject to global rate limits will return the headers below: | Header name | Description | | ------------------------------ | -------------------------------------------------------------------------------- | | `X-Ratelimit-Global-Remaining` | The maximum number of requests the account is permitted to make per ten seconds. | | `X-Ratelimit-Reset` | The time at which the current rate limit window resets in epoch milliseconds. | We do not publicly document the specific number of calls that can be made globally. This limit may change, and we encourage clients to program against the specification, relying on the two headers defined above, rather than hardcoding to the current limit. ### Route-level rate limits Some authenticated routes have custom rate limits. These also reset every ten seconds. Any access tokens hitting the same route share this limit, so exceeding the limit with one access token may impact other tokens. Calls that are subject to route-level rate limits will return the headers below: | Header name | Description | | ----------------------------- | ----------------------------------------------------------------------------------------------------- | | `X-Ratelimit-Route-Remaining` | The maximum number of requests to the current route the account is permitted to make per ten seconds. | | `X-Ratelimit-Reset` | The time at which the current rate limit window resets in epoch milliseconds. | A _route_ represents a specific URL pattern and verb. 
For example, the [Delete environment](/tag/Environments#operation/deleteEnvironment) endpoint is considered a single route, and each call to delete an environment counts against your route-level rate limit for that route. We do not publicly document the specific number of calls that can be made to each endpoint per ten seconds. These limits may change, and we encourage clients to program against the specification, relying on the two headers defined above, rather than hardcoding to the current limits. ### IP-based rate limiting We also employ IP-based rate limiting on some API routes. If you hit an IP-based rate limit, your API response will include a `Retry-After` header indicating how long to wait before re-trying the call. Clients must wait at least `Retry-After` seconds before making additional calls to our API, and should employ jitter and backoff strategies to avoid triggering rate limits again. ## OpenAPI (Swagger) We have a [complete OpenAPI (Swagger) specification](https://app.launchdarkly.com/api/v2/openapi.json) for our API. You can use this specification to generate client libraries to interact with our REST API in your language of choice. This specification is supported by several API-based tools such as Postman and Insomnia. In many cases, you can directly import our specification to ease use in navigating the APIs in the tooling. ## Client libraries We auto-generate multiple client libraries based on our OpenAPI specification. To learn more, visit [GitHub](https://github.com/search?q=topic%3Alaunchdarkly-api+org%3Alaunchdarkly&type=Repositories). ## Method Overriding Some firewalls and HTTP clients restrict the use of verbs other than `GET` and `POST`. In those environments, our API endpoints that use `PUT`, `PATCH`, and `DELETE` verbs will be inaccessible. To avoid this issue, our API supports the `X-HTTP-Method-Override` header, allowing clients to \"tunnel\" `PUT`, `PATCH`, and `DELETE` requests via a `POST` request. For example, if you wish to call one of our `PATCH` resources via a `POST` request, you can include `X-HTTP-Method-Override:PATCH` as a header. ## Beta resources We sometimes release new API resources in **beta** status before we release them with general availability. Resources that are in beta are still undergoing testing and development. They may change without notice, including becoming backwards incompatible. We try to promote resources into general availability as quickly as possible. This happens after sufficient testing and when we're satisfied that we no longer need to make backwards-incompatible changes. We mark beta resources with a \"Beta\" callout in our documentation, pictured below: > ### This feature is in beta > > To use this feature, pass in a header including the `LD-API-Version` key with value set to `beta`. Use this header with each call. To learn more, read [Beta resources](/#section/Beta-resources). ### Using beta resources To use a beta resource, you must include a header in the request. If you call a beta resource without this header, you'll receive a `403` response. Use this header: ``` LD-API-Version: beta ``` ## Versioning We try hard to keep our REST API backwards compatible, but we occasionally have to make backwards-incompatible changes in the process of shipping new features. These breaking changes can cause unexpected behavior if you don't prepare for them accordingly. Updates to our REST API include support for the latest features in LaunchDarkly. 
We also release a new version of our REST API every time we make a breaking change. We provide simultaneous support for multiple API versions so you can migrate from your current API version to a new version at your own pace. ### Setting the API version per request You can set the API version on a specific request by sending an `LD-API-Version` header, as shown in the example below: ``` LD-API-Version: 20191212 ``` The header value is the version number of the API version you'd like to request. The number for each version corresponds to the date the version was released. In the example above the version `20191212` corresponds to December 12, 2019. ### Setting the API version per access token When creating an access token, you must specify a specific version of the API to use. This ensures that integrations using this token cannot be broken by version changes. Tokens created before versioning was released have their version set to `20160426` (the version of the API that existed before versioning) so that they continue working the same way they did before versioning. If you would like to upgrade your integration to use a new API version, you can explicitly set the header described above. > ### Best practice: Set the header for every client or integration > > We recommend that you set the API version header explicitly in any client or integration you build. > > Only rely on the access token API version during manual testing. # noqa: E501 The version of the OpenAPI document: 2.0 Contact: [email protected] Generated by: https://openapi-generator.tech """ import sys import unittest import launchdarkly_api from launchdarkly_api.model.resource_access import ResourceAccess class TestResourceAccess(unittest.TestCase): """ResourceAccess unit test stubs""" def setUp(self): pass def tearDown(self): pass def testResourceAccess(self): """Test ResourceAccess""" # FIXME: construct object with mandatory attributes with example values # model = ResourceAccess() # noqa: E501 pass if __name__ == '__main__': unittest.main()
py
1a5b40b568487916bf1d26253501ab841efa047f
import errno import operator import os import shutil import site from optparse import SUPPRESS_HELP, Values from typing import Iterable, List, Optional from pip._vendor.packaging.utils import canonicalize_name from pip._internal.cache import WheelCache from pip._internal.cli import cmdoptions from pip._internal.cli.cmdoptions import make_target_python from pip._internal.cli.req_command import ( RequirementCommand, warn_if_run_as_root, with_cleanup, ) from pip._internal.cli.status_codes import ERROR, SUCCESS from pip._internal.exceptions import CommandError, InstallationError from pip._internal.locations import get_scheme from pip._internal.metadata import get_environment from pip._internal.models.format_control import FormatControl from pip._internal.operations.build.build_tracker import get_build_tracker from pip._internal.operations.check import ConflictDetails, check_install_conflicts from pip._internal.req import install_given_reqs from pip._internal.req.req_install import InstallRequirement from pip._internal.utils.compat import WINDOWS from pip._internal.utils.distutils_args import parse_distutils_args from pip._internal.utils.filesystem import test_writable_dir from pip._internal.utils.logging import getLogger from pip._internal.utils.misc import ( ensure_dir, get_pip_version, protect_pip_from_modification_on_windows, write_output, ) from pip._internal.utils.temp_dir import TempDirectory from pip._internal.utils.virtualenv import ( running_under_virtualenv, virtualenv_no_global, ) from pip._internal.wheel_builder import ( BinaryAllowedPredicate, build, should_build_for_install_command, ) logger = getLogger(__name__) def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate: def check_binary_allowed(req: InstallRequirement) -> bool: canonical_name = canonicalize_name(req.name or "") allowed_formats = format_control.get_allowed_formats(canonical_name) return "binary" in allowed_formats return check_binary_allowed class InstallCommand(RequirementCommand): """ Install packages from: - PyPI (and other indexes) using requirement specifiers. - VCS project urls. - Local project directories. - Local or remote source archives. pip also supports installing from "requirements files", which provide an easy way to specify a whole environment to be installed. """ usage = """ %prog [options] <requirement specifier> [package-index-options] ... %prog [options] -r <requirements file> [package-index-options] ... %prog [options] [-e] <vcs project url> ... %prog [options] [-e] <local project path> ... %prog [options] <archive url/path> ...""" def add_options(self) -> None: self.cmd_opts.add_option(cmdoptions.requirements()) self.cmd_opts.add_option(cmdoptions.constraints()) self.cmd_opts.add_option(cmdoptions.no_deps()) self.cmd_opts.add_option(cmdoptions.pre()) self.cmd_opts.add_option(cmdoptions.editable()) self.cmd_opts.add_option( "-t", "--target", dest="target_dir", metavar="dir", default=None, help=( "Install packages into <dir>. " "By default this will not replace existing files/folders in " "<dir>. Use --upgrade to replace existing packages in <dir> " "with new versions." ), ) cmdoptions.add_target_python_options(self.cmd_opts) self.cmd_opts.add_option( "--user", dest="use_user_site", action="store_true", help=( "Install to the Python user install directory for your " "platform. Typically ~/.local/, or %APPDATA%\\Python on " "Windows. 
(See the Python documentation for site.USER_BASE " "for full details.)" ), ) self.cmd_opts.add_option( "--no-user", dest="use_user_site", action="store_false", help=SUPPRESS_HELP, ) self.cmd_opts.add_option( "--root", dest="root_path", metavar="dir", default=None, help="Install everything relative to this alternate root directory.", ) self.cmd_opts.add_option( "--prefix", dest="prefix_path", metavar="dir", default=None, help=( "Installation prefix where lib, bin and other top-level " "folders are placed" ), ) self.cmd_opts.add_option(cmdoptions.src()) self.cmd_opts.add_option( "-U", "--upgrade", dest="upgrade", action="store_true", help=( "Upgrade all specified packages to the newest available " "version. The handling of dependencies depends on the " "upgrade-strategy used." ), ) self.cmd_opts.add_option( "--upgrade-strategy", dest="upgrade_strategy", default="only-if-needed", choices=["only-if-needed", "eager"], help=( "Determines how dependency upgrading should be handled " "[default: %default]. " '"eager" - dependencies are upgraded regardless of ' "whether the currently installed version satisfies the " "requirements of the upgraded package(s). " '"only-if-needed" - are upgraded only when they do not ' "satisfy the requirements of the upgraded package(s)." ), ) self.cmd_opts.add_option( "--force-reinstall", dest="force_reinstall", action="store_true", help="Reinstall all packages even if they are already up-to-date.", ) self.cmd_opts.add_option( "-I", "--ignore-installed", dest="ignore_installed", action="store_true", help=( "Ignore the installed packages, overwriting them. " "This can break your system if the existing package " "is of a different version or was installed " "with a different package manager!" ), ) self.cmd_opts.add_option(cmdoptions.ignore_requires_python()) self.cmd_opts.add_option(cmdoptions.no_build_isolation()) self.cmd_opts.add_option(cmdoptions.use_pep517()) self.cmd_opts.add_option(cmdoptions.no_use_pep517()) self.cmd_opts.add_option(cmdoptions.install_options()) self.cmd_opts.add_option(cmdoptions.global_options()) self.cmd_opts.add_option( "--compile", action="store_true", dest="compile", default=True, help="Compile Python source files to bytecode", ) self.cmd_opts.add_option( "--no-compile", action="store_false", dest="compile", help="Do not compile Python source files to bytecode", ) self.cmd_opts.add_option( "--no-warn-script-location", action="store_false", dest="warn_script_location", default=True, help="Do not warn when installing scripts outside PATH", ) self.cmd_opts.add_option( "--no-warn-conflicts", action="store_false", dest="warn_about_conflicts", default=True, help="Do not warn about broken dependencies", ) self.cmd_opts.add_option(cmdoptions.no_binary()) self.cmd_opts.add_option(cmdoptions.only_binary()) self.cmd_opts.add_option(cmdoptions.prefer_binary()) self.cmd_opts.add_option(cmdoptions.require_hashes()) self.cmd_opts.add_option(cmdoptions.progress_bar()) self.cmd_opts.add_option(cmdoptions.warn_about_root_user()) index_opts = cmdoptions.make_option_group( cmdoptions.index_group, self.parser, ) self.parser.insert_option_group(0, index_opts) self.parser.insert_option_group(0, self.cmd_opts) @with_cleanup def run(self, options: Values, args: List[str]) -> int: if options.use_user_site and options.target_dir is not None: raise CommandError("Can not combine '--user' and '--target'") cmdoptions.check_install_build_global(options) upgrade_strategy = "to-satisfy-only" if options.upgrade: upgrade_strategy = options.upgrade_strategy 
cmdoptions.check_dist_restriction(options, check_target=True) install_options = options.install_options or [] logger.verbose("Using %s", get_pip_version()) options.use_user_site = decide_user_install( options.use_user_site, prefix_path=options.prefix_path, target_dir=options.target_dir, root_path=options.root_path, isolated_mode=options.isolated_mode, ) target_temp_dir: Optional[TempDirectory] = None target_temp_dir_path: Optional[str] = None if options.target_dir: options.ignore_installed = True options.target_dir = os.path.abspath(options.target_dir) if ( # fmt: off os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir) # fmt: on ): raise CommandError( "Target path exists but is not a directory, will not continue." ) # Create a target directory for using with the target option target_temp_dir = TempDirectory(kind="target") target_temp_dir_path = target_temp_dir.path self.enter_context(target_temp_dir) global_options = options.global_options or [] session = self.get_default_session(options) target_python = make_target_python(options) finder = self._build_package_finder( options=options, session=session, target_python=target_python, ignore_requires_python=options.ignore_requires_python, ) wheel_cache = WheelCache(options.cache_dir, options.format_control) build_tracker = self.enter_context(get_build_tracker()) directory = TempDirectory( delete=not options.no_clean, kind="install", globally_managed=True, ) try: reqs = self.get_requirements(args, options, finder, session) # Only when installing is it permitted to use PEP 660. # In other circumstances (pip wheel, pip download) we generate # regular (i.e. non editable) metadata and wheels. for req in reqs: req.permit_editable_wheels = True reject_location_related_install_options(reqs, options.install_options) preparer = self.make_requirement_preparer( temp_build_dir=directory, options=options, build_tracker=build_tracker, session=session, finder=finder, use_user_site=options.use_user_site, verbosity=self.verbosity, ) resolver = self.make_resolver( preparer=preparer, finder=finder, options=options, wheel_cache=wheel_cache, use_user_site=options.use_user_site, ignore_installed=options.ignore_installed, ignore_requires_python=options.ignore_requires_python, force_reinstall=options.force_reinstall, upgrade_strategy=upgrade_strategy, use_pep517=options.use_pep517, ) self.trace_basic_info(finder) requirement_set = resolver.resolve( reqs, check_supported_wheels=not options.target_dir ) try: pip_req = requirement_set.get_requirement("pip") except KeyError: modifying_pip = False else: # If we're not replacing an already installed pip, # we're not modifying it. modifying_pip = pip_req.satisfied_by is None protect_pip_from_modification_on_windows(modifying_pip=modifying_pip) check_binary_allowed = get_check_binary_allowed(finder.format_control) reqs_to_build = [ r for r in requirement_set.requirements.values() if should_build_for_install_command(r, check_binary_allowed) ] _, build_failures = build( reqs_to_build, wheel_cache=wheel_cache, verify=True, build_options=[], global_options=[], ) # If we're using PEP 517, we cannot do a legacy setup.py install # so we fail here. 
pep517_build_failure_names: List[str] = [ r.name for r in build_failures if r.use_pep517 # type: ignore ] if pep517_build_failure_names: raise InstallationError( "Could not build wheels for {}, which is required to " "install pyproject.toml-based projects".format( ", ".join(pep517_build_failure_names) ) ) # For now, we just warn about failures building legacy # requirements, as we'll fall through to a setup.py install for # those. for r in build_failures: if not r.use_pep517: r.legacy_install_reason = 8368 to_install = resolver.get_installation_order(requirement_set) # Check for conflicts in the package set we're installing. conflicts: Optional[ConflictDetails] = None should_warn_about_conflicts = ( not options.ignore_dependencies and options.warn_about_conflicts ) if should_warn_about_conflicts: conflicts = self._determine_conflicts(to_install) # Don't warn about script install locations if # --target or --prefix has been specified warn_script_location = options.warn_script_location if options.target_dir or options.prefix_path: warn_script_location = False installed = install_given_reqs( to_install, install_options, global_options, root=options.root_path, home=target_temp_dir_path, prefix=options.prefix_path, warn_script_location=warn_script_location, use_user_site=options.use_user_site, pycompile=options.compile, ) lib_locations = get_lib_location_guesses( user=options.use_user_site, home=target_temp_dir_path, root=options.root_path, prefix=options.prefix_path, isolated=options.isolated_mode, ) env = get_environment(lib_locations) installed.sort(key=operator.attrgetter("name")) items = [] for result in installed: item = result.name try: installed_dist = env.get_distribution(item) if installed_dist is not None: item = f"{item}-{installed_dist.version}" except Exception: pass items.append(item) if conflicts is not None: self._warn_about_conflicts( conflicts, resolver_variant=self.determine_resolver_variant(options), ) installed_desc = " ".join(items) if installed_desc: write_output( "Successfully installed %s", installed_desc, ) except OSError as error: show_traceback = self.verbosity >= 1 message = create_os_error_message( error, show_traceback, options.use_user_site, ) logger.error(message, exc_info=show_traceback) # noqa return ERROR if options.target_dir: assert target_temp_dir self._handle_target_dir( options.target_dir, target_temp_dir, options.upgrade ) if options.warn_about_root_user: warn_if_run_as_root() return SUCCESS def _handle_target_dir( self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool ) -> None: ensure_dir(target_dir) # Checking both purelib and platlib directories for installed # packages to be moved to target directory lib_dir_list = [] # Checking both purelib and platlib directories for installed # packages to be moved to target directory scheme = get_scheme("", home=target_temp_dir.path) purelib_dir = scheme.purelib platlib_dir = scheme.platlib data_dir = scheme.data if os.path.exists(purelib_dir): lib_dir_list.append(purelib_dir) if os.path.exists(platlib_dir) and platlib_dir != purelib_dir: lib_dir_list.append(platlib_dir) if os.path.exists(data_dir): lib_dir_list.append(data_dir) for lib_dir in lib_dir_list: for item in os.listdir(lib_dir): if lib_dir == data_dir: ddir = os.path.join(data_dir, item) if any(s.startswith(ddir) for s in lib_dir_list[:-1]): continue target_item_dir = os.path.join(target_dir, item) if os.path.exists(target_item_dir): if not upgrade: logger.warning( "Target directory %s already exists. 
Specify " "--upgrade to force replacement.", target_item_dir, ) continue if os.path.islink(target_item_dir): logger.warning( "Target directory %s already exists and is " "a link. pip will not automatically replace " "links, please remove if replacement is " "desired.", target_item_dir, ) continue if os.path.isdir(target_item_dir): shutil.rmtree(target_item_dir) else: os.remove(target_item_dir) shutil.move(os.path.join(lib_dir, item), target_item_dir) def _determine_conflicts( self, to_install: List[InstallRequirement] ) -> Optional[ConflictDetails]: try: return check_install_conflicts(to_install) except Exception: logger.exception( "Error while checking for conflicts. Please file an issue on " "pip's issue tracker: https://github.com/pypa/pip/issues/new" ) return None def _warn_about_conflicts( self, conflict_details: ConflictDetails, resolver_variant: str ) -> None: package_set, (missing, conflicting) = conflict_details if not missing and not conflicting: return parts: List[str] = [] if resolver_variant == "legacy": parts.append( "pip's legacy dependency resolver does not consider dependency " "conflicts when selecting packages. This behaviour is the " "source of the following dependency conflicts." ) else: assert resolver_variant == "2020-resolver" parts.append( "pip's dependency resolver does not currently take into account " "all the packages that are installed. This behaviour is the " "source of the following dependency conflicts." ) # NOTE: There is some duplication here, with commands/check.py for project_name in missing: version = package_set[project_name][0] for dependency in missing[project_name]: message = ( "{name} {version} requires {requirement}, " "which is not installed." ).format( name=project_name, version=version, requirement=dependency[1], ) parts.append(message) for project_name in conflicting: version = package_set[project_name][0] for dep_name, dep_version, req in conflicting[project_name]: message = ( "{name} {version} requires {requirement}, but {you} have " "{dep_name} {dep_version} which is incompatible." ).format( name=project_name, version=version, requirement=req, dep_name=dep_name, dep_version=dep_version, you=("you" if resolver_variant == "2020-resolver" else "you'll"), ) parts.append(message) logger.critical("\n".join(parts)) def get_lib_location_guesses( user: bool = False, home: Optional[str] = None, root: Optional[str] = None, isolated: bool = False, prefix: Optional[str] = None, ) -> List[str]: scheme = get_scheme( "", user=user, home=home, root=root, isolated=isolated, prefix=prefix, ) return [scheme.purelib, scheme.platlib] def site_packages_writable(root: Optional[str], isolated: bool) -> bool: return all( test_writable_dir(d) for d in set(get_lib_location_guesses(root=root, isolated=isolated)) ) def decide_user_install( use_user_site: Optional[bool], prefix_path: Optional[str] = None, target_dir: Optional[str] = None, root_path: Optional[str] = None, isolated_mode: bool = False, ) -> bool: """Determine whether to do a user install based on the input options. If use_user_site is False, no additional checks are done. If use_user_site is True, it is checked for compatibility with other options. If use_user_site is None, the default behaviour depends on the environment, which is provided by the other arguments. """ # In some cases (config from tox), use_user_site can be set to an integer # rather than a bool, which 'use_user_site is False' wouldn't catch. 
if (use_user_site is not None) and (not use_user_site): logger.debug("Non-user install by explicit request") return False if use_user_site: if prefix_path: raise CommandError( "Can not combine '--user' and '--prefix' as they imply " "different installation locations" ) if virtualenv_no_global(): raise InstallationError( "Can not perform a '--user' install. User site-packages " "are not visible in this virtualenv." ) logger.debug("User install by explicit request") return True # If we are here, user installs have not been explicitly requested/avoided assert use_user_site is None # user install incompatible with --prefix/--target if prefix_path or target_dir: logger.debug("Non-user install due to --prefix or --target option") return False # If user installs are not enabled, choose a non-user install if not site.ENABLE_USER_SITE: logger.debug("Non-user install because user site-packages disabled") return False # If we have permission for a non-user install, do that, # otherwise do a user install. if site_packages_writable(root=root_path, isolated=isolated_mode): logger.debug("Non-user install because site-packages writeable") return False logger.info( "Defaulting to user installation because normal site-packages " "is not writeable" ) return True def reject_location_related_install_options( requirements: List[InstallRequirement], options: Optional[List[str]] ) -> None: """If any location-changing --install-option arguments were passed for requirements or on the command-line, then show a deprecation warning. """ def format_options(option_names: Iterable[str]) -> List[str]: return ["--{}".format(name.replace("_", "-")) for name in option_names] offenders = [] for requirement in requirements: install_options = requirement.install_options location_options = parse_distutils_args(install_options) if location_options: offenders.append( "{!r} from {}".format( format_options(location_options.keys()), requirement ) ) if options: location_options = parse_distutils_args(options) if location_options: offenders.append( "{!r} from command line".format(format_options(location_options.keys())) ) if not offenders: return raise CommandError( "Location-changing options found in --install-option: {}." " This is unsupported, use pip-level options like --user," " --prefix, --root, and --target instead.".format("; ".join(offenders)) ) def create_os_error_message( error: OSError, show_traceback: bool, using_user_site: bool ) -> str: """Format an error message for an OSError It may occur anytime during the execution of the install command. 
""" parts = [] # Mention the error if we are not going to show a traceback parts.append("Could not install packages due to an OSError") if not show_traceback: parts.append(": ") parts.append(str(error)) else: parts.append(".") # Spilt the error indication from a helper message (if any) parts[-1] += "\n" # Suggest useful actions to the user: # (1) using user site-packages or (2) verifying the permissions if error.errno == errno.EACCES: user_option_part = "Consider using the `--user` option" permissions_part = "Check the permissions" if not running_under_virtualenv() and not using_user_site: parts.extend( [ user_option_part, " or ", permissions_part.lower(), ] ) else: parts.append(permissions_part) parts.append(".\n") # Suggest the user to enable Long Paths if path length is # more than 260 if ( WINDOWS and error.errno == errno.ENOENT and error.filename and len(error.filename) > 260 ): parts.append( "HINT: This error might have occurred since " "this system does not have Windows Long Path " "support enabled. You can find information on " "how to enable this at " "https://pip.pypa.io/warnings/enable-long-paths\n" ) return "".join(parts).strip() + "\n"
py
1a5b40bec2d8a62b0e84585009773df7cf67fed5
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Module contains the helper for the CI step.

It is used to find and verify correctness of beam examples/katas/tests.
"""
import logging
from typing import List

from api.v1.api_pb2 import STATUS_COMPILE_ERROR, STATUS_ERROR, STATUS_RUN_ERROR, \
    STATUS_RUN_TIMEOUT, \
    STATUS_VALIDATION_ERROR, STATUS_PREPARATION_ERROR
from config import Config
from grpc_client import GRPCClient
from helper import Example, get_statuses


class VerifyException(Exception):
    def __init__(self, error: str):
        super().__init__()
        self.msg = error

    def __str__(self):
        return self.msg


class CIHelper:
    """
    Helper for the CI step.

    It is used to find and verify correctness of beam examples/katas/tests.
    """

    async def verify_examples(self, examples: List[Example]):
        """
        Verify correctness of beam examples.

        1. Find all beam examples starting from directory os.getenv("BEAM_ROOT_DIR")
        2. Group code of examples by their SDK.
        3. Run processing for single-file examples to verify examples' code.
        """
        single_file_examples = list(filter(
            lambda example: example.tag.multifile is False, examples))
        await get_statuses(single_file_examples)
        await self._verify_examples(single_file_examples)

    async def _verify_examples(self, examples: List[Example]):
        """
        Verify statuses of beam examples and the number of found default examples.

        Check example.status for each example. If the status of the example is:
        - STATUS_VALIDATION_ERROR/STATUS_PREPARATION_ERROR
          /STATUS_ERROR/STATUS_RUN_TIMEOUT: log error
        - STATUS_COMPILE_ERROR: get logs using GetCompileOutput request and
          log them with error.
        - STATUS_RUN_ERROR: get logs using GetRunError request and
          log them with error.
Args: examples: beam examples that should be verified """ count_of_verified = 0 client = GRPCClient() verify_status_failed = False default_examples = [] for example in examples: if example.tag.default_example: default_examples.append(example) if example.status not in Config.ERROR_STATUSES: count_of_verified += 1 continue if example.status == STATUS_VALIDATION_ERROR: logging.error("Example: %s has validation error", example.filepath) elif example.status == STATUS_PREPARATION_ERROR: logging.error("Example: %s has preparation error", example.filepath) elif example.status == STATUS_ERROR: logging.error( "Example: %s has error during setup run builder", example.filepath) elif example.status == STATUS_RUN_TIMEOUT: logging.error("Example: %s failed because of timeout", example.filepath) elif example.status == STATUS_COMPILE_ERROR: err = await client.get_compile_output(example.pipeline_id) logging.error( "Example: %s has compilation error: %s", example.filepath, err) elif example.status == STATUS_RUN_ERROR: err = await client.get_run_error(example.pipeline_id) logging.error( "Example: %s has execution error: %s", example.filepath, err) verify_status_failed = True logging.info( "Number of verified Playground examples: %s / %s", count_of_verified, len(examples)) logging.info( "Number of Playground examples with some error: %s / %s", len(examples) - count_of_verified, len(examples)) if len(default_examples) == 0: logging.error("Default example not found") raise VerifyException( "CI step failed due to finding an incorrect number " "of default examples. Default example not found") if len(default_examples) > 1: logging.error("Many default examples found") logging.error("Examples where the default_example field is true:") for example in default_examples: logging.error(example.filepath) raise VerifyException( "CI step failed due to finding an incorrect number " "of default examples. Many default examples found") if verify_status_failed: raise VerifyException("CI step failed due to errors in the examples")
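

# Illustrative usage sketch: one way to drive the helper above from an async
# entry point. The `examples` list is assumed to be produced elsewhere in the
# CI tooling (e.g. by the project's example-discovery helper), so the block
# is left as a comment rather than executable code.
#
#     import asyncio
#
#     async def ci_step(examples: List[Example]):
#         await CIHelper().verify_examples(examples)
#
#     asyncio.run(ci_step(examples))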
py
1a5b41b689e4aa8d1b2ce5c205f818e03edf6738
from __future__ import unicode_literals from django.test import TestCase try: from unittest.mock import call, patch except ImportError: from mock import call, patch from ..forms import AggregateMetricForm, MetricCategoryForm class TestAggregateMetricForm(TestCase): def test_form(self): """Test that form has choices populated from R.metric_slugs""" # Set up a mock result for R.metric_slugs config = {'return_value.metric_slugs.return_value': ['test-slug']} with patch('redis_metrics.forms.R', **config) as mock_R: form = AggregateMetricForm() mock_R.assert_has_calls([ call(), call().metric_slugs(), ]) self.assertEqual( form.fields['metrics'].choices, [('test-slug', 'test-slug')] ) def test_cleaned_data(self): """Verify we get expected results from cleaned_data""" # Set up a mock result for R.metric_slugs config = {'return_value.metric_slugs.return_value': ['test-slug']} with patch('redis_metrics.forms.R', **config): form = AggregateMetricForm({"metrics": ["test-slug"]}) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data, {"metrics": ["test-slug"]}) class TestMetricCategoryForm(TestCase): def test_form(self): """Test that the form has choices from R.metric_slugs, and that providing a ``category`` argument sets initial values.""" # Set up a mock result for R.metric_slugs & R._category_slugs config = { 'return_value.metric_slugs.return_value': ['test-slug'], 'return_value._category_slugs.return_value': ['test-slug'] } with patch('redis_metrics.forms.R', **config) as mock_R: # No Category form = MetricCategoryForm() self.assertFalse(form.fields['metrics'].required) mock_R.assert_has_calls([ call(), call().metric_slugs(), ]) self.assertEqual( form.fields['metrics'].choices, [('test-slug', 'test-slug')] ) self.assertEqual(form.fields['metrics'].initial, None) self.assertEqual(form.fields['category_name'].initial, None) self.assertFalse(mock_R._category_slugs.called) mock_R.reset_mock() # With a Category initial = {'category_name': "Sample Category"} form = MetricCategoryForm(initial=initial) self.assertFalse(form.fields['metrics'].required) self.assertEqual(form.fields['metrics'].initial, ['test-slug']) self.assertEqual( form.fields['category_name'].initial, "Sample Category" ) r = mock_R.return_value r._category_slugs.assert_called_once_with("Sample Category") def test_cleaned_data(self): """Verify we get expected results from cleaned_data.""" # Set up a mock result for R.metric_slugs & R._category_slugs config = { 'return_value.metric_slugs.return_value': ['test-slug'], 'return_value._category_slugs.return_value': ['test-slug'] } with patch('redis_metrics.forms.R', **config): data = { 'category_name': 'Sample Data', 'metrics': ['test-slug'], } form = MetricCategoryForm(data) self.assertTrue(form.is_valid()) self.assertEqual(form.cleaned_data, data) def test_categorize_metrics(self): """Test the ``categorize_metrics`` method; This method should be called after POSTing.""" k = { 'return_value.metric_slugs.return_value': ['foo', 'bar', 'baz'], 'return_value._category_slugs.return_value': ['foo', 'bar'], } with patch('redis_metrics.forms.R', **k) as mock_R: data = {'category_name': 'Foo', 'metrics': ['foo', 'bar']} form = MetricCategoryForm(data) self.assertTrue(form.is_valid()) form.categorize_metrics() # This is what should happen in the form when POSTing mock_R.assert_has_calls([ # happens in __init__ call(), call().metric_slugs(), # happens in categorize_metrics call().reset_category('Foo', ['foo', 'bar']) ])
py
1a5b44755fb48d15434f667326509736183390f7
# # KTH Royal Institute of Technology # DD2424: Deep Learning in Data Science # Assignment 4 # # Carlo Rapisarda ([email protected]) # import numpy as np from sys import stderr from model import RNNet Theta = RNNet.Theta def eprint(*args, **kwargs): print(*args, file=stderr, **kwargs) def unpickle(filename): import pickle with open(filename, 'rb') as f: res = pickle.load(f, encoding='bytes') return res def pickle(obj, filename): import pickle as pickle_ with open(filename, 'wb') as f: pickle_.dump(obj, f) def _compute_grads_numerical(X, Y, m, K, theta, loss_fn, h): grads = Theta.zeros(m, K) grads_v = vars(grads) theta = vars(theta) for k in theta: for i in range(theta[k].size): theta[k].itemset(i, theta[k].item(i) - h) l1 = loss_fn(X, Y) theta[k].itemset(i, theta[k].item(i) + h) theta[k].itemset(i, theta[k].item(i) + h) l2 = loss_fn(X, Y) theta[k].itemset(i, theta[k].item(i) - h) grads_v[k].itemset(i, (l2 - l1) / (2.0 * h)) return grads def compute_grads_numerical(X, Y, h0, net: RNNet, step_size=1e-5): old_theta = net.theta tmp_theta = old_theta.copy() m, K = net.m, net.K net.theta = tmp_theta def loss_fn(X_, Y_): return net.cross_entropy_loss(X_, Y_, h_prev=h0) grads = _compute_grads_numerical(X, Y, m, K, tmp_theta, loss_fn, step_size) net.theta = old_theta return grads def relative_err(a,b,eps=1e-12): assert a.shape == b.shape return np.abs(a-b) / np.maximum(eps, np.abs(a)+np.abs(b)) def compare_grads(lhs: Theta, rhs: Theta, m, K): errors = Theta.zeros(m, K) errors_v = vars(errors) lhs = vars(lhs) rhs = vars(rhs) for k in lhs: errors_v[k] = relative_err(lhs[k], rhs[k]) return errors def simple_smooth_1d(x, alpha): assert len(x.shape) == 1, 'Function only works with 1D arrays' smooth_x = np.zeros(x.shape[0]) smooth_x[0] = x[0] for i in range(1, smooth_x.size): smooth_x[i] = alpha * smooth_x[i-1] + (1.0 - alpha) * x[i] return smooth_x
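

# Illustrative gradient-check sketch: how the helpers above are meant to be
# combined. `net`, `X`, `Y` and `h0` are assumed to come from the assignment
# code, and the analytical gradients from the network's own backward pass
# (the method name below is hypothetical), so the block is left as a comment.
#
#     analytical_grads = net.back_propagation(X, Y, h0)        # hypothetical method
#     numerical_grads = compute_grads_numerical(X, Y, h0, net)
#     errors = compare_grads(analytical_grads, numerical_grads, net.m, net.K)
#     for name, err in vars(errors).items():
#         print(name, err.max())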
py
1a5b47a47439e63139c8c6afeee089168cf90984
import pathlib from setuptools import setup # The directory containing this file HERE = pathlib.Path(__file__).parent # The text of the README file README = (HERE / "README.md").read_text() # This call to setup() does all the work setup( name="my_package_chetan", version="2.0.0", description="Read the latest my_package.", long_description=README, long_description_content_type="text/markdown", url="https://github.com/chetanghadawaje/my_package.git", author="Chetan Ghadawaje", author_email="[email protected]", license="MIT", classifiers=[ "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", ], packages=["my_package"], include_package_data=True, install_requires=[], entry_points={ "console_scripts": [ "my_package=__main__:main", ] }, )
py
1a5b48124b155cfafdea92d47fcf5b5886d1af42
from invoke.vendor import six import fabric.connection def create_connection(host, user, identity_file): return fabric.connection.Connection(host=host, user=user, connect_kwargs={ 'key_filename': identity_file, }) def mount_volume(conn, device, mounting_point, user, group): # Catch tail of greeting output res = conn.sudo('whoami', hide=True) # Inspect volume's file system res = conn.sudo('file -s {}'.format(device), hide=True) # Ensure volume contains a file system has_file_system = res.stdout.strip() != '{}: data'.format(device) if not has_file_system: conn.sudo('mkfs -t ext4 {}'.format(device), hide=True) # Create mounting point res = conn.run('mkdir -p {}'.format(mounting_point), hide=True) # Mount volume res = conn.sudo('mount {} {}'.format(device, mounting_point), hide=True) # If file system has just been created, fix group and user of the mounting point if not has_file_system: res = conn.sudo('chown -R {}:{} {}'.format(group, user, mounting_point), hide=True) def install_python_packages(conn, virtual_env, packages): if not packages: return with conn.prefix('source activate {}'.format(virtual_env)): conn.run('pip install {}'.format(' '.join(packages)), hide=True) def install_packages(conn, packages): if not packages: return # TODO: handle locked /var/lib/dpkg/lock conn.sudo('apt install -y {}'.format(' '.join(packages))) def sync_files(conn, local_path, remote_path, is_upload, is_recursive, allow_delete=False, strict_host_keys=True): """This code was ported from https://github.com/fabric/patchwork and extended for two-way transfer. """ exclude = () ssh_opts = "" rsync_opts = '--out-format="[%t] {} %f %\'\'b"'.format('OUT' if is_upload else 'IN') if is_recursive: rsync_opts += ' -r' # Turn single-string exclude into a one-item list for consistency if isinstance(exclude, six.string_types): exclude = [exclude] # Create --exclude options from exclude list exclude_opts = ' --exclude "{}"' * len(exclude) # Double-backslash-escape exclusions = tuple([str(s).replace('"', '\\\\"') for s in exclude]) # Honor SSH key(s) key_string = "" # TODO: seems plausible we need to look in multiple places if there's too # much deferred evaluation going on in how we eg source SSH config files # and so forth, re: connect_kwargs # TODO: we could get VERY fancy here by eg generating a tempfile from any # in-memory-only keys...but that's also arguably a security risk, so... keys = conn.connect_kwargs.get("key_filename", []) # TODO: would definitely be nice for Connection/FabricConfig to expose an # always-a-list, always-up-to-date-from-all-sources attribute to save us # from having to do this sort of thing. (may want to wait for Paramiko auth # overhaul tho!) 
if isinstance(keys, six.string_types): keys = [keys] if keys: key_string = "-i " + " -i ".join(keys) # Get base cxn params user, host, port = conn.user, conn.host, conn.port port_string = "-p {}".format(port) # Remote shell (SSH) options rsh_string = "" # Strict host key checking disable_keys = "-o StrictHostKeyChecking=no" if not strict_host_keys and disable_keys not in ssh_opts: ssh_opts += " {}".format(disable_keys) rsh_parts = [key_string, port_string, ssh_opts] if any(rsh_parts): rsh_string = "--rsh='ssh {}'".format(" ".join(rsh_parts)) # Set up options part of string options_map = { "delete": "--delete" if allow_delete else "", "exclude": exclude_opts.format(*exclusions), "rsh": rsh_string, "extra": rsync_opts, } options = "{delete}{exclude} -pthrvz {extra} {rsh}".format(**options_map) # Create and run final command string # TODO: richer host object exposing stuff like .address_is_ipv6 or whatever if host.count(":") > 1: # Square brackets are mandatory for IPv6 rsync address, # even if port number is not specified cmd = "rsync {opt:} {local:} [{user:}@{host:}]:{remote:}" if is_upload else "rsync {opt:} [{user:}@{host:}]:{remote:} {local:}" else: cmd = "rsync {opt:} {local:} {user:}@{host:}:{remote:}" if is_upload else "rsync {opt:} {user:}@{host:}:{remote:} {local:}" cmd = cmd.format(opt=options, local=local_path, user=user, host=host, remote=remote_path) res = conn.local(cmd, hide=True) # Get transferred files transferred_files = res.stdout.strip('\n').split('\n')[1:-3] if len(transferred_files) > 0: print('\n'.join(transferred_files)) __all__ = [ 'create_connection', 'mount_volume', 'install_python_packages', 'install_packages', 'sync_files' ]
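

# Illustrative usage sketch: chaining the helpers above to prepare a remote
# host and push a directory to it. The host address, key path, device name,
# environment name and paths below are placeholders, not values from this
# project.
#
#     conn = create_connection('203.0.113.10', 'ubuntu', '~/.ssh/id_rsa')
#     mount_volume(conn, '/dev/xvdf', '/data', 'ubuntu', 'ubuntu')
#     install_packages(conn, ['rsync'])
#     install_python_packages(conn, 'my-env', ['numpy'])
#     sync_files(conn, './results', '/data/results',
#                is_upload=True, is_recursive=True)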
py
1a5b48eb6567d0c9614fe2ad4598ea20c210aa1d
# -*- coding: utf-8 -*- { '!langcode!': 'pt-br', '!langname!': 'Português (do Brasil)', '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "campo1=\'novovalor\'". Você não pode atualizar ou apagar os resultados de um JOIN', '%s %%{row} deleted': '%s linhas apagadas', '%s %%{row} updated': '%s linhas atualizadas', '%s selected': '%s selecionado', '%Y-%m-%d': '%d-%m-%Y', '%Y-%m-%d %H:%M:%S': '%d-%m-%Y %H:%M:%S', 'About': 'About', 'Access Control': 'Access Control', 'Administrative Interface': 'Administrative Interface', 'Administrative interface': 'Interface administrativa', 'Ajax Recipes': 'Ajax Recipes', 'appadmin is disabled because insecure channel': 'Administração desativada devido ao canal inseguro', 'April': 'April', 'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?', 'August': 'August', 'Available Databases and Tables': 'Bancos de dados e tabelas disponíveis', 'Buy this book': 'Buy this book', 'cache': 'cache', 'Cache': 'Cache', 'Cache Keys': 'Cache Keys', 'Cannot be empty': 'Não pode ser vazio', 'change password': 'modificar senha', 'Check to delete': 'Marque para apagar', 'Clear CACHE?': 'Clear CACHE?', 'Clear DISK': 'Clear DISK', 'Clear RAM': 'Clear RAM', 'Client IP': 'Client IP', 'Community': 'Community', 'Components and Plugins': 'Components and Plugins', 'Controller': 'Controlador', 'Copyright': 'Copyright', 'Current request': 'Requisição atual', 'Current response': 'Resposta atual', 'Current session': 'Sessão atual', 'customize me!': 'Personalize-me!', 'data uploaded': 'dados enviados', 'Database': 'banco de dados', 'Database %s select': 'Selecionar banco de dados %s', 'db': 'bd', 'DB Model': 'Modelo BD', 'December': 'December', 'Delete:': 'Apagar:', 'Demo': 'Demo', 'Deployment Recipes': 'Deployment Recipes', 'Description': 'Description', 'design': 'design', 'DISK': 'DISK', 'Disk Cache Keys': 'Disk Cache Keys', 'Disk Cleared': 'Disk Cleared', 'Documentation': 'Documentation', "Don't know what to do?": "Don't know what to do?", 'done!': 'concluído!', 'Download': 'Download', 'E-mail': 'E-mail', 'Edit': 'Editar', 'Edit current record': 'Editar o registro atual', 'edit profile': 'editar perfil', 'Edit This App': 'Edit This App', 'Email and SMS': 'Email and SMS', 'Enter a number between %(min)g and %(max)g': 'Enter a number between %(min)g and %(max)g', 'Enter an integer between %(min)g and %(max)g': 'Enter an integer between %(min)g and %(max)g', 'Errors': 'Errors', 'export as csv file': 'exportar como um arquivo csv', 'FAQ': 'FAQ', 'February': 'February', 'First name': 'First name', 'Forms and Validators': 'Forms and Validators', 'Free Applications': 'Free Applications', 'Group ID': 'Group ID', 'Groups': 'Groups', 'Hello World': 'Olá Mundo', 'Home': 'Home', 'How did you get here?': 'How did you get here?', 'import': 'import', 'Import/Export': 'Importar/Exportar', 'Index': 'Início', 'insert new': 'inserir novo', 'insert new %s': 'inserir novo %s', 'Internal State': 'Estado Interno', 'Introduction': 'Introduction', 'Invalid email': 'Invalid email', 'Invalid Query': 'Consulta Inválida', 'invalid request': 'requisição inválida', 'Invalid username': 'Invalid username', 'January': 'January', 'July': 'July', 'June': 'June', 'Key': 'Key', 'Last name': 'Last name', 'Layout': 'Layout', 'Layout Plugins': 'Layout Plugins', 'Layouts': 'Layouts', 'Live chat': 'Live chat', 'Live Chat': 'Live Chat', 'login': 'Entrar', 'Login': 'Autentique-se', 
'logout': 'Sair', 'Lost Password': 'Esqueceu sua senha?', 'lost password?': 'lost password?', 'Main Menu': 'Menu Principal', 'Manage Cache': 'Manage Cache', 'March': 'March', 'May': 'May', 'Menu Model': 'Modelo de Menu', 'My Sites': 'My Sites', 'Name': 'Name', 'New Record': 'Novo Registro', 'new record inserted': 'novo registro inserido', 'next 100 rows': 'próximas 100 linhas', 'No databases in this application': 'Sem bancos de dados nesta aplicação', 'November': 'November', 'Object or table name': 'Object or table name', 'October': 'October', 'Online examples': 'Alguns exemplos', 'Only files are accepted with extension': 'Only files are accepted with extension', 'or import from csv file': 'ou importar de um arquivo csv', 'Origin': 'Origin', 'Other Plugins': 'Other Plugins', 'Other Recipes': 'Other Recipes', 'Overview': 'Overview', 'Password': 'Password', 'Plugins': 'Plugins', 'Powered by': 'Powered by', 'Preface': 'Preface', 'previous 100 rows': '100 linhas anteriores', 'Python': 'Python', 'Query:': 'Consulta:', 'Quick Examples': 'Quick Examples', 'RAM': 'RAM', 'RAM Cache Keys': 'RAM Cache Keys', 'Ram Cleared': 'Ram Cleared', 'Recipes': 'Recipes', 'Record': 'registro', 'record does not exist': 'registro não existe', 'Record ID': 'Record ID', 'Record id': 'id do registro', 'Register': 'Registre-se', 'register': 'Registre-se', 'Registration identifier': 'Registration identifier', 'Registration key': 'Registration key', 'Reset Password key': 'Reset Password key', 'Resources': 'Resources', 'Role': 'Role', 'Rows in Table': 'Linhas na tabela', 'Rows selected': 'Linhas selecionadas', 'Semantic': 'Semantic', 'September': 'September', 'Services': 'Services', 'Size of cache:': 'Size of cache:', 'state': 'estado', 'Statistics': 'Statistics', 'Stylesheet': 'Stylesheet', 'submit': 'submit', 'Support': 'Support', 'Sure you want to delete this object?': 'Está certo(a) que deseja apagar esse objeto ?', 'Table': 'tabela', 'Table name': 'Table name', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'Uma "consulta" é uma condição como "db.tabela1.campo1==\'valor\'". Expressões como "db.tabela1.campo1==db.tabela2.campo2" resultam em um JOIN SQL.', 'The Core': 'The Core', 'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s', 'The Views': 'The Views', 'This App': 'This App', 'This email already has an account': 'This email already has an account', 'This is a copy of the scaffolding application': 'This is a copy of the scaffolding application', 'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)', 'Timestamp': 'Timestamp', 'Twitter': 'Twitter', 'unable to parse csv file': 'não foi possível analisar arquivo csv', 'Update:': 'Atualizar:', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, e ~(...) 
para NOT para construir consultas mais complexas.', 'User ID': 'User ID', 'User Voice': 'User Voice', 'Username': 'Username', 'Username already taken': 'Username already taken', 'Videos': 'Videos', 'View': 'Visualização', 'Web2py': 'Web2py', 'Welcome': 'Welcome', 'Welcome %s': 'Vem vindo %s', 'Welcome to web2py': 'Bem vindo ao web2py', 'Welcome to web2py!': 'Welcome to web2py!', 'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s', 'with 200x300px size': 'with 200x300px size', 'with 480x280px size': 'with 480x280px size', 'You are successfully running web2py': 'You are successfully running web2py', 'You are successfully running web2py.': 'You are successfully running web2py.', 'You can modify this application and adapt it to your needs': 'You can modify this application and adapt it to your needs', 'You visited the url %s': 'You visited the url %s', }
py
1a5b49ea820cf1df255240dc1e8f150097138336
# Copyright 2018 ICON Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Base class for components that run on their own thread."""

import logging
import threading
from abc import abstractmethod


class CommonThread:
    """Collects the parts shared by classes that run as a thread.

    Makes it easy to see explicitly which parts of the code run on a thread.
    """

    def __init__(self):
        logging.debug("CommonThread init")
        self.__is_run = False
        self.__run_thread = None

    def is_run(self):
        return self.__is_run

    def start(self):
        """Start the thread.

        When overriding this in a subclass, CommonThread.start(self)
        must be called.
        """
        logging.debug("CommonThread start")
        event = threading.Event()

        self.__is_run = True
        self.__run_thread = threading.Thread(target=self.run, args=(event, ))
        self.__run_thread.daemon = True
        self.__run_thread.start()

        event.wait()

    def stop(self):
        """Stop the thread.

        When overriding this in a subclass, CommonThread.stop(self)
        must be called.
        """
        # logging.debug("try stop thread...")
        self.__is_run = False

    def wait(self):
        """Wait for the thread to terminate."""
        logging.debug("CommonThread wait thread...")
        self.__run_thread.join()

    @abstractmethod
    def run(self, event: threading.Event):
        """Define the loop run by the thread. See the sample implementation below.

        # # sample implementation
        # event.set()
        # while self.is_run():
        #     time.sleep(conf.SLEEP_SECONDS_IN_SERVICE_LOOP)
        """
        pass
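

# Minimal usage sketch (an assumption about intended use, based on the
# docstring of run() above): a worker subclass that signals readiness via
# the event and then loops until stop() is called. The class name and sleep
# interval are illustrative only.
if __name__ == "__main__":
    import time

    class _ExampleWorker(CommonThread):
        def run(self, event: threading.Event):
            event.set()  # let start() return once the loop is ready
            while self.is_run():
                time.sleep(0.1)  # placeholder for real periodic work

    worker = _ExampleWorker()
    worker.start()  # returns after run() has called event.set()
    worker.stop()   # request the loop to exit
    worker.wait()   # join the underlying thread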
py
1a5b4ad141b3a94dab5a42dada0fbecb553712b2
import numpy as np import tensorflow as tf from tensorflow.keras.models import Model, Sequential from tensorflow.keras.layers import Dense, BatchNormalization, ReLU, Input, LSTM, Concatenate, Masking, Reshape, Lambda, \ Bidirectional, GRU, LayerNormalization, Bidirectional, Conv2D, Conv1D, MaxPooling2D, Flatten, LayerNormalization, Layer, Embedding, MultiHeadAttention, Dropout from tensorflow.keras.regularizers import l1, l2 import tensorflow_probability as tfp tfd = tfp.distributions tfb = tfp.bijectors tfpl = tfp.layers from lfp.custom_layers import LearnedInitLSTM, LearnedInitGRU from tensorflow.keras.layers.experimental.preprocessing import Rescaling def latent_normal(inputs): mu, scale = inputs dist = tfd.Normal(loc=mu, scale=scale) return dist def logistic_mixture(inputs, qbits=None): """ :param inputs: :param qbits: number of quantisation bits, total quantisation intervals = 2 ** qbits :return: """ weightings, mu, scale = inputs if qbits is not None: dist = tfd.Logistic(loc=mu, scale=scale) dist = tfd.QuantizedDistribution( distribution=tfd.TransformedDistribution( distribution=dist, bijector=tfb.Shift(shift=-0.5)), low=-2 ** qbits / 2., high=2 ** qbits / 2., ) else: dist = tfd.Logistic(loc=mu, scale=scale) mixture_dist = tfd.MixtureSameFamily( mixture_distribution=tfd.Categorical(logits=weightings), components_distribution=dist, validate_args=True ) if qbits is not None: action_limits = tf.constant([1.5, 1.5, 2.2, 3.2, 3.2, 3.2, 1.1]) mixture_dist = tfd.TransformedDistribution( distribution=mixture_dist, bijector=tfb.Scale(scale=action_limits / (2 ** qbits / 2.)) # scale to action limits ) return mixture_dist def create_actor(obs_dim, act_dim, goal_dim, layer_size=1024, latent_dim=256, epsilon=1e-4, num_distribs=None, qbits=None, gcbc=False, training=True, return_state=False, discrete=False, disc_embed_size=64, **kwargs): # params # batch_size = None if training else 1 stateful = not training # Input # o = Input(shape=(None, obs_dim), batch_size=batch_size, dtype=tf.float32, name='input_obs') z = Input(shape=(None, latent_dim), batch_size=batch_size, dtype=tf.float32, name='input_latent') g = Input(shape=(None, goal_dim), batch_size=batch_size, dtype=tf.float32, name='input_goals') # RNN # if discrete: embed = Dense(disc_embed_size, activation = 'relu', name='disc_to_cts_embedding')(z) x = Concatenate(axis=-1)([o, embed, g]) else: x = Concatenate(axis=-1)([o, z, g]) x = Masking(mask_value=0.)(x) if return_state: x, _, state1 = LSTM(layer_size, return_sequences=True, stateful=stateful, name='LSTM_in_1', return_state=return_state)(x) x, _, state2 = LSTM(layer_size, return_sequences=True, stateful=stateful, name='LSTM_in_2', return_state=return_state)(x) else: x = LSTM(layer_size, return_sequences=True, stateful=stateful, name='LSTM_in_1', return_state=return_state)(x) x = LSTM(layer_size, return_sequences=True, stateful=stateful, name='LSTM_in_2', return_state=return_state)(x) # Probabilistic Mixture Model # if num_distribs is not None: weightings = Dense(act_dim * num_distribs, activation=None, name='alpha')(x) mu = Dense(act_dim * num_distribs, activation=None, name='mu')(x) scale = Dense(act_dim * num_distribs, activation="softplus", name='sigma')(x + epsilon) weightings = Reshape((-1, act_dim, num_distribs))(weightings) mu = Reshape((-1, act_dim, num_distribs))(mu) scale = Reshape((-1, act_dim, num_distribs))(scale) actions = tfpl.DistributionLambda(logistic_mixture, name='logistic_mix')([weightings, mu, scale], qbits) else: actions = Dense(act_dim, activation=None, 
name='acts')(x) if return_state: if gcbc: return Model([o, g], [actions, state1, state2]) else: return Model([o, z, g], [actions, state1, state2]) else: if gcbc: return Model([o, g], actions) else: return Model([o, z, g], actions) def create_encoder(enc_in_dim, layer_size=2048, latent_dim=256, epsilon=1e-4, training=True, **kwargs): # Input # inputs = Input(shape=(None, enc_in_dim), dtype=tf.float32, name='encoder_in') # Layers # x = Masking(mask_value=0.)(inputs) x = Bidirectional(LSTM(layer_size, return_sequences=True), merge_mode='concat')(x) x = Bidirectional(LSTM(layer_size, return_sequences=False), merge_mode='concat')(x) # Latent Variable # mu = Dense(latent_dim, activation=None, name='mu')(x) scale = Dense(latent_dim, activation="softplus", name='sigma')(x + epsilon) mixture = tfpl.DistributionLambda(latent_normal, name='latent_variable')((mu, scale)) return Model([inputs], mixture) # def create_discrete_encoder(enc_in_dim, layer_size=2048, latent_dim=1024, **kwargs): # # Input # # inputs = Input(shape=(None, enc_in_dim), dtype=tf.float32, name='encoder_in') # # Layers # # x = Masking(mask_value=0.)(inputs) # x = Bidirectional(LSTM(layer_size, return_sequences=True), merge_mode='concat')(x) # x = Bidirectional(LSTM(layer_size, return_sequences=False), merge_mode='concat')(x) # logits = Dense(latent_dim, name='to_vocab')(x) # return Model([inputs], logits) def create_discrete_encoder(enc_in_dim, layer_size=128, latent_dim=64, reductions=3, **kwargs): # Input # inputs = Input(shape=(None, enc_in_dim), dtype=tf.float32, name='encoder_in') # Layers # x = Masking(mask_value=0.)(inputs) x = Bidirectional(LSTM(layer_size, return_sequences=True), merge_mode='concat')(x) x = Bidirectional(LSTM(layer_size, return_sequences=True), merge_mode='concat')(x) for l in range(reductions-1): print(l) x = Conv1D(layer_size, kernel_size=3, strides=2, padding="same")(x) embed = Conv1D(latent_dim, kernel_size=3, strides=2, padding="same")(x) return Model([inputs], embed) def create_planner(obs_dim, goal_dim, layer_size=2048, latent_dim=256, epsilon=1e-4, training=True, **kwargs): # params # batch_size = None # Input # o_i = Input(shape=(obs_dim,), batch_size=batch_size, dtype=tf.float32, name='initial_obs') # has arm state o_g = Input(shape=(goal_dim,), batch_size=batch_size, dtype=tf.float32, name='goal_obs') # does not have arm state # Layers # x = Concatenate(axis=-1)([o_i, o_g]) x = Masking(mask_value=0.)(x) x = Dense(layer_size, activation="relu", name='layer_1')(x) x = Dense(layer_size, activation="relu", name='layer_2')(x) x = Dense(layer_size, activation="relu", name='layer_3')(x) x = Dense(layer_size, activation="relu", name='layer_4')(x) # Latent Variable # mu = Dense(latent_dim, activation=None, name='mu')(x) scale = Dense(latent_dim, activation="softplus", name='sigma')(x + epsilon) mixture = tfpl.DistributionLambda(latent_normal, name='latent_variable')((mu, scale)) return Model([o_i, o_g], mixture) def create_discrete_planner(obs_dim, goal_dim, layer_size=2048, latent_dim=256, epsilon=1e-4, training=True, **kwargs): ''' takes in size B, N_TILES, D for start_state and goal_state LSTM then predicts which discrete plan it should be for each tile ''' # params # batch_size = None if training else 1 stateful = not training # Input # o = Input(shape=(None, obs_dim), batch_size=batch_size, dtype=tf.float32, name='input_obs') g = Input(shape=(None, goal_dim), batch_size=batch_size, dtype=tf.float32, name='input_goals') # RNN # x = Concatenate(axis=-1)([o, g]) x = LSTM(layer_size, 
return_sequences=True, stateful=stateful, name='LSTM_in_1', return_state=False)(x) x = LSTM(layer_size, return_sequences=True, stateful=stateful, name='LSTM_in_2', return_state=False)(x) tokens = Dense(latent_dim, name='acts')(x) return Model([o, g], tokens) # maps from sentence embedding space to goal dim space def create_goal_space_mapper(input_embedding_dim, goal_embedding_dim, layer_size=2048, **kwargs): # params # batch_size = None # Input # input_embeddings = Input(shape=(input_embedding_dim,), batch_size=batch_size, dtype=tf.float32, name='lang_embeds') # embeddings created by MUSE or equiv # Layers # x = Masking(mask_value=0.)(input_embeddings) x = Dense(layer_size, activation="relu", name='layer_1')(x) x = Dense(layer_size, activation="relu", name='layer_2')(x) goal_embeddings = Dense(goal_embedding_dim, activation=None, name='goal_space')(x) return Model(input_embeddings, goal_embeddings) # InfoVAE related def compute_kernel(x, y): x_size = tf.shape(x)[0] y_size = tf.shape(y)[0] dim = tf.shape(x)[1] tiled_x = tf.tile(tf.reshape(x, tf.stack([x_size, 1, dim])), tf.stack([1, y_size, 1])) tiled_y = tf.tile(tf.reshape(y, tf.stack([1, y_size, dim])), tf.stack([x_size, 1, 1])) return tf.exp(-tf.reduce_mean(tf.square(tiled_x - tiled_y), axis=2) / tf.cast(dim, tf.float32)) def compute_mmd(x, y): x_kernel = compute_kernel(x, x) y_kernel = compute_kernel(y, y) xy_kernel = compute_kernel(x, y) return tf.reduce_mean(x_kernel) + tf.reduce_mean(y_kernel) - 2 * tf.reduce_mean(xy_kernel) # # Standard CNN boi # def create_vision_network(img_height, img_width, embedding_size = 256): # return Sequential([ # Rescaling(1./255, input_shape=(img_height, img_width, 3)), # put it here for portability # Conv2D(32, 3, padding='same', activation='relu'), # MaxPooling2D(), # Conv2D(32, 3, padding='same', activation='relu'), # MaxPooling2D(), # Conv2D(64, 3, padding='same', activation='relu'), # MaxPooling2D(), # Conv2D(64, 3, padding='same', activation='relu'), # MaxPooling2D(), # Conv2D(128, 3, padding='same', activation='relu'), # MaxPooling2D(), # Conv2D(128, 3, padding='same', activation='relu'), # MaxPooling2D(), # Conv2D(64, 3, padding='same', activation='relu', name='features'), # Flatten(), # Dense(512, activation='relu'), # Dense(embedding_size), # ], name = 'feature_encoder') # # Has a cheeky 10M params but ok. This is the option which uses spatial softmax. # class cnn(tf.keras.Model): # # TODO: Make height width dependent # def __init__(self, img_height=128, img_width = 128, img_channels=3, embedding_size=64): # super(cnn, self).__init__() # self.img_height = img_height # self.img_width = img_width # self.img_channels = img_channels # self.rescaling = Rescaling(1./255, input_shape=(img_height, img_width, img_channels)) # put it here for portability # self.conv1 = Conv2D(32, 8, strides=(4,4), padding='same', activation='relu', name='c1') # self.conv2 = Conv2D(64, 4, strides=(2,2), padding='same', activation='relu', name='c2') # self.conv3 = Conv2D(64, 4, strides=(2,2), padding='same', activation='relu', name='c3') # self.conv4 = Conv2D(64, 3, strides=(1,1), padding='same', activation='relu', name='c4') # # In between these, do a spatial softmax # self.flatten = Flatten() # self.dense1 = Dense(512, activation='relu') # self.dense2 = Dense(embedding_size) # def call(self, inputs): # x = self.rescaling(inputs) # x = self.conv1(x) # x = self.conv2(x) # x = self.conv3(x) # pre_softmax = self.conv4(x) # # Assume features is of size [N, H, W, C] (batch_size, height, width, channels). 
# # Transpose it to [N, C, H, W], then reshape to [N * C, H * W] to compute softmax # # jointly over the image dimensions. # N, H, W, C = pre_softmax.shape # pre_softmax = tf.reshape(tf.transpose(pre_softmax, [0, 3, 1, 2]), [N * C, H * W]) # softmax = tf.nn.softmax(pre_softmax) # # Reshape and transpose back to original format. # softmax = tf.transpose(tf.reshape(softmax, [N, C, H, W]), [0, 2, 3, 1]) # x = self.flatten(softmax) # x = self.dense1(x) # return self.dense2(x) # Has a cheeky 10M params but ok. This is the option which uses spatial softmax. class spatial_softmax_cnn(tf.keras.Model): # TODO: Make height width dependent def __init__(self, img_height=128, img_width = 128, img_channels=3, embedding_size=64, return_spatial_softmax = False): super(spatial_softmax_cnn, self).__init__() self.img_height = img_height self.img_width = img_width self.img_channels = img_channels self.rescaling = Rescaling(1./255, input_shape=(img_height, img_width, img_channels)) # put it here for portability self.conv1 = Conv2D(32, 8, strides=(4,4), padding='same', activation='relu', name='c1') self.conv2 = Conv2D(64, 4, strides=(2,2), padding='same', activation='relu', name='c2') self.conv3 = Conv2D(64, 3, strides=(1,1), padding='same', activation='relu', name='c3') # In between these, do a spatial softmax self.flatten = Flatten() self.dense1 = Dense(512, activation='relu') self.dense2 = Dense(embedding_size) self.return_spatial_softmax = return_spatial_softmax def call(self, inputs): x = self.rescaling(inputs) x = self.conv1(x) x = self.conv2(x) pre_softmax = self.conv3(x) # pre_softmax = self.conv4(x) # Assume features is of size [N, H, W, C] (batch_size, height, width, channels). # Transpose it to [N, C, H, W], then reshape to [N * C, H * W] to compute softmax # jointly over the image dimensions. N, H, W, C = pre_softmax.shape pre_softmax = tf.reshape(tf.transpose(pre_softmax, [0, 3, 1, 2]), [N * C, H * W]) softmax = tf.nn.softmax(pre_softmax) # Reshape and transpose back to original format. 
softmax = tf.transpose(tf.reshape(softmax, [N, C, H, W]), [0, 2, 3, 1]) # N, H, W, C # Expand dims by 1 softmax = tf.expand_dims(softmax, -1) x, y = tf.range(0, W)/W, tf.range(0, H)/H # so that feature locations are on a 0-1 scale not 0-128 X,Y = tf.meshgrid(x,y) # Image coords is a tensor of size [H,W,2] representing the image coordinates of each pixel image_coords = tf.cast(tf.stack([X,Y],-1), tf.float32) image_coords= tf.expand_dims(image_coords, 2) # multiply to get feature locations spatial_soft_argmax = tf.reduce_sum(softmax * image_coords, axis=[1,2]) x = self.flatten(spatial_soft_argmax) x = self.dense1(x) return self.dense2(x), spatial_soft_argmax class intensities_spatial_softmax_cnn(tf.keras.Model): # TODO: Make height width dependent def __init__(self, img_height=128, img_width = 128, img_channels=3, embedding_size=64, return_spatial_softmax = False): super(intensities_spatial_softmax_cnn, self).__init__() self.img_height = img_height self.img_width = img_width self.img_channels = img_channels self.rescaling = Rescaling(1./255, input_shape=(img_height, img_width, img_channels)) # put it here for portability self.conv1 = Conv2D(32, 8, strides=(4,4), padding='same', activation='relu', name='c1') self.conv2 = Conv2D(64, 4, strides=(2,2), padding='same', activation='relu', name='c2') self.conv3 = Conv2D(64, 3, strides=(1,1), padding='same', activation='relu', name='c3') # In between these, do a spatial softmax self.flatten = Flatten() self.dense1 = Dense(512, activation='relu') self.dense2 = Dense(embedding_size) self.return_spatial_softmax = return_spatial_softmax def call(self, inputs): x = self.rescaling(inputs) x = self.conv1(x) x = self.conv2(x) pre_softmax = self.conv3(x) # pre_softmax = self.conv4(x) # Assume features is of size [N, H, W, C] (batch_size, height, width, channels). # Transpose it to [N, C, H, W], then reshape to [N * C, H * W] to compute softmax # jointly over the image dimensions. N, H, W, C = pre_softmax.shape pre_softmax = tf.reshape(tf.transpose(pre_softmax, [0, 3, 1, 2]), [N * C, H * W]) softmax = tf.nn.softmax(pre_softmax) # Reshape and transpose back to original format. softmax = tf.transpose(tf.reshape(softmax, [N, C, H, W]), [0, 2, 3, 1]) # N, H, W, C # Expand dims by 1 softmax = tf.expand_dims(softmax, -1) x, y = tf.range(0, W)/W, tf.range(0, H)/H # so that feature locations are on a 0-1 scale not 0-128 X,Y = tf.meshgrid(x,y) # Image coords is a tensor of size [H,W,2] representing the image coordinates of each pixel image_coords = tf.cast(tf.stack([X,Y],-1), tf.float32) image_coords= tf.expand_dims(image_coords, 2) # multiply to get feature locations spatial_soft_argmax = tf.reduce_sum(softmax * image_coords, axis=[1,2]) # Get indices corresponding to each batch_indices =tf.reshape(tf.repeat(tf.range(0,N,1)[tf.newaxis,:], C), [N,C])[:,:,tf.newaxis] # 0,0,0, 1,1,1, etc as batch indices keypoint_indices = tf.tile(tf.range(0,C,1)[tf.newaxis, :], [N,1])[:, :, tf.newaxis] # numbers 1,2,3... 1,2,3... 
keypoints, batches appropriately assert W == H # this next step is currently only coded for squares keypoint_img_indices = tf.reverse(tf.cast(spatial_soft_argmax * W, tf.int32), [-1]) # gather nd has opposite axes to images, x is y, y is x gather_indices = tf.concat([batch_indices, keypoint_img_indices, keypoint_indices], axis = -1) feature_intensities = tf.gather_nd(softmax, gather_indices) # N, C, 1 keypoints_with_intensities = tf.concat([feature_intensities, spatial_soft_argmax], -1) x = self.flatten(keypoints_with_intensities) x = self.dense1(x) return self.dense2(x), keypoints_with_intensities class impala_cnn(tf.keras.Model): def __init__(self, img_height=128, img_width = 128, img_channels=3, embedding_size=64, return_spatial_softmax = False, l1=16, l2=32, l3=32): super(impala_cnn, self).__init__() self.img_height = img_height self.img_width = img_width self.img_channels = img_channels self.rescaling = Rescaling(1./255, input_shape=(img_height, img_width, img_channels)) # put it here for portability self.conv_1 = Conv2D(l1, 3, strides=(2,2), padding='same', activation='relu', name='c1') self.res_1_1 = Conv2D(l1, 3, strides=(1,1), padding='same', activation='relu', name='r1_1') self.res_1_2 = Conv2D(l1, 3, strides=(1,1), padding='same', activation='relu', name='r1_2') self.conv_2 = Conv2D(l2, 3, strides=(2,2), padding='same', activation='relu', name='c2') self.res_2_1 = Conv2D(l2, 3, strides=(1,1), padding='same', activation='relu', name='r2_1') self.res_2_2 = Conv2D(l2, 3, strides=(1,1), padding='same', activation='relu', name='r2_2') self.conv_3 = Conv2D(l3, 3, strides=(2,2), padding='same', activation='relu', name='c3') self.res_3_1 = Conv2D(l3, 3, strides=(1,1), padding='same', activation='relu', name='r3_1') self.res_3_2 = Conv2D(l3, 3, strides=(1,1), padding='same', activation='relu', name='r3_2') # In between these, do a spatial softmax self.flatten = Flatten() self.dense1 = Dense(256, activation='relu') self.dense2 = Dense(embedding_size) self.return_spatial_softmax = return_spatial_softmax def call(self, inputs): x = self.rescaling(inputs) x = self.conv_1(x) r1 = self.res_1_1(x) x = self.res_1_2(r1) + x x = self.conv_2(x) r1 = self.res_2_1(x) x = self.res_2_2(r1) + x x = self.conv_3(x) r1 = self.res_3_1(x) x = self.res_3_2(r1) + x x = self.flatten(x) x = self.dense1(x) x = self.dense2(x) return x, 0 # for compat with spatial softmax returns class deep_impala_cnn(impala_cnn): def __init__(self, img_height=128, img_width = 128, img_channels=3, embedding_size=64, return_spatial_softmax = False): super(deep_impala_cnn, self).__init__(img_height, img_width, img_channels, embedding_size, return_spatial_softmax, l1=64, l2=128, l3=128) CNN_DICT= {'spatial_softmax': spatial_softmax_cnn, 'intensities_spatial_softmax': intensities_spatial_softmax_cnn, 'impala': impala_cnn, 'deep_impala': deep_impala_cnn} ############################################################################### def causal_attention_mask(batch_size, n_dest, n_src, dtype): """ Mask the upper half of the dot product matrix in self attention. This prevents flow of information from future tokens to current token. 1's in the lower triangle, counting from the lower right corner. 
""" i = tf.range(n_dest)[:, None] j = tf.range(n_src) m = i >= j - n_src + n_dest mask = tf.cast(m, dtype) mask = tf.reshape(mask, [1, n_dest, n_src]) mult = tf.concat( [tf.expand_dims(batch_size, -1), tf.constant([1, 1], dtype=tf.int32)], 0 ) return tf.tile(mask, mult) class TransformerBlock(Layer): def __init__(self, embed_dim, num_heads, ff_dim, rate=0.1): super(TransformerBlock, self).__init__() self.att = MultiHeadAttention(num_heads, embed_dim) self.ffn = Sequential( [Dense(ff_dim, activation="relu"), Dense(embed_dim),] ) self.layernorm1 = LayerNormalization(epsilon=1e-6) self.layernorm2 = LayerNormalization(epsilon=1e-6) self.dropout1 = Dropout(rate) self.dropout2 = Dropout(rate) def call(self, inputs): input_shape = tf.shape(inputs) batch_size = input_shape[0] seq_len = input_shape[1] causal_mask = causal_attention_mask(batch_size, seq_len, seq_len, tf.bool) attention_output = self.att(inputs, inputs, attention_mask=causal_mask) attention_output = self.dropout1(attention_output) out1 = self.layernorm1(inputs + attention_output) ffn_output = self.ffn(out1) ffn_output = self.dropout2(ffn_output) return self.layernorm2(out1 + ffn_output) class PositionEmbedding(Layer): def __init__(self, maxlen, embed_dim): super(PositionEmbedding, self).__init__() self.pos_emb = Embedding(input_dim=maxlen, output_dim=embed_dim) def call(self, x): maxlen = tf.shape(x)[-2] positions = tf.range(start=0, limit=maxlen, delta=1) positions = self.pos_emb(positions) return x + positions # def create_conditional_transformer(vocab_size, max_len, embed_dim, num_heads, feed_forward_dim=256, num_layers=1): # goal = Input(shape=(1, goal_dim,), dtype=tf.float32) # so that we can concat easily # seq = Input(shape=(max_len,), dtype=tf.int32) # goal_embed = Dense(embed_dim, activation='relu', name='goal_embed')(goal) # convert the goal to the same embedding dim as the seq # token_embeddings = Embedding(input_dim=vocab_size, output_dim=embed_dim)(seq) # embed the seq # x = Concatenate(axis=-2)([goal_embed, token_embeddings]) # # # embedding_layer = PositionEmbedding(max_len+1, embed_dim) # x = embedding_layer(x) # for i in range(num_layers): # x = TransformerBlock(embed_dim, num_heads, feed_forward_dim)(x) # outputs = Dense(vocab_size)(x) # model = Model(inputs=[goal, seq], outputs={'logits': outputs, 'x':x}) # return model class conditional_transformer(Model): # TODO: Make height width dependent def __init__(self, vocab_size, max_len,embed_dim, num_heads, feed_forward_dim=256, num_layers=1): super(conditional_transformer, self).__init__() self.goal_embed = Dense(embed_dim, activation='relu', name='goal_embed') self.state_embed = Dense(embed_dim, activation='relu', name='state_embed') self.token_embeddings = Embedding(input_dim=vocab_size, output_dim=embed_dim) self.embedding_layer = PositionEmbedding(max_len+1, embed_dim) self.tformer_layers = [TransformerBlock(embed_dim, num_heads, feed_forward_dim) for i in range(num_layers)] self.outputs = Dense(vocab_size) def expand(self, input): if len(input.shape) == 2: return input[:, tf.newaxis, :] # insert a time dim elif len(input.shape) == 1: return input[tf.newaxis, tf.newaxis, :] def call(self, inputs): current_state, goal, seq = inputs # seq should be 1, T (indices) current_state = self.expand(current_state) goal = self.expand(goal) state_embed = self.state_embed(current_state) goal_embed = self.goal_embed(goal) if seq is not None: seq_embed = self.token_embeddings(seq) x = Concatenate(axis=-2)([goal_embed, state_embed, seq_embed]) else: x = 
Concatenate(axis=-2)([goal_embed, state_embed]) x = self.embedding_layer(x) for l in self.tformer_layers: x = l(x) logits = self.outputs(x) return {'logits': logits, 'x': x}
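
# --- Illustrative usage sketch (appended for clarity; not part of the original module) ---
# Minimal, hedged demo of the causal mask defined above, assuming TensorFlow 2.x.
# For a sequence of length 4 the mask is lower-triangular (1s on and below the
# diagonal), so token i can only attend to tokens j <= i.
if __name__ == "__main__":
    import tensorflow as tf  # already imported by this module; repeated so the snippet stands alone

    demo_mask = causal_attention_mask(1, 4, 4, tf.int32)
    # demo_mask[0] ->
    # [[1 0 0 0]
    #  [1 1 0 0]
    #  [1 1 1 0]
    #  [1 1 1 1]]
    print(demo_mask.numpy()[0])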
py
1a5b4e692fca3ad1503b94350e0ceb3e0b11d1d9
import pytest from icecreamrating_v2.users.models import User pytestmark = pytest.mark.django_db def test_user_get_absolute_url(user: User): assert user.get_absolute_url() == f"/users/{user.username}/"
py
1a5b4e83516d404051c93492f8e74f9d5c16524b
import tests.experiments.blueprints.gaussian_data_gaussian_sm_prim_kd as blueprint
from active_learning_ts.experiments.experiment_runner import ExperimentRunner


def test_gaussian_data_gaussian_sm_prim_kd():
    """
    Smoke test of the entire framework. There is currently no way to write such a test.
    """
    er = ExperimentRunner([blueprint.GaussianDataGaussianSMPrim])
    er.run()
py
1a5b51f7b1e59f254966ab4a35fda9b03fa593af
# -*- coding: utf-8 -*- """ Copyright (c) 2018 Keijack Wu Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import json import socket import os import re import ssl as _ssl import threading import time import asyncio from collections import OrderedDict from socketserver import ThreadingMixIn, TCPServer from types import coroutine from urllib.parse import unquote from urllib.parse import quote from typing import Any, Awaitable, Callable, Coroutine, Dict, List, Tuple from simple_http_server import ControllerFunction, StaticFile from .base_request_handler import BaseHTTPRequestHandler from .wsgi_request_handler import WSGIRequestHandler from .__utils import remove_url_first_slash, get_function_args, get_function_kwargs from .logger import get_logger _logger = get_logger("simple_http_server.http_server") class RoutingConf: HTTP_METHODS = ["OPTIONS", "GET", "HEAD", "POST", "PUT", "DELETE", "TRACE", "CONNECT"] def __init__(self, res_conf={}): self.method_url_mapping: Dict[str, Dict[str, ControllerFunction]] = {"_": {}} self.path_val_url_mapping: Dict[str, Dict[str, ControllerFunction]] = {"_": OrderedDict()} self.method_regexp_mapping: Dict[str, Dict[str, ControllerFunction]] = {"_": OrderedDict()} for mth in self.HTTP_METHODS: self.method_url_mapping[mth] = {} self.path_val_url_mapping[mth] = OrderedDict() self.method_regexp_mapping[mth] = OrderedDict() self.filter_mapping = OrderedDict() self._res_conf = [] self.add_res_conf(res_conf) self.ws_mapping = OrderedDict() self.ws_path_val_mapping = OrderedDict() self.error_page_mapping = {} @property def res_conf(self): return self._res_conf @res_conf.setter def res_conf(self, val: Dict[str, str]): self._res_conf.clear() self.add_res_conf(val) def add_res_conf(self, val: Dict[str, str]): if not val or not isinstance(val, dict): return for res_k, v in val.items(): if res_k.startswith("/"): k = res_k[1:] else: k = res_k if k.endswith("/*"): key = k[0:-1] elif k.endswith("/**"): key = k[0:-2] elif k.endswith("/"): key = k else: key = k + "/" if v.endswith(os.path.sep): val = v else: val = v + os.path.sep self._res_conf.append((key, val)) self._res_conf.sort(key=lambda it: -len(it[0])) def __get_path_reg_pattern(self, url): _url: str = url path_names = re.findall("(?u)\\{\\w+\\}", _url) if len(path_names) == 0: # normal url return None, path_names for name in path_names: _url = _url.replace(name, "([\\w%.-@!\\(\\)\\[\\]\\|\\$]+)") _url = f"^{_url}$" quoted_names = [] for name in path_names: name = name[1: -1] quoted_names.append(quote(name)) return _url, quoted_names def map_controller(self, ctrl: 
ControllerFunction): url = ctrl.url regexp = ctrl.regexp method = ctrl.method _logger.debug(f"map url {url}|{regexp} with method[{method}] to function {ctrl.func}. ") assert method is None or method == "" or method.upper() in self.HTTP_METHODS _method = method.upper() if method is not None and method != "" else "_" if regexp: self.method_regexp_mapping[_method][regexp] = ctrl else: _url = remove_url_first_slash(url) path_pattern, path_names = self.__get_path_reg_pattern(_url) if path_pattern is None: self.method_url_mapping[_method][_url] = ctrl else: self.path_val_url_mapping[_method][path_pattern] = (ctrl, path_names) def _res_(self, path, res_pre, res_dir): fpath = os.path.join(res_dir, path.replace(res_pre, "")) _logger.debug(f"static file. {path} :: {fpath}") fext = os.path.splitext(fpath)[1] ext = fext.lower() if ext in (".html", ".htm", ".xhtml"): content_type = "text/html" elif ext == ".xml": content_type = "text/xml" elif ext == ".css": content_type = "text/css" elif ext in (".jpg", ".jpeg"): content_type = "image/jpeg" elif ext == ".png": content_type = "image/png" elif ext == ".webp": content_type = "image/webp" elif ext == ".js": content_type = "text/javascript" elif ext == ".pdf": content_type = "application/pdf" elif ext == ".mp4": content_type = "video/mp4" elif ext == ".mp3": content_type = "audio/mp3" else: content_type = "application/octet-stream" return StaticFile(fpath, content_type) def get_url_controller(self, path="", method="") -> Tuple[ControllerFunction, Dict, List]: # explicitly url matching if path in self.method_url_mapping[method]: return self.method_url_mapping[method][path], {}, () elif path in self.method_url_mapping["_"]: return self.method_url_mapping["_"][path], {}, () # url with path value matching fun_and_val = self.__try_get_from_path_val(path, method) if fun_and_val is None: fun_and_val = self.__try_get_from_path_val(path, "_") if fun_and_val is not None: return fun_and_val[0], fun_and_val[1], () # regexp func_and_groups = self.__try_get_from_regexp(path, method) if func_and_groups is None: func_and_groups = self.__try_get_from_regexp(path, "_") if func_and_groups is not None: return func_and_groups[0], {}, func_and_groups[1] # static files for k, v in self.res_conf: if path.startswith(k): def static_fun(): return self._res_(path, k, v) return ControllerFunction(func=static_fun), {}, () return None, {}, () def __try_get_from_regexp(self, path, method): for regex, ctrl in self.method_regexp_mapping[method].items(): m = re.match(regex, path) _logger.debug(f"regexp::pattern::[{regex}] => path::[{path}] match? {m is not None}") if m: return ctrl, tuple([unquote(v) for v in m.groups()]) return None def __try_get_from_path_val(self, path, method): for patterns, val in self.path_val_url_mapping[method].items(): m = re.match(patterns, path) _logger.debug(f"url with path value::pattern::[{patterns}] => path::[{path}] match? 
{m is not None}") if m: fun, path_names = val path_values = {} for idx in range(len(path_names)): key = unquote(path_names[idx]) path_values[key] = unquote(m.groups()[idx]) return fun, path_values return None def map_filter(self, path_pattern, filter_fun): self.filter_mapping[path_pattern] = filter_fun def get_matched_filters(self, path): available_filters = [] for key, val in self.filter_mapping.items(): if re.match(key, path): available_filters.append(val) return available_filters def map_websocket_handler(self, endpoint, handler_class): url = remove_url_first_slash(endpoint) path_pattern, path_names = self.__get_path_reg_pattern(url) if path_pattern is None: self.ws_mapping[url] = handler_class else: self.ws_path_val_mapping[path_pattern] = (handler_class, path_names) def get_websocket_handler(self, path): if path in self.ws_mapping: return self.ws_mapping[path], {} return self.__try_get_ws_handler_from_path_val(path) def __try_get_ws_handler_from_path_val(self, path): for patterns, val in self.ws_path_val_mapping.items(): m = re.match(patterns, path) _logger.debug(f"websocket endpoint with path value::pattern::[{patterns}] => path::[{path}] match? {m is not None}") if m: clz, path_names = val path_values = {} for idx in range(len(path_names)): key = unquote(path_names[idx]) path_values[key] = unquote(m.groups()[idx]) return clz, path_values return None, {} def map_error_page(self, code: str, error_page_fun: Callable): if not code: c = "_" else: c = str(code).lower() self.error_page_mapping[c] = error_page_fun def _default_error_page(self, code: int, message: str = "", explain: str = ""): return json.dumps({ "code": code, "message": message, "explain": explain }) def error_page(self, code: int, message: str = "", explain: str = ""): c = str(code) func = None if c in self.error_page_mapping: func = self.error_page_mapping[c] elif code > 200: c0x = c[0:2] + "x" if c0x in self.error_page_mapping: func = self.error_page_mapping[c0x] elif "_" in self.error_page_mapping: func = self.error_page_mapping["_"] if not func: func = self._default_error_page _logger.debug(f"error page function:: {func}") co = code msg = message exp = explain args_def = get_function_args(func, None) kwargs_def = get_function_kwargs(func, None) args = [] for n, t in args_def: _logger.debug(f"set value to error_page function -> {n}") if co is not None: if t is None or t == int: args.append(co) co = None continue if msg is not None: if t is None or t == str: args.append(msg) msg = None continue if exp is not None: if t is None or t == str: args.append(exp) exp = None continue args.append(None) kwargs = {} for n, v, t in kwargs_def: if co is not None: if (t is None and isinstance(v, int)) or t == int: kwargs[n] = co co = None continue if msg is not None: if (t is None and isinstance(v, str)) or t == str: kwargs[n] = msg msg = None continue if exp is not None: if (t is None and isinstance(v, str)) or t == str: kwargs[n] = exp exp = None continue kwargs[n] = v if args and kwargs: return func(*args, **kwargs) elif args: return func(*args) elif kwargs: return func(**kwargs) else: return func() class HTTPServer(TCPServer, RoutingConf): allow_reuse_address = 1 # Seems to make sense in testing environment def server_bind(self): """Override server_bind to store the server name.""" TCPServer.server_bind(self) host, port = self.server_address[:2] self.server_name = socket.getfqdn(host) self.server_port = port def __init__(self, addr, res_conf={}): TCPServer.__init__(self, addr, BaseHTTPRequestHandler) RoutingConf.__init__(self, 
res_conf) class ThreadingMixInHTTPServer(ThreadingMixIn, HTTPServer): pass class CoroutineMixIn: daemon_threads = True @property def coroutine_tasks(self) -> Dict[Any, List[Awaitable]]: if not hasattr(self, "_coroutine_tasks"): self._coroutine_tasks = {} return self._coroutine_tasks @property def coroutine_thread(self) -> threading.Thread: if not hasattr(self, "_coroutine_thread"): self._coroutine_thread = None return self._coroutine_thread @coroutine_thread.setter def coroutine_thread(self, val: threading.Thread): self._coroutine_thread = val @property def coroutine_loop(self) -> asyncio.AbstractEventLoop: if not hasattr(self, "_coroutine_loop"): self._coroutine_loop = None return self._coroutine_loop @coroutine_loop.setter def coroutine_loop(self, val: asyncio.AbstractEventLoop): self._coroutine_loop = val def put_coroutine_task(self, request, task: Awaitable): if request in self.coroutine_tasks: self.coroutine_tasks[request].append(task) else: self.coroutine_tasks[request] = [task] def coroutine_main(self): self.coroutine_loop = loop = asyncio.new_event_loop() try: loop.run_forever() finally: loop.run_until_complete(loop.shutdown_asyncgens()) loop.close() async def process_request_async(self, request, client_address): """Same as in BaseServer but as async. In addition, exception handling is done here. """ try: self.finish_request(request, client_address) if request in self.coroutine_tasks: coroutine_tasks = self.coroutine_tasks[request] while coroutine_tasks: await coroutine_tasks.pop(0) del self.coroutine_tasks[request] except Exception: self.handle_error(request, client_address) finally: self.shutdown_request(request) def process_request(self, request, client_address): if self.coroutine_thread is None: self.coroutine_thread = threading.Thread(target=self.coroutine_main, name="CoroutineThread", daemon=self.daemon_threads) self.coroutine_thread.start() while not self.coroutine_loop: # wait for the loop ready time.sleep(0.1) asyncio.run_coroutine_threadsafe(self.process_request_async(request, client_address), self.coroutine_loop) def server_close(self): super().server_close() if self.coroutine_loop: self.coroutine_loop.call_soon_threadsafe(self.coroutine_loop.stop) self.coroutine_thread.join() def shutdown(self): super().shutdown() if self.coroutine_loop: self.coroutine_loop.call_soon_threadsafe(self.coroutine_loop.stop) self.coroutine_thread.join() class CoroutineMixInHTTPServer(CoroutineMixIn, HTTPServer): pass class SimpleDispatcherHttpServer: """Dispatcher Http server""" def map_filter(self, path_pattern, filter_fun): self.server.map_filter(path_pattern, filter_fun) def map_controller(self, ctrl: ControllerFunction): self.server.map_controller(ctrl) def map_websocket_handler(self, endpoint, handler_class): self.server.map_websocket_handler(endpoint, handler_class) def map_error_page(self, code, func): self.server.map_error_page(code, func) def __init__(self, host: Tuple[str, int] = ('', 9090), ssl: bool = False, ssl_protocol: int = _ssl.PROTOCOL_TLS_SERVER, ssl_check_hostname: bool = False, keyfile: str = "", certfile: str = "", keypass: str = "", ssl_context: _ssl.SSLContext = None, resources: Dict[str, str] = {}, prefer_corountine=False): self.host = host self.__ready = False self.ssl = ssl if prefer_corountine: self.server = CoroutineMixInHTTPServer(self.host, res_conf=resources) else: self.server = ThreadingMixInHTTPServer(self.host, res_conf=resources) if ssl: if ssl_context: ssl_ctx = ssl_context else: assert keyfile and certfile, "keyfile and certfile should be provided. 
" ssl_ctx = _ssl.SSLContext(protocol=ssl_protocol) ssl_ctx.check_hostname = ssl_check_hostname ssl_ctx.load_cert_chain(certfile=certfile, keyfile=keyfile, password=keypass) self.server.socket = ssl_ctx.wrap_socket( self.server.socket, server_side=True ) @property def ready(self): return self.__ready def resources(self, res={}): self.server.res_conf = res def start(self): if self.ssl: ssl_hint = " with SSL on" else: ssl_hint = "" _logger.info(f"Dispatcher Http Server starts. Listen to port [{self.host[1]}]{ssl_hint}.") try: self.__ready = True self.server.serve_forever() except: self.__ready = False raise def shutdown(self): # shutdown it in a seperate thread. threading.Thread(target=self.server.shutdown, daemon=True).start() class WSGIProxy(RoutingConf): def __init__(self, res_conf): super().__init__(res_conf=res_conf) def app_proxy(self, environment, start_response): requestHandler = WSGIRequestHandler(self, environment, start_response) return requestHandler.handle()
py
1a5b52eb2f1ba9c641e7ea560eadff779c231ef2
import os.path from data.base_dataset import BaseDataset, get_params, get_transform from data.image_folder import make_dataset from PIL import Image class AlignedDataset(BaseDataset): """A dataset class for paired image dataset. It assumes that the directory '/path/to/data/train' contains image pairs in the form of {A,B}. During test time, you need to prepare a directory '/path/to/data/test'. """ def __init__(self, opt): """Initialize this dataset class. Parameters: opt (Option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions """ BaseDataset.__init__(self, opt) self.dir_AB = os.path.join(opt.dataroot, opt.phase) # get the image directory self.AB_paths = sorted(make_dataset(self.dir_AB, opt.max_dataset_size)) # get image paths assert(self.opt.load_size >= self.opt.crop_size) # crop_size should be smaller than the size of loaded image self.input_nc = self.opt.output_nc if self.opt.direction == 'BtoA' else self.opt.input_nc self.output_nc = self.opt.input_nc if self.opt.direction == 'BtoA' else self.opt.output_nc def __getitem__(self, index): """Return a data point and its metadata information. Parameters: index - - a random integer for data indexing Returns a dictionary that contains A, B, A_paths and B_paths A (tensor) - - an image in the input domain B (tensor) - - its corresponding image in the target domain A_paths (str) - - image paths B_paths (str) - - image paths (same as A_paths) """ # read a image given a random integer index AB_path = self.AB_paths[index] #print('index'+ index) print('index:{a}'.format(a=index)) AB = Image.open(AB_path).convert('RGB') # split AB image into A and B w, h = AB.size w2 = int(w / 2) A = AB.crop((0, 0, w2, h)) B = AB.crop((w2, 0, w, h)) # apply the same transform to both A and B transform_params = get_params(self.opt, A.size) A_transform = get_transform(self.opt, transform_params, grayscale=(self.input_nc == 1)) B_transform = get_transform(self.opt, transform_params, grayscale=(self.output_nc == 1)) A = A_transform(A) B = B_transform(B) return {'A': A, 'B': B, 'A_paths': AB_path, 'B_paths': AB_path} def __len__(self): """Return the total number of images in the dataset.""" return len(self.AB_paths)
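
# --- Illustrative sketch (appended for clarity; not part of the original dataset class) ---
# Minimal, hedged demo of the A/B split done in __getitem__ above: each paired
# sample is a single image with domain A on the left half and domain B on the
# right half, so the halves are recovered with two crops. The file name below
# is a hypothetical placeholder.
def _split_pair_demo(path='example_AB.jpg'):
    AB = Image.open(path).convert('RGB')  # uses the PIL.Image import at the top of this file
    w, h = AB.size
    w2 = int(w / 2)
    A = AB.crop((0, 0, w2, h))   # left half: input domain
    B = AB.crop((w2, 0, w, h))   # right half: target domain
    return A, B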
py
1a5b538de723aac1b5dfc8143ec5933c22680eda
import numpy as np from holoviews.core.overlay import NdOverlay from holoviews.element import Bars from bokeh.models import CategoricalColorMapper, LinearColorMapper from ..utils import ParamLogStream from .testplot import TestBokehPlot, bokeh_renderer class TestBarPlot(TestBokehPlot): def test_bars_hover_ensure_kdims_sanitized(self): obj = Bars(np.random.rand(10,2), kdims=['Dim with spaces']) obj = obj.opts(tools=['hover']) self._test_hover_info(obj, [('Dim with spaces', '@{Dim_with_spaces}'), ('y', '@{y}')]) def test_bars_hover_ensure_vdims_sanitized(self): obj = Bars(np.random.rand(10,2), vdims=['Dim with spaces']) obj = obj.opts(tools=['hover']) self._test_hover_info(obj, [('x', '@{x}'), ('Dim with spaces', '@{Dim_with_spaces}')]) def test_bars_suppress_legend(self): bars = Bars([('A', 1), ('B', 2)]).opts(plot=dict(show_legend=False)) plot = bokeh_renderer.get_plot(bars) plot.initialize_plot() fig = plot.state self.assertEqual(len(fig.legend), 0) def test_empty_bars(self): bars = Bars([], kdims=['x', 'y'], vdims=['z']).opts(plot=dict(group_index=1)) plot = bokeh_renderer.get_plot(bars) plot.initialize_plot() source = plot.handles['source'] for v in source.data.values(): self.assertEqual(len(v), 0) def test_bars_grouped_categories(self): bars = Bars([('A', 0, 1), ('A', 1, -1), ('B', 0, 2)], kdims=['Index', 'Category'], vdims=['Value']) plot = bokeh_renderer.get_plot(bars) source = plot.handles['source'] self.assertEqual([tuple(x) for x in source.data['xoffsets']], [('A', '0'), ('B', '0'), ('A', '1')]) self.assertEqual(list(source.data['Category']), ['0', '0', '1']) self.assertEqual(source.data['Value'], np.array([1, 2, -1])) x_range = plot.handles['x_range'] self.assertEqual(x_range.factors, [('A', '0'), ('A', '1'), ('B', '0'), ('B', '1')]) def test_bars_multi_level_sorted(self): box= Bars((['A', 'B']*15, [3, 10, 1]*10, np.random.randn(30)), ['Group', 'Category'], 'Value').aggregate(function=np.mean) plot = bokeh_renderer.get_plot(box) x_range = plot.handles['x_range'] self.assertEqual(x_range.factors, [ ('A', '1'), ('A', '3'), ('A', '10'), ('B', '1'), ('B', '3'), ('B', '10')]) def test_box_whisker_multi_level_sorted_alphanumerically(self): box= Bars(([3, 10, 1]*10, ['A', 'B']*15, np.random.randn(30)), ['Group', 'Category'], 'Value').aggregate(function=np.mean) plot = bokeh_renderer.get_plot(box) x_range = plot.handles['x_range'] self.assertEqual(x_range.factors, [ ('1', 'A'), ('1', 'B'), ('3', 'A'), ('3', 'B'), ('10', 'A'), ('10', 'B')]) def test_bars_positive_negative_mixed(self): bars = Bars([('A', 0, 1), ('A', 1, -1), ('B', 0, 2)], kdims=['Index', 'Category'], vdims=['Value']) plot = bokeh_renderer.get_plot(bars.opts(stacked=True)) source = plot.handles['source'] self.assertEqual(list(source.data['Category']), ['1', '0', '0']) self.assertEqual(list(source.data['Index']), ['A', 'A', 'B']) self.assertEqual(source.data['top'], np.array([0, 1, 2])) self.assertEqual(source.data['bottom'], np.array([-1, 0, 0])) def test_bars_logy(self): bars = Bars([('A', 1), ('B', 2), ('C', 3)], kdims=['Index'], vdims=['Value']) plot = bokeh_renderer.get_plot(bars.opts(plot=dict(logy=True))) source = plot.handles['source'] glyph = plot.handles['glyph'] y_range = plot.handles['y_range'] self.assertEqual(list(source.data['Index']), ['A', 'B', 'C']) self.assertEqual(source.data['Value'], np.array([1, 2, 3])) self.assertEqual(glyph.bottom, 10**(np.log10(3)-2)) self.assertEqual(y_range.start, 10**(np.log10(3)-2)) self.assertEqual(y_range.end, 3.) 
def test_bars_logy_explicit_range(self): bars = Bars([('A', 1), ('B', 2), ('C', 3)], kdims=['Index'], vdims=['Value']).redim.range(Value=(0.001, 3)) plot = bokeh_renderer.get_plot(bars.opts(plot=dict(logy=True))) source = plot.handles['source'] glyph = plot.handles['glyph'] y_range = plot.handles['y_range'] self.assertEqual(list(source.data['Index']), ['A', 'B', 'C']) self.assertEqual(source.data['Value'], np.array([1, 2, 3])) self.assertEqual(glyph.bottom, 0.001) self.assertEqual(y_range.start, 0.001) self.assertEqual(y_range.end, 3.0000000000000013) def test_bars_ylim(self): bars = Bars([1, 2, 3]).opts(ylim=(0, 200)) plot = bokeh_renderer.get_plot(bars) y_range = plot.handles['y_range'] self.assertEqual(y_range.start, 0) self.assertEqual(y_range.end, 200) def test_bars_padding_square(self): points = Bars([(1, 2), (2, -1), (3, 3)]).options(padding=0.1) plot = bokeh_renderer.get_plot(points) y_range = plot.handles['y_range'] self.assertEqual(y_range.start, -1.4) self.assertEqual(y_range.end, 3.4) def test_bars_padding_square_positive(self): points = Bars([(1, 2), (2, 1), (3, 3)]).options(padding=0.1) plot = bokeh_renderer.get_plot(points) y_range = plot.handles['y_range'] self.assertEqual(y_range.start, 0) self.assertEqual(y_range.end, 3.2) def test_bars_padding_square_negative(self): points = Bars([(1, -2), (2, -1), (3, -3)]).options(padding=0.1) plot = bokeh_renderer.get_plot(points) y_range = plot.handles['y_range'] self.assertEqual(y_range.start, -3.2) self.assertEqual(y_range.end, 0) def test_bars_padding_nonsquare(self): bars = Bars([(1, 2), (2, 1), (3, 3)]).options(padding=0.1, width=600) plot = bokeh_renderer.get_plot(bars) y_range = plot.handles['y_range'] self.assertEqual(y_range.start, 0) self.assertEqual(y_range.end, 3.2) def test_bars_padding_logx(self): bars = Bars([(1, 1), (2, 2), (3,3)]).options(padding=0.1, logx=True) plot = bokeh_renderer.get_plot(bars) y_range = plot.handles['y_range'] self.assertEqual(y_range.start, 0) self.assertEqual(y_range.end, 3.2) def test_bars_padding_logy(self): bars = Bars([(1, 2), (2, 1), (3, 3)]).options(padding=0.1, logy=True) plot = bokeh_renderer.get_plot(bars) y_range = plot.handles['y_range'] self.assertEqual(y_range.start, 0.033483695221017122) self.assertEqual(y_range.end, 3.3483695221017129) ########################### # Styling mapping # ########################### def test_bars_color_op(self): bars = Bars([(0, 0, '#000'), (0, 1, '#F00'), (0, 2, '#0F0')], vdims=['y', 'color']).options(color='color') plot = bokeh_renderer.get_plot(bars) cds = plot.handles['cds'] glyph = plot.handles['glyph'] self.assertEqual(cds.data['color'], np.array(['#000', '#F00', '#0F0'])) self.assertEqual(glyph.fill_color, {'field': 'color'}) self.assertEqual(glyph.line_color, 'black') def test_bars_linear_color_op(self): bars = Bars([(0, 0, 0), (0, 1, 1), (0, 2, 2)], vdims=['y', 'color']).options(color='color') plot = bokeh_renderer.get_plot(bars) cds = plot.handles['cds'] glyph = plot.handles['glyph'] cmapper = plot.handles['color_color_mapper'] self.assertTrue(cmapper, LinearColorMapper) self.assertEqual(cmapper.low, 0) self.assertEqual(cmapper.high, 2) self.assertEqual(cds.data['color'], np.array([0, 1, 2])) self.assertEqual(glyph.fill_color, {'field': 'color', 'transform': cmapper}) self.assertEqual(glyph.line_color, 'black') def test_bars_categorical_color_op(self): bars = Bars([(0, 0, 'A'), (0, 1, 'B'), (0, 2, 'C')], vdims=['y', 'color']).options(color='color') plot = bokeh_renderer.get_plot(bars) cds = plot.handles['cds'] glyph = plot.handles['glyph'] 
cmapper = plot.handles['color_color_mapper'] self.assertTrue(cmapper, CategoricalColorMapper) self.assertEqual(cmapper.factors, ['A', 'B', 'C']) self.assertEqual(cds.data['color'], np.array(['A', 'B', 'C'])) self.assertEqual(glyph.fill_color, {'field': 'color', 'transform': cmapper}) self.assertEqual(glyph.line_color, 'black') def test_bars_line_color_op(self): bars = Bars([(0, 0, '#000'), (0, 1, '#F00'), (0, 2, '#0F0')], vdims=['y', 'color']).options(line_color='color') plot = bokeh_renderer.get_plot(bars) cds = plot.handles['cds'] glyph = plot.handles['glyph'] self.assertEqual(cds.data['line_color'], np.array(['#000', '#F00', '#0F0'])) self.assertNotEqual(glyph.fill_color, {'field': 'line_color'}) self.assertEqual(glyph.line_color, {'field': 'line_color'}) def test_bars_fill_color_op(self): bars = Bars([(0, 0, '#000'), (0, 1, '#F00'), (0, 2, '#0F0')], vdims=['y', 'color']).options(fill_color='color') plot = bokeh_renderer.get_plot(bars) cds = plot.handles['cds'] glyph = plot.handles['glyph'] self.assertEqual(cds.data['fill_color'], np.array(['#000', '#F00', '#0F0'])) self.assertEqual(glyph.fill_color, {'field': 'fill_color'}) self.assertNotEqual(glyph.line_color, {'field': 'fill_color'}) def test_bars_alpha_op(self): bars = Bars([(0, 0, 0), (0, 1, 0.2), (0, 2, 0.7)], vdims=['y', 'alpha']).options(alpha='alpha') plot = bokeh_renderer.get_plot(bars) cds = plot.handles['cds'] glyph = plot.handles['glyph'] self.assertEqual(cds.data['alpha'], np.array([0, 0.2, 0.7])) self.assertEqual(glyph.fill_alpha, {'field': 'alpha'}) def test_bars_line_alpha_op(self): bars = Bars([(0, 0, 0), (0, 1, 0.2), (0, 2, 0.7)], vdims=['y', 'alpha']).options(line_alpha='alpha') plot = bokeh_renderer.get_plot(bars) cds = plot.handles['cds'] glyph = plot.handles['glyph'] self.assertEqual(cds.data['line_alpha'], np.array([0, 0.2, 0.7])) self.assertEqual(glyph.line_alpha, {'field': 'line_alpha'}) self.assertNotEqual(glyph.fill_alpha, {'field': 'line_alpha'}) def test_bars_fill_alpha_op(self): bars = Bars([(0, 0, 0), (0, 1, 0.2), (0, 2, 0.7)], vdims=['y', 'alpha']).options(fill_alpha='alpha') plot = bokeh_renderer.get_plot(bars) cds = plot.handles['cds'] glyph = plot.handles['glyph'] self.assertEqual(cds.data['fill_alpha'], np.array([0, 0.2, 0.7])) self.assertNotEqual(glyph.line_alpha, {'field': 'fill_alpha'}) self.assertEqual(glyph.fill_alpha, {'field': 'fill_alpha'}) def test_bars_line_width_op(self): bars = Bars([(0, 0, 1), (0, 1, 4), (0, 2, 8)], vdims=['y', 'line_width']).options(line_width='line_width') plot = bokeh_renderer.get_plot(bars) cds = plot.handles['cds'] glyph = plot.handles['glyph'] self.assertEqual(cds.data['line_width'], np.array([1, 4, 8])) self.assertEqual(glyph.line_width, {'field': 'line_width'}) def test_op_ndoverlay_value(self): colors = ['blue', 'red'] overlay = NdOverlay({color: Bars(np.arange(i+2)) for i, color in enumerate(colors)}, 'Color').options('Bars', fill_color='Color') plot = bokeh_renderer.get_plot(overlay) for subplot, color in zip(plot.subplots.values(), colors): self.assertEqual(subplot.handles['glyph'].fill_color, color) def test_bars_color_index_color_clash(self): bars = Bars([(0, 0, 0), (0, 1, 1), (0, 2, 2)], vdims=['y', 'color']).options(color='color', color_index='color') with ParamLogStream() as log: bokeh_renderer.get_plot(bars) log_msg = log.stream.read() warning = ("Cannot declare style mapping for 'color' option " "and declare a color_index; ignoring the color_index.\n") self.assertEqual(log_msg, warning)
py
1a5b545d1c7cb92c82518e084b285db3eb98743a
import py from pypy.rlib.objectmodel import specialize from pypy.rpython.memory.lltypelayout import convert_offset_to_int from pypy.jit.codegen.llvm.test.test_llvmjit import skip_unsupported_platform from pypy.jit.codegen.test.operation_tests import OperationTests from pypy.jit.codegen.llvm.rgenop import RLLVMGenOp from pypy.jit.codegen.llvm.llvmjit import llvm_version, MINIMAL_VERSION def conv(n): if not isinstance(n, int) and not isinstance(n, str): n = convert_offset_to_int(n) return n class RGenOpPacked(RLLVMGenOp): """Like RLLVMGenOp, but produces concrete offsets in the tokens instead of llmemory.offsets. These numbers may not agree with your C compiler's. """ @staticmethod @specialize.memo() def fieldToken(T, name): return tuple(map(conv, RLLVMGenOp.fieldToken(T, name))) @staticmethod @specialize.memo() def arrayToken(A): return tuple(map(conv, RLLVMGenOp.arrayToken(A))) @staticmethod @specialize.memo() def allocToken(T): return conv(RLLVMGenOp.allocToken(T)) @staticmethod @specialize.memo() def varsizeAllocToken(A): return tuple(map(conv, RLLVMGenOp.varsizeAllocToken(A))) class LLVMTestBasicMixin(object): RGenOp = RGenOpPacked class TestBasic(LLVMTestBasicMixin, OperationTests): # for the individual tests see # ====> ../../test/operation_tests.py def skip(self): py.test.skip('WIP') def skip_too_minimal(self): py.test.skip('found llvm %.1f, requires at least llvm %.1f(cvs)' % ( llvm_version(), MINIMAL_VERSION)) if llvm_version() < 2.0: test_unsigned = skip_too_minimal #uint_invert uses incorrect xor constant? test_unsigned_comparison = skip_too_minimal test_unsigned = skip #? test_arithmetic = skip #XXX << 32 and >> 32 fail test_constants_in_divmod = skip #in-progress test_float_arithmetic = skip #XXX llvmjit.execute() returns an int :-( test_float_cast = skip #XXX llvmjit.execute() returns an int :-( test_unichar_array = skip test_char_unichar_fields = skip
py
1a5b5479d839082c10fbefb5f6699b354ff285c8
# -*- coding: utf-8 -*- """ Python Flight Mechanics Engine (PyFME). Copyright (c) AeroPython Development Team. Distributed under the terms of the MIT License. Inputs Generator Tests ---------------------- Test functions for input generator module. """ import numpy as np from numpy.testing import assert_almost_equal from pyfme.utils.input_generator import (Step, Doublet, Ramp, Harmonic, Constant) def test_input_scalar_output_scalar(): control = Constant(1.5) control_value = control(1.5) assert isinstance(control_value, float) def test_step(): t_init = 0 T = 5 A = 1.5 time = np.linspace(0, 10, 11) expected_input = np.zeros([11]) expected_input[0:6] = A step_input = Step(t_init, T, A, offset=0) real_input = step_input(time) assert_almost_equal(real_input, expected_input) def test_step_bounds_not_included(): t_init = 0.1 T = 4.8 A = 1.5 time = np.linspace(0, 10, 11) expected_input = np.zeros([11]) expected_input[1:5] = A step_input = Step(t_init, T, A, offset=0) real_input = step_input(time) assert_almost_equal(real_input, expected_input) def test_step_offset(): t_init = 0 T = 5 A = 1.5 time = np.linspace(0, 10, 11) offset = 2.6 expected_input = np.zeros([11]) + offset expected_input[0:6] += A step_input = Step(t_init, T, A, offset=offset) real_input = step_input(time) assert_almost_equal(real_input, expected_input) def test_doublet(): t_init = 0 T = 5. A = 3. time = np.linspace(0, 10, 11) expected_input = np.zeros([11]) expected_input[0:3] = A/2 expected_input[3:6] = -A/2 doublet_input = Doublet(t_init, T, A, offset=0) real_input = doublet_input(time) assert_almost_equal(real_input, expected_input) def test_doublet_bounds_not_included(): t_init = 0.1 T = 4.8 A = 3. time = np.linspace(0, 10, 11) expected_input = np.zeros([11]) expected_input[1:3] = A/2 expected_input[3:5] = -A/2 doublet_input = Doublet(t_init, T, A, offset=0) real_input = doublet_input(time) assert_almost_equal(real_input, expected_input) def test_doublet_offset(): t_init = 0 T = 5 A = 1.5 time = np.linspace(0, 10, 11) offset = 2.6 expected_input = np.zeros([11]) + offset expected_input[0:3] += A/2 expected_input[3:6] += -A/2 doublet_input = Doublet(t_init, T, A, offset=offset) real_input = doublet_input(time) assert_almost_equal(real_input, expected_input) def test_ramp(): t_init = 0 T = 4. A = 3. time = np.linspace(0, 10, 11) expected_input = np.zeros([11]) expected_input[0:5] = np.array([0, A/4, A/2, 3*A/4, A]) ramp_input = Ramp(t_init, T, A, offset=0) real_input = ramp_input(time) assert_almost_equal(real_input, expected_input) def test_ramp_offset(): t_init = 0 T = 4. A = 3. time = np.linspace(0, 10, 11) offset = 1 expected_input = np.zeros([11]) + offset expected_input[0:5] += np.array([0, A/4, A/2, 3*A/4, A]) ramp_input = Ramp(t_init, T, A, offset=offset) real_input = ramp_input(time) assert_almost_equal(real_input, expected_input) def test_harmonic(): t_init = 0 T = 4. A = 3.0 time = np.linspace(0, 10, 11) f = 0.25 expected_input = np.zeros([11]) expected_input[0:5] = np.array([0, A/2, 0, -A/2, 0]) harmonic_input = Harmonic(t_init, T, A, f, phase=0, offset=0) real_input = harmonic_input(time) assert_almost_equal(real_input, expected_input) def test_harmonic_offset(): t_init = 0 T = 4. A = 3. 
time = np.linspace(0, 10, 11) offset = 1 f = 0.25 expected_input = np.zeros([11]) + offset expected_input[0:5] += np.array([0, A/2, 0, -A/2, 0]) harmonic_input = Harmonic(t_init, T, A, f, phase=0, offset=offset) real_input = harmonic_input(time) assert_almost_equal(real_input, expected_input) def test_harmonic_phase(): t_init = 0 T = 4. A = 3. time = np.linspace(0, 10, 11) phase = np.pi/2 f = 0.25 expected_input = np.zeros([11]) expected_input[0:5] += np.array([A/2, 0, -A/2, 0, A/2]) harmonic_input = Harmonic(t_init, T, A, f, phase=phase) real_input = harmonic_input(time) assert_almost_equal(real_input, expected_input) def test_constant(): offset = 3.5 time = np.linspace(0, 5, 11) expected_input = np.full_like(time, offset) constant = Constant(offset) real_input = constant(time) assert_almost_equal(real_input, expected_input) def test_add_controls_01(): offset1 = 3.5 offset2 = 1.2 time = np.linspace(0, 5, 11) expected_input = np.full_like(time, offset1 + offset2) constant1 = Constant(offset1) constant2 = Constant(offset2) constant_input = constant1 + constant2 real_input = constant_input(time) assert_almost_equal(real_input, expected_input) def test_add_controls_02(): time = np.linspace(0, 10, 11) # Define harmonic input t_init = 0 T = 4. A = 3. phase = np.pi / 2 f = 0.25 expected_harm_input = np.zeros([11]) expected_harm_input[0:5] += np.array([A / 2, 0, -A / 2, 0, A / 2]) harmonic_input = Harmonic(t_init, T, A, f, phase=phase) # Define ramp input t_init = 0 T = 4. A = 3. expected_ramp_input = np.zeros([11]) expected_ramp_input[0:5] = np.array([0, A / 4, A / 2, 3 * A / 4, A]) ramp_input = Ramp(t_init, T, A, offset=0) # Add both composed_input = ramp_input + harmonic_input real_input = composed_input(time) expected_input = expected_ramp_input + expected_harm_input assert_almost_equal(real_input, expected_input) def test_subtract_controls_01(): time = np.linspace(0, 10, 11) # Define harmonic input t_init = 0 T = 4. A = 3. phase = np.pi / 2 f = 0.25 expected_harm_input = np.zeros([11]) expected_harm_input[0:5] += np.array([A / 2, 0, -A / 2, 0, A / 2]) harmonic_input = Harmonic(t_init, T, A, f, phase=phase) # Define ramp input t_init = 0 T = 4. A = 3. expected_ramp_input = np.zeros([11]) expected_ramp_input[0:5] = np.array([0, A / 4, A / 2, 3 * A / 4, A]) ramp_input = Ramp(t_init, T, A, offset=0) # Subtract both composed_input = ramp_input - harmonic_input real_input = composed_input(time) expected_input = expected_ramp_input - expected_harm_input assert_almost_equal(real_input, expected_input) def test_multiply_controls_01(): time = np.linspace(0, 10, 11) # Define harmonic input t_init = 0 T = 4. A = 3. phase = np.pi / 2 f = 0.25 expected_harm_input = np.zeros([11]) expected_harm_input[0:5] += np.array([A / 2, 0, -A / 2, 0, A / 2]) harmonic_input = Harmonic(t_init, T, A, f, phase=phase) # Define ramp input t_init = 0 T = 4. A = 3. expected_ramp_input = np.zeros([11]) expected_ramp_input[0:5] = np.array([0, A / 4, A / 2, 3 * A / 4, A]) ramp_input = Ramp(t_init, T, A, offset=0) # Multiply both composed_input = ramp_input * harmonic_input real_input = composed_input(time) expected_input = expected_ramp_input * expected_harm_input assert_almost_equal(real_input, expected_input) time = np.linspace(0, 10, 11) # Define harmonic input t_init = 0 T = 4. A = 3. phase = np.pi / 2 f = 0.25 expected_harm_input = np.zeros([11]) expected_harm_input[0:5] += np.array([A / 2, 0, -A / 2, 0, A / 2]) harmonic_input = Harmonic(t_init, T, A, f, phase=phase) # Define ramp input t_init = 0 T = 4. A = 3. 
expected_ramp_input = np.zeros([11]) expected_ramp_input[0:5] = np.array([0, A / 4, A / 2, 3 * A / 4, A]) ramp_input = Ramp(t_init, T, A, offset=0) # Add both composed_input = ramp_input + harmonic_input real_input = composed_input(time) expected_input = expected_harm_input + expected_ramp_input assert_almost_equal(real_input, expected_input)
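
# --- Illustrative sketch (appended for clarity; not one of the original test cases) ---
# Minimal, hedged demo of composing the generators exercised above: the +, - and *
# operators combine controls point-wise, so a stepped command around a constant
# trim value can be written directly as Step(...) + Constant(...).
def _composition_demo():
    time = np.linspace(0, 10, 11)
    control = Step(2, 4, 0.1) + Constant(-0.05)
    # 0.05 while the step is active (t in [2, 6]), -0.05 elsewhere
    return control(time)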
py
1a5b54beb7aaa8aaa742cf1026b225d6fc79690d
# Copyright (C) 2017 Adam Schubert <[email protected]> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import tests.TestCase as TestCase from cron_descriptor import Options, ExpressionDescriptor class TestLocale(TestCase.TestCase): def test_locale_de(self): options = Options() options.locale_code = 'de_DE' options.use_24hour_time_format = True self.assertEqual( "Jede Minute", ExpressionDescriptor("* * * * *", options).get_description())
py
1a5b54c05668e7eeb2743e0452b799eae9e13d8a
_base_ = [ './ircsn_ig65m_pretrained_bnfrozen_r152_32x2x1_58e_kinetics400_rgb.py' ] # model settings model = dict( backbone=dict( norm_eval=True, bn_frozen=True, bottleneck_mode='ip', pretrained=None)) dataset_type = 'RawframeDataset' data_root = 'data/kinetics400/rawframes_train' data_root_val = 'data/kinetics400/rawframes_val' ann_file_train = 'data/kinetics400/kinetics400_train_list_rawframes.txt' ann_file_val = 'data/kinetics400/kinetics400_val_list_rawframes.txt' ann_file_test = 'data/kinetics400/kinetics400_val_list_rawframes.txt' img_norm_cfg = dict( mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_bgr=False) train_pipeline = [ dict(type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='RandomResizedCrop'), dict(type='Resize', scale=(224, 224), keep_ratio=False), dict(type='Flip', flip_ratio=0.5), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs', 'label']) ] val_pipeline = [ dict( type='SampleFrames', clip_len=32, frame_interval=2, num_clips=1, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='CenterCrop', crop_size=224), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs']) ] test_pipeline = [ dict( type='SampleFrames', clip_len=32, frame_interval=2, num_clips=10, test_mode=True), dict(type='RawFrameDecode'), dict(type='Resize', scale=(-1, 256)), dict(type='ThreeCrop', crop_size=256), dict(type='Normalize', **img_norm_cfg), dict(type='FormatShape', input_format='NCTHW'), dict(type='Collect', keys=['imgs', 'label'], meta_keys=[]), dict(type='ToTensor', keys=['imgs']) ] data = dict( videos_per_gpu=4, workers_per_gpu=4, train=dict( type=dataset_type, ann_file=ann_file_train, data_prefix=data_root, pipeline=train_pipeline), val=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=val_pipeline), test=dict( type=dataset_type, ann_file=ann_file_val, data_prefix=data_root_val, pipeline=test_pipeline)) optimizer = dict( type='SGD', lr=0.08, momentum=0.9, weight_decay=0.0001) # this lr is used for 8 gpus optimizer_config = dict(grad_clip=dict(max_norm=40, norm_type=2)) # learning policy lr_config = dict( policy='CosineAnnealing', min_lr=0, warmup='linear', warmup_by_epoch=True, warmup_iters=40) total_epochs = 180 work_dir = './work_dirs/ipcsn_bnfrozen_r152_32x2x1_180e_kinetics400_rgb' # noqa: E501
py
1a5b54ef52a483871f05c7144c281ed3e55e6479
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from flask.ext.script import Manager, Server from flask.ext.collect import Collect from quokka import create_app from quokka.core.db import db from quokka.ext.blueprints import load_blueprint_commands app = create_app() if app.config.get("LOGGER_ENABLED"): logging.basicConfig( level=getattr(logging, app.config.get("LOGGER_LEVEL", "DEBUG")), format=app.config.get( "LOGGER_FORMAT", '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'), datefmt=app.config.get("LOGGER_DATE_FORMAT", '%d.%m %H:%M:%S') ) manager = Manager(app) collect = Collect() collect.init_script(manager) @manager.shell def make_shell_context(): " Update shell. " return dict(app=app, db=db) @manager.command def check(): """Prints app status""" from pprint import pprint print("Extensions.") pprint(app.extensions) print("Modules.") pprint(app.blueprints) print("App.") return app @manager.command def populate(): """Populate the database with sample data""" from quokka.utils.populate import Populate Populate(db)() @manager.command def show_config(): "print all config variables" from pprint import pprint print("Config.") pprint(dict(app.config)) manager.add_command("run0", Server( use_debugger=True, use_reloader=True, host='0.0.0.0', port=8000 )) load_blueprint_commands(manager) if __name__ == '__main__': manager.run()
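
# --- Illustrative usage sketch (appended for clarity; not part of the original script) ---
# Hedged example of invoking the Flask-Script manager defined above from the
# command line; the file name "manage.py" is an assumption based on the usual layout.
#
#   python manage.py check        # print extensions, blueprints and app status
#   python manage.py populate     # fill the database with sample data
#   python manage.py show_config  # dump all config variables
#   python manage.py run0         # run the dev server on 0.0.0.0:8000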
py
1a5b560dcb19fb72fcba73d003601b7cad1eedfc
# Copyright (c) 2022 Ansys, Inc. and its affiliates. Unauthorised use,
# distribution or duplication is prohibited
# LICENSE file is in the root directory of this source tree.

DOT_FILES_TO_RENAME = {'flake8': '.flake8', 'gitignore': '.gitignore', 'gitattributes': '.gitattributes'}


class Colors:
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKCYAN = '\033[96m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


GIT_RECC_LOG = f"""{Colors.OKCYAN}We recommend you track your project using git and store it in a remote repository, such as on ADO or GitHub.
You will need a GitHub or ADO account to do this.
This can be done by following these instructions provided you already have git installed.
1- Navigate to the created project directory on the command line
2- Make the directory into a git repo and link it to a remote origin
2.1 [Optional] - git init
2.2 [Optional] - git remote add origin <repository_url>
2.3 [Optional] - git add .
2.4 [Optional] - git commit -m "initial commit"
2.5 [Optional] - git push origin main
{Colors.OKCYAN}
"""
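
# --- Illustrative sketch (appended for clarity; not part of the original constants module) ---
# Minimal, hedged demo of how the ANSI codes above are intended to be used:
# wrap the message in a colour code and reset with ENDC so later output is unaffected.
def _print_warning_demo(message="example warning"):
    print(f"{Colors.WARNING}{message}{Colors.ENDC}")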
py
1a5b5690409133a8e91239d74efcc6621d64baeb
# coding: utf-8 # ----------------------------------------------------------------------------------- # <copyright company="Aspose" file="insert_table_row_online_response.py"> # Copyright (c) 2021 Aspose.Words for Cloud # </copyright> # <summary> # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # </summary> # ----------------------------------------------------------------------------------- class InsertTableRowOnlineResponse(object): """ Response model for insert_table_row_online operation. Initializes a new instance. """ def __init__(self, model, document): self.model = model self.document = document
py
1a5b56a2e2a086dd8c93d2bf3bc8e417e66eea5b
from os import system, name
import json


class Eb2Utils:
    # Simple function to clear the console...
    @staticmethod
    def clear():
        # for windows
        if name == 'nt':
            _ = system('cls')
            _ = system('TITLE Expertise Bot :: Rewrite v0.0.2')

        # for mac and linux (here, os.name is 'posix')
        else:
            _ = system('clear')
py
1a5b56f0dd74d5503613e1892b58e93ebd3935a2
import tkinter as tk import tkinter.ttk as ttk import numpy as np import math import operator import sys import DataControls.ControlElements as DCCE import os import subprocess from datetime import datetime import matplotlib as mpl mpl.use('TkAgg') #mpl backend from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2Tk from matplotlib import backend_bases # mpl.rcParams['toolbar'] = 'None' from matplotlib.figure import Figure import matplotlib.animation as anim #Custom MPL Navigation Toolbar BEGIN class CustomNavigationToolbar(NavigationToolbar2Tk): def __init__(self, pFigureCanvasTKAgg, parent, root, *args, **kwargs): self.m_root = root self.toolitems = ( ('Home', 'Reset original view', 'home', 'home_extended'), ('Back', 'Back to previous view', 'back', 'back'), ('Forward', 'Forward to next view', 'forward', 'forward'), (None, None, None, None), ('Pan', 'Pan axes with left mouse, zoom with right', 'move', 'pan'), ('Zoom', 'Zoom to rectangle', 'zoom_to_rect', 'zoom'), (None, None, None, None), ('Save', 'Save the figure', 'filesave', 'save_figure'), ('Subplots', 'Configure subplots', 'subplots', 'advanced_settings') ) super().__init__(pFigureCanvasTKAgg, parent, *args, **kwargs) self.m_figureRef = pFigureCanvasTKAgg.figure self.m_containerRef = parent def inputCheckFloatEntry(self, entry): if(entry.get() == ''): return False try: float(entry.get()) except ValueError: tk.messagebox.showerror("Advanced Settings Error", "Please make sure all inputs are parseable as floats (i.e. decimal numbers).") return False return True def set_advanced_settings(self): self.inputCheckFloatEntry(self.m_yMaxBoundEntry) pYMax = float(self.m_yMaxBoundEntry.get()) self.inputCheckFloatEntry(self.m_yMinBoundEntry) pYMin = float(self.m_yMinBoundEntry.get()) self.m_containerRef.getPrimaryAxes().set_ybound(pYMin, pYMax) self.inputCheckFloatEntry(self.m_xMaxBoundEntry) pXMax = float(self.m_xMaxBoundEntry.get()) self.inputCheckFloatEntry(self.m_xMinBoundEntry) pXMin = float(self.m_xMinBoundEntry.get()) self.m_containerRef.getPrimaryAxes().set_xbound(pXMin, pXMax) self.m_containerRef.m_subplot.grid(linestyle=':') if(self.m_containerRef.m_secondaryYAxisRequired): self.inputCheckFloatEntry(self.m_secondYMaxBoundEntry) sYMax = float(self.m_secondYMaxBoundEntry.get()) self.inputCheckFloatEntry(self.m_secondYMinBoundEntry) sYMin = float(self.m_secondYMinBoundEntry.get()) self.m_containerRef.getSecondaryAxes().set_ybound(sYMin, sYMax) #TODO: get DPI settings working with tkinter canvas -> currently reverting after a few seconds # self.inputCheckFloatEntry(self.m_DPIEntry) # newDPI = float(self.m_DPIEntry.get()) # self.m_figureRef.set_dpi(newDPI) # self.m_containerRef.m_subplot.relim() self.m_containerRef.canvas.draw_idle() def advanced_settings(self): self.m_window = tk.Toplevel(self.m_root) self.m_window.title("Advanced Figure Options") if not sys.platform.startswith('win'): self.m_window.configure(bg = "#ececec") #ececec only for mac self.m_yMaxBoundLabel = ttk.Label(self.m_window, text="Primary Max Y Bound") self.m_yMaxBoundLabel.grid(row = 0, column = 0, sticky = "nsw") self.m_yMaxBoundEntry = DCCE.EnhancedEntry(self.m_window) self.m_yMaxBoundEntry.grid(row = 0, column = 1, sticky= "nsew") self.m_yMaxBoundEntry.set(self.m_containerRef.getPrimaryAxes().get_ybound()[1]) self.m_yMinBoundLabel = ttk.Label(self.m_window, text="Primary Min Y Bound") self.m_yMinBoundLabel.grid(row = 1, column = 0, sticky = "nsw") self.m_yMinBoundEntry = DCCE.EnhancedEntry(self.m_window) self.m_yMinBoundEntry.grid(row = 
1, column = 1, sticky= "nsew") self.m_yMinBoundEntry.set(self.m_containerRef.getPrimaryAxes().get_ybound()[0]) self.m_xMaxBoundLabel = ttk.Label(self.m_window, text="Primary Max X Bound") self.m_xMaxBoundLabel.grid(row = 2, column = 0, sticky = "nsw") self.m_xMaxBoundEntry = DCCE.EnhancedEntry(self.m_window) self.m_xMaxBoundEntry.grid(row = 2, column = 1, sticky= "nsew") self.m_xMaxBoundEntry.set(self.m_containerRef.getPrimaryAxes().get_xbound()[1]) self.m_xMinBoundLabel = ttk.Label(self.m_window, text="Primary Min X Bound") self.m_xMinBoundLabel.grid(row = 3, column = 0, sticky = "nsw") self.m_xMinBoundEntry = DCCE.EnhancedEntry(self.m_window) self.m_xMinBoundEntry.grid(row = 3, column = 1, sticky= "nsew") self.m_xMinBoundEntry.set(self.m_containerRef.getPrimaryAxes().get_xbound()[0]) self.m_DPILabel = ttk.Label(self.m_window, text="Figure DPI") self.m_DPILabel.grid(row = 4, column = 0, sticky = "nsw") self.m_DPIEntry = DCCE.EnhancedEntry(self.m_window) self.m_DPIEntry.grid(row = 4, column = 1, sticky= "nsew") self.m_DPIEntry.set(self.m_figureRef.get_dpi()) if(self.m_containerRef.m_secondaryYAxisRequired): self.m_secondYMaxBoundLabel = ttk.Label(self.m_window, text="Secondary Max Y Bound") self.m_secondYMaxBoundLabel.grid(row = 5, column = 0, sticky = "nsw") self.m_secondYMaxBoundEntry = DCCE.EnhancedEntry(self.m_window) self.m_secondYMaxBoundEntry.grid(row = 5, column = 1, sticky= "nsew") self.m_secondYMaxBoundEntry.set(self.m_containerRef.getSecondaryAxes().get_ybound()[1]) self.m_secondYMinBoundLabel = ttk.Label(self.m_window, text="Secondary Min Y Bound") self.m_secondYMinBoundLabel.grid(row = 6, column = 0, sticky = "nsw") self.m_secondYMinBoundEntry = DCCE.EnhancedEntry(self.m_window) self.m_secondYMinBoundEntry.grid(row = 6, column = 1, sticky= "nsew") self.m_secondYMinBoundEntry.set(self.m_containerRef.getSecondaryAxes().get_ybound()[0]) self.m_buttonRowIndex = 7 else: self.m_buttonRowIndex = 5 self.m_setButton = ttk.Button(self.m_window, text = "Set Values", command = self.set_advanced_settings) self.m_setButton.grid(row = self.m_buttonRowIndex, columnspan = 2, sticky = "ns") # raise NotImplementedError #MACOS ONLY!!! 
TRY USING THIS TO FIX SAVEAS INTERFACE def user_action(apath, cmd): ascript = ''' -- apath - default path for dialogs to open too -- cmd - "Select", "Save" on run argv set userCanceled to false if (count of argv) = 0 then tell application "System Events" to display dialog "argv is 0" ¬ giving up after 10 else set apath to POSIX file (item 1 of argv) as alias set action to (item 2 of argv) as text end if try if action contains "Select" then set fpath to POSIX path of (choose file default location apath ¬ without invisibles, multiple selections allowed and ¬ showing package contents) # return fpath as text else if action contains "Save" then set fpath to POSIX path of (choose file name default location apath) end if return fpath as text on error number -128 set userCanceled to true end try if userCanceled then return "Cancel" else return fpath end if end run ''' try: proc = subprocess.check_output(['osascript', '-e', ascript, apath, cmd]) if 'Cancel' in proc.decode('utf-8'): # User pressed Cancel button sys.exit('User Canceled') return proc.decode('utf-8') except subprocess.CalledProcessError as e: print('Python error: [%d]\n%s\n' % e.returncode, e.output) def save_figure(self,*args): #copied backend save_fig because I needed to add custom solution for .txt file extension # previousSize = self.m_figureRef.get_size_inches() # previousDPI = self.m_figureRef.get_dpi() # self.m_figureRef.set_dpi(300) #print quality temporarily # self.m_figureRef.set_size_inches(w=13.0/2.54, h=8.0/2.54)#13cm by 8cm # super().save_figure() # self.m_figureRef.set_size_inches(previousSize) # self.m_figureRef.set_dpi(previousDPI) #print quality temporarily filetypes = self.canvas.get_supported_filetypes().copy() # default_filetype = self.canvas.get_default_filetype() # Tk doesn't provide a way to choose a default filetype, # so we just have to put it first #("All Files", "*.*"), # default_filetype_name = filetypes.pop(default_filetype) sorted_filetypes = sorted(filetypes.items()) tk_filetypes = [(name, '*.%s' % ext) for ext, name in sorted_filetypes] # adding a default extension seems to break the # asksaveasfilename dialog when you choose various save types # from the dropdown. Passing in the empty string seems to # work - JDH! #defaultextension = self.canvas.get_default_filetype() # defaultextension = 'txt' initialdir = os.path.expanduser(mpl.rcParams['savefig.directory']) # initialfile = "RawData.txt"#self.canvas.get_default_filename() fname = tk.filedialog.asksaveasfilename( master=self.canvas.get_tk_widget().master, title='Save the figure', filetypes=[("All Files", "*.*"),("Raw Plot Data", "*.txt")] + tk_filetypes # filetypes=[('Raw Plot Data','*.txt'),('Image Data','*.jpeg')], # defaultextension=defaultextension, # initialdir=initialdir, # initialfile=initialfile, ) if fname in ["", ()]: return # Save dir for next time, unless empty str (i.e., use cwd). 
if initialdir != "": mpl.rcParams['savefig.directory'] = ( os.path.dirname(str(fname))) try: if(".txt" in fname): rawData = self.m_containerRef.m_subplot.get_lines() #or use legend handles #_xy contains data and _label contains legend name lineCount = len(rawData) #init vars labels = [self.m_containerRef.m_subplot.get_xlabel().replace(' ', '_')] labels += [rawData[0].get_label().replace(' ', '_')] output = np.vstack((rawData[0].get_xdata(),rawData[0].get_ydata())) #append to them for i in range(1,lineCount): labels += [self.m_containerRef.m_subplot.get_xlabel().replace(' ', '_')] labels += [rawData[i].get_label().replace(' ', '_')] output = np.vstack((output, rawData[i].get_xdata())) output = np.vstack((output, rawData[i].get_ydata())) sep = ' ' headerString = sep.join(labels) with open(fname, mode='a') as fileHandle: np.savetxt(fileHandle, output.transpose(), delimiter=' ', header = headerString, comments='') else:# This method will handle the delegation to the correct type self.canvas.figure.savefig(fname) except Exception as e: tk.messagebox.showerror("Error saving file", str(e)) def home_extended(self): super().home() #Custom MPL Navigation Toolbar END #MPLContainer BEGIN # resizeFuncCounter = 0 #debug # lastFuncCounterOutputTime = datetime.now() #debug class MPLContainer(tk.Frame): def __init__(self, parent, title, yAxisName, xAxisName, root, secondaryYAxis = False, secondaryYAxisName = "", invertXAxis = False, legendLoc = 0, *args, **kwargs): super().__init__(parent, bg="white", *args, **kwargs) self.m_title = title self.m_xAxisName = xAxisName self.m_yAxisName = yAxisName self.m_usingMarkers = False self.m_legendLoc = legendLoc # self.m_primaryMaxX = 0 # self.m_primaryMaxY = 0 # self.m_secondaryMaxX = 0 # self.m_secondaryMaxY = 0 self.m_verticalLines = list() root.registerResizeCallback(self.resizePlot) self.m_secondaryYAxisRequired = secondaryYAxis if(secondaryYAxis and secondaryYAxisName == ""): raise ValueError #need a secondaryYAxisName! 
self.m_secondaryYAxisName = secondaryYAxisName self.m_invertXAxis = invertXAxis self.initUI(parent, root) def hidePlot(self): self.canvas.get_tk_widget().place_forget() self.m_figure.set_dpi(4)#setting figure to lowest dpi possible while hidden, because matplotlib tkagg backend keeps updating figure on resize, even while hidden :( self.plotHidden = True def showPlot(self): self.m_figure.set_dpi(96) self.canvas.get_tk_widget().place(anchor="nw",bordermode=tk.OUTSIDE,height=self.winfo_height(),width=self.winfo_width()) self.plotHidden = False def resizePlot(self, timeDelta, *args, **kwargs): if(not self.plotHidden and timeDelta.total_seconds()*1000 < 700): #hide the plot if we just started resizing self.hidePlot() elif(self.plotHidden and timeDelta.total_seconds()*1000 > 700): #if we stopped resizing, unhide plot self.showPlot() #else do nothing def explicitRefresh(self): self.canvas.draw() if(not self.plotHidden): self.hidePlot() self.showPlot() def initUI(self, parent, root): self.pack(side=tk.TOP, fill = tk.BOTH, expand=True) self.m_figure = Figure( dpi=96) self.m_subplot = self.m_figure.add_subplot(111) #add_subplot returns axes # a = f.add_subplot(111)#111 means only one chart as opposed to 121 meanign 2 self.m_subplot.set_title(self.m_title) self.m_subplot.set_xlabel(self.m_xAxisName) self.m_subplot.set_ylabel(self.m_yAxisName) self.m_subplot.tick_params(direction="in") self.m_subplot.grid(linestyle=':') self.m_subplot.margins(x = 0.0) #consider removing yellow, tan, salmon, coral self.m_subplot.set_prop_cycle(color=['blue', 'green', 'red', 'cyan', 'magenta', 'black', 'purple', 'pink', 'brown', 'orange', 'teal', 'lightblue', 'lime', 'turquoise', 'darkgreen', 'gold']) if(self.m_invertXAxis): self.m_subplot.invert_xaxis() if(self.m_secondaryYAxisRequired): self.m_secondaryYAxis = self.m_subplot.twinx() self.m_secondaryYAxis.set_ylabel(self.m_secondaryYAxisName) self.m_secondaryYAxis.tick_params(direction="in") self.m_secondaryYAxis.margins(x= 0.0) #normally plt.show() now, but different for tk self.canvas = FigureCanvasTkAgg(self.m_figure,self) self.canvas.draw() # self.canvas.draw_idle() # canvas.get_tk_widget().grid(row=0,column=0,sticky="nsew") # self.grid_rowconfigure(index=0,weight=1,minsize=self.winfo_height()) # self.grid_columnconfigure(index=0,weight=1,minsize=self.winfo_width()) # self.canvas.get_tk_widget().grid(sticky = "nsew", row = 0, column = 0) # self.pack_propagate(0)#should stop grid resizing self.resizeDateTime = datetime.now() self.plotHidden = False self.m_toolbar = CustomNavigationToolbar(self.canvas, self, root) self.m_toolbar.update() # self.canvas.get_tk_widget().pack(side=tk.TOP,fill=tk.BOTH,expand=True) # self.canvas.get_tk_widget().place(anchor="nw",bordermode=tk.OUTSIDE,height=self.winfo_height(),width=self.winfo_width()) self.canvas.get_tk_widget().place(anchor="nw",bordermode=tk.INSIDE,relheight = 1.0, relwidth = 1.0) def clearPlots(self): if(len(self.m_subplot.lines) > 0): for i in range(len(self.m_subplot.lines)-1,-1,-1): line = self.m_subplot.lines.pop(i) del line if(len(self.m_subplot.patches) > 0): for i in range(len(self.m_subplot.patches)-1,-1,-1): line = self.m_subplot.patches.pop(i) del line if(self.m_secondaryYAxisRequired): if(len(self.m_secondaryYAxis.lines) > 0): for i in range(len(self.m_secondaryYAxis.lines)-1,-1,-1): line = self.m_secondaryYAxis.lines.pop(i) del line self.canvas.draw_idle() def _switchToMarkers(self, axes): for child in axes.get_children(): if(type(child) is mpl.lines.Line2D): child.set_linestyle('None') 
child.set_marker('+') def switchToMarkers(self): self._switchToMarkers(self.m_subplot) if(self.m_secondaryYAxisRequired): self._switchToMarkers(self.m_secondaryYAxis) self.canvas.draw_idle() def _switchToLines(self, axes): for child in axes.get_children(): if(type(child) is mpl.lines.Line2D): child.set_marker('None') child.set_linestyle('solid') def switchToLines(self): self._switchToLines(self.m_subplot) if(self.m_secondaryYAxisRequired): self._switchToLines(self.m_secondaryYAxis) self.canvas.draw_idle() def toggleMarkers(self): if(self.m_usingMarkers): self.switchToLines() self.m_usingMarkers = False else: self.switchToMarkers() self.m_usingMarkers = True self.canvas.draw_idle() def _addLinePlots(self, axes, ndarrayData, labels, logXAxis, logYAxis, color, shouldRelim, pLineWidth = 1): maxX = None maxY = None minX = None if ndarrayData.ndim >= 2: for i in range(1,ndarrayData.shape[0]): if (type(labels) is str): axes.plot(ndarrayData[0,:],ndarrayData[i,:], label = labels, linewidth=pLineWidth, color = color) elif(labels != None): axes.plot(ndarrayData[0,:],ndarrayData[i,:], label = labels[i-1], linewidth=pLineWidth, color = color) else: axes.plot(ndarrayData[0,:],ndarrayData[i,:], linewidth=pLineWidth, color = color) if(shouldRelim): for l in axes.lines: l_maxX = np.amax(l.get_xdata()) l_minX = np.amin(l.get_xdata()) l_maxY = 1.1*np.amax(l.get_ydata()) #max x if maxX != None: maxX = np.amax((maxX,l_maxX)) else: maxX = l_maxX #max y if maxY != None: maxY = np.amax((maxY,l_maxY)) else: maxY = l_maxY #min X if minX != None: minX = np.amin((minX,l_minX)) else: minX = l_minX axes.set_xbound(minX, maxX)#, top = None) axes.set_ybound(0, maxY)#, top = None) self.m_subplot.grid(linestyle=':') if(self.m_usingMarkers): self.switchToMarkers() #because we plot with lines by default when adding or subtracting lines if (labels != None): handles, labels = self.m_subplot.get_legend_handles_labels() if(self.m_secondaryYAxisRequired): handles2, labels2 = self.m_secondaryYAxis.get_legend_handles_labels() handles = handles + handles2 labels = labels + labels2 # reverse the order #self.m_subplot.legend(handles[::-1], labels[::-1]) # or sort them by labels hl = sorted(zip(handles, labels), key=operator.itemgetter(1)) handles, labels = zip(*hl) legend = self.m_subplot.legend(handles, labels, loc=self.m_legendLoc) if (logXAxis): axes.set_xscale("log") axes.set_xbound(minX, maxX)#math.log(maxX))#, top = None) if (logYAxis): axes.set_yscale("log") axes.set_ybound(1, maxY)#math.log(maxY))#, top = None) if(shouldRelim): axes.relim() def addPrimaryLinePlots(self, ndarrayData, labels = None, logXAxis = False, logYAxis = False, color = None, shouldRelim = True): self._addLinePlots(self.m_subplot, ndarrayData, labels, logXAxis, logYAxis, color, shouldRelim) self.canvas.draw_idle() def addSecondaryLinePlots(self, ndarrayData, labels = None, logXAxis = False, logYAxis = False, color = None, shouldRelim = True): if(not self.m_secondaryYAxisRequired): raise NameError #should use primary line plots, since secondary axis is not defined for this plot self._addLinePlots(self.m_secondaryYAxis, ndarrayData, labels, logXAxis, logYAxis, color, shouldRelim) self.canvas.draw_idle() def addVerticalLine(self, xValue): self.m_verticalLines.append(self.m_subplot.axvline(xValue, linestyle="-.", color="r")) self.canvas.draw_idle() def removeVerticalLines(self): if(len(self.m_subplot.lines) > 0): for i in range(len(self.m_subplot.lines)-1,-1,-1): if(self.m_subplot.lines[i].get_linestyle() == '-.'): line = self.m_subplot.lines.pop(i) del line 
# if(len(self.m_verticalLines) == 0): # return # for l in self.m_verticalLines: # l.remove() #this function removes the actor from the matplotlib plot, not the list # self.m_verticalLines.clear() # def __autoScaleTopY(self): # self.m_subplot.set_ylim(auto = True) # if(self.m_subplot.get_ylim()[0] < 0.0): # self.m_subplot.set_ylim(bottom=0)#, top = None) # self.m_subplot.relim() def addSecondaryScaledXAxis(self, forwardFunc, reverseFunc): self.m_secondaryScaledXAxis = self.m_subplot.secondary_xaxis("top", functions=(forwardFunc, reverseFunc)) self.canvas.draw_idle() def addSecondaryScaledYAxis(self, forwardFunc, reverseFunc): self.m_secondaryScaledXAxis = self.m_subplot.secondary_yaxis("right", functions=(forwardFunc, reverseFunc)) self.canvas.draw_idle() def getPrimaryAxes(self): return self.m_subplot def getSecondaryAxes(self): return self.m_secondaryYAxis # def setLegendCenterRight(self): # self.m_subplot.get_legend().s def shadeBelowCurve(self, x, y, color = "b"): self.m_subplot.fill(x,y,color, hatch = '/', fill = False) #MPLContainer END #PlotsFrame BEGIN class PlotsFrame(tk.Frame): # notebooks = {} def __init__(self, parent, *args, **kwargs): super().__init__(parent, *args, **kwargs) self.initUI(parent) self.m_notebooks = {} def initUI(self, parent): # self.pack(side = tk.RIGHT, fill = tk.BOTH, expand = True) self.grid_rowconfigure(0,weight=1) self.grid_columnconfigure(0,weight=1) def requestNotebook(self, key): self.m_notebooks[key] = ttk.Notebook(self) self.m_notebooks[key].grid(row=0,column=0,sticky="nsew") #initalize self.m_notebooks[key].grid_forget() #but hide right away return self.m_notebooks[key] def raiseNotebook(self, key): self.m_notebooks[key].grid(row=0,column=0,sticky="nsew") def hideNotebook(self, key): self.m_notebooks[key].grid_forget() #PlotsFrame END
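

# ---------------------------------------------------------------------------
# A minimal usage sketch (not part of the original file). MPLContainer expects a
# root window that exposes registerResizeCallback(), so the StubRoot class below
# is a hypothetical stand-in for the application's real root wrapper.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import tkinter as tk
    import numpy as np

    class StubRoot(tk.Tk):
        def __init__(self):
            super().__init__()
            self._resize_callbacks = []

        def registerResizeCallback(self, callback):
            # The real application presumably forwards resize events with a time
            # delta; this stub only records the callbacks.
            self._resize_callbacks.append(callback)

    root = StubRoot()
    root.geometry("800x600")
    container = MPLContainer(root, "Demo plot", "amplitude", "time", root)
    x = np.linspace(0.0, 10.0, 200)
    container.addPrimaryLinePlots(np.vstack((x, np.sin(x))), labels="sin(x)")
    root.mainloop()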
py
1a5b579b6f18100f1f16dc66c8d0855fd6a284bf
""" Submodule contarining all the environments and registers them. """ from .kick_env import KickEnv from .walk_env import WalkEnv from .orient_env import OrientEnv # Mappings from strings to environments isaacgym_task_map = { "bez_kick": KickEnv, "bez_walk": WalkEnv, "bez_orient": OrientEnv, }
py
1a5b57dfdd3a3ea1ec7930033347cacb2615bdbc
import flask import requests import argparse import json import websockets import uuid import asyncio import logging import sys import re import threading from flask import Flask, request from parlai.chat_service.services.api.config import HOST_URL, PARLAI_URL, PARLAI_PORT, HOST_PORT, DEBUG, LOG_FORMAT # Server configuration parser = argparse.ArgumentParser(description="API for ParlAI chatbot") parser.add_argument('--hostname', default=PARLAI_URL, help="ParlAI web server hostname.") parser.add_argument('--port', type=int, default=PARLAI_PORT, help="ParlAI web server port.") parser.add_argument('--serving_hostname', default=HOST_URL, help="API web server hostname.") parser.add_argument('--serving_port', type=int, default=HOST_PORT, help="API web server port.") args = parser.parse_args() hostname = args.hostname port = args.port serving_hostname = args.serving_hostname serving_port = args.serving_port app = Flask(__name__) blueprint = flask.Blueprint('parlai_api', __name__, template_folder='templates') # Log configuration logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=LOG_FORMAT) connections = {} websocket_uri = f"ws://{hostname}:{port}/websocket" running = False requests = [] responses = {} def get_random_id(): return str(uuid.uuid4()) def format_message(message): # Remove all spaces in general for the following chars p = re.compile(r"\s(?P<special_char>[$&+,:;=?@#|'<>.-^*()%!])\s?") text_response = p.sub(r"\g<special_char>", message) print(text_response) # Remove only one space from the left for each of the following. p = re.compile(r"(?P<special_char>[.,:?!])") return p.sub(r"\g<special_char> ", text_response) class ParlaiAPI: @staticmethod def parse(): loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) while True: if not requests: continue request = requests.pop(0) result = loop.run_until_complete(request[1]()) responses[request[0]] = result @staticmethod async def send_message(user_message, message_history=[], persona=False): if persona: message = "your persona: " else: message = "" message += user_message request_dict = {"text": message, "message_history": message_history} request_string = json.dumps(request_dict) request_bytes = bytes(request_string, encoding="UTF-8") print(request_bytes) try: async with websockets.connect(websocket_uri) as ws: await ws.send(request_bytes) response = await ws.recv() response = json.loads(response) print(response) try: response['text'] = format_message(response.get('text')) except Exception as e: print(e) return response except Exception as e: return {'text': str(e), 'error': True} @blueprint.route('/api/send_message', methods=["POST"]) def send_message(): request_id = get_random_id() data = request.get_json() loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) message_text, message_history = data.get('text', None), data.get('message_history', []) requests.append([request_id, lambda: ParlaiAPI.send_message(message_text, message_history)]) print(str(requests)) logging.warning(str(requests)) while request_id not in responses: pass result = responses[request_id] del responses[request_id] return result, 200 async def main(): thread = threading.Thread(target=ParlaiAPI.parse) thread.start() app.register_blueprint(blueprint) app.debug = True app.run(host=serving_hostname, threaded=True, port=serving_port, debug=DEBUG) main_loop = asyncio.get_event_loop() main_loop.run_until_complete(main())
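

# ---------------------------------------------------------------------------
# A minimal client sketch (not part of the original module), meant to live in a
# separate script while the server above is running. The localhost:8080 base URL
# is only an illustrative assumption; use the configured HOST_URL/HOST_PORT.
# `requests` is imported under an alias because the module above reuses that name
# for its own in-memory queue.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import requests as http_client

    def chat(text, history=None, base_url="http://localhost:8080"):
        payload = {"text": text, "message_history": history or []}
        reply = http_client.post(f"{base_url}/api/send_message", json=payload)
        reply.raise_for_status()
        return reply.json()

    print(chat("Hello there!").get("text"))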
py
1a5b57f561787210d3f9f76261e83c671b8ec97b
import shade shade.simple_logging(http_debug=True) cloud = shade.openstack_cloud( cloud='my-vexxhost', region_name='ca-ymq-1') cloud.get_image('Ubuntu 16.04.1 LTS [2017-03-03]')
py
1a5b57f902571d59b9ec8ed69e12cab2602c7656
# Copyright (c) 2019, NVIDIA CORPORATION. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from cuml.benchmark import datagen, algorithms from cuml.benchmark.bench_helper_funcs import _training_data_to_numpy from cuml.benchmark.runners import AccuracyComparisonRunner, \ SpeedupComparisonRunner, run_variations from cuml.common.import_utils import has_umap from cuml.common.import_utils import has_xgboost import numpy as np import cudf import pytest from numba import cuda from sklearn import metrics import pandas as pd import time @pytest.mark.parametrize('dataset', ['blobs', 'regression', 'classification']) def test_data_generators(dataset): data = datagen.gen_data(dataset, "numpy", n_samples=100, n_features=10) assert isinstance(data[0], np.ndarray) assert data[0].shape[0] == 100 @pytest.mark.parametrize('input_type', ['numpy', 'cudf', 'pandas', 'gpuarray', 'gpuarray-c']) def test_data_generator_types(input_type): X, *_ = datagen.gen_data('blobs', input_type, n_samples=100, n_features=10) if input_type == 'numpy': assert isinstance(X, np.ndarray) elif input_type == 'cudf': assert isinstance(X, cudf.DataFrame) elif input_type == 'pandas': assert isinstance(X, pd.DataFrame) elif input_type == 'gpuarray': assert cuda.is_cuda_array(X) elif input_type == 'gpuarray-c': assert cuda.is_cuda_array(X) else: assert False def test_data_generator_split(): X_train, y_train, X_test, y_test = datagen.gen_data( 'blobs', 'numpy', n_samples=100, n_features=10, test_fraction=0.20 ) assert X_train.shape == (100, 10) assert X_test.shape == (25, 10) def test_run_variations(): algo = algorithms.algorithm_by_name("LogisticRegression") res = run_variations( [algo], dataset_name="classification", bench_rows=[100, 200], bench_dims=[10, 20], ) assert res.shape[0] == 4 assert (res.n_samples == 100).sum() == 2 assert (res.n_features == 20).sum() == 2 def test_speedup_runner(): class MockAlgo: def __init__(self, t): self.t = t def fit(self, X, y): time.sleep(self.t) return def predict(self, X): nr = X.shape[0] res = np.zeros(nr) res[0:int(nr / 5.0)] = 1.0 return res class FastMockAlgo(MockAlgo): def __init__(self): MockAlgo.__init__(self, 0.1) class SlowMockAlgo(MockAlgo): def __init__(self): MockAlgo.__init__(self, 2) pair = algorithms.AlgorithmPair( SlowMockAlgo, FastMockAlgo, shared_args={}, name="Mock", accuracy_function=metrics.accuracy_score, ) runner = SpeedupComparisonRunner( [20], [5], dataset_name='zeros' ) results = runner.run(pair)[0] expected_speedup = SlowMockAlgo().t / FastMockAlgo().t assert results["speedup"] == pytest.approx(expected_speedup, 0.4) def test_multi_reps(): class CountingAlgo: tot_reps = 0 def fit(self, X, y): CountingAlgo.tot_reps += 1 pair = algorithms.AlgorithmPair( CountingAlgo, CountingAlgo, shared_args={}, name="Counting", ) runner = AccuracyComparisonRunner( [20], [5], dataset_name='zeros', test_fraction=0.20, n_reps=4 ) runner.run(pair) # Double the n_reps since it is used in cpu and cuml versions assert CountingAlgo.tot_reps == 8 def test_accuracy_runner(): # Set up data that should deliver 
accuracy of 0.20 if all goes right class MockAlgo: def fit(self, X, y): return def predict(self, X): nr = X.shape[0] res = np.zeros(nr) res[0:int(nr / 5.0)] = 1.0 return res pair = algorithms.AlgorithmPair( MockAlgo, MockAlgo, shared_args={}, name="Mock", accuracy_function=metrics.accuracy_score, ) runner = AccuracyComparisonRunner( [20], [5], dataset_name='zeros', test_fraction=0.20 ) results = runner.run(pair)[0] assert results["cuml_acc"] == pytest.approx(0.80) # Only test a few algorithms (which collectively span several types) # to reduce runtime burden @pytest.mark.parametrize('algo_name', ['UMAP-Supervised', 'DBSCAN', 'LogisticRegression', 'ElasticNet', 'FIL']) def test_real_algos_runner(algo_name): pair = algorithms.algorithm_by_name(algo_name) if (algo_name == 'UMAP-Supervised' and not has_umap()) or \ (algo_name == 'FIL' and not has_xgboost()): pytest.xfail() runner = AccuracyComparisonRunner( [20], [5], dataset_name='classification', test_fraction=0.20 ) results = runner.run(pair)[0] print(results) assert results["cuml_acc"] is not None # Test FIL with several input types @pytest.mark.parametrize('input_type', ['numpy', 'cudf', 'gpuarray', 'gpuarray-c']) def test_fil_input_types(input_type): pair = algorithms.algorithm_by_name('FIL') if not has_xgboost(): pytest.xfail() runner = AccuracyComparisonRunner( [20], [5], dataset_name='classification', test_fraction=0.5, input_type=input_type) results = runner.run(pair, run_cpu=False)[0] assert results["cuml_acc"] is not None @pytest.mark.parametrize('input_type', ['numpy', 'cudf', 'pandas', 'gpuarray']) def test_training_data_to_numpy(input_type): X, y, *_ = datagen.gen_data( 'blobs', input_type, n_samples=100, n_features=10 ) X_np, y_np = _training_data_to_numpy(X, y) assert isinstance(X_np, np.ndarray) assert isinstance(y_np, np.ndarray)
py
1a5b58aa3351aed8c26392d0584fa57d5830fd45
"""mysite URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) """ from django.conf.urls import include, url from django.contrib import admin urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^polls/', include('polls.urls', namespace="polls")), ]
py
1a5b58fa6d1f0caacc94c5eaa7f3710930be09b1
# Copyright (c) Microsoft Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import asyncio import sys from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Union, cast from pyee import EventEmitter from playwright._impl._api_structures import FilePayload, Position from playwright._impl._api_types import Error from playwright._impl._connection import ( ChannelOwner, from_channel, from_nullable_channel, ) from playwright._impl._element_handle import ElementHandle, convert_select_option_values from playwright._impl._event_context_manager import EventContextManagerImpl from playwright._impl._file_chooser import normalize_file_payloads from playwright._impl._helper import ( DocumentLoadState, FrameNavigatedEvent, KeyboardModifier, MouseButton, URLMatch, URLMatcher, async_readfile, locals_to_params, monotonic_time, ) from playwright._impl._js_handle import ( JSHandle, Serializable, parse_result, serialize_argument, ) from playwright._impl._locator import FrameLocator, Locator from playwright._impl._network import Response from playwright._impl._wait_helper import WaitHelper if sys.version_info >= (3, 8): # pragma: no cover from typing import Literal else: # pragma: no cover from typing_extensions import Literal if TYPE_CHECKING: # pragma: no cover from playwright._impl._page import Page class Frame(ChannelOwner): def __init__( self, parent: ChannelOwner, type: str, guid: str, initializer: Dict ) -> None: super().__init__(parent, type, guid, initializer) self._parent_frame = from_nullable_channel(initializer.get("parentFrame")) if self._parent_frame: self._parent_frame._child_frames.append(self) self._name = initializer["name"] self._url = initializer["url"] self._detached = False self._child_frames: List[Frame] = [] self._page: "Page" self._load_states: Set[str] = set(initializer["loadStates"]) self._event_emitter = EventEmitter() self._channel.on( "loadstate", lambda params: self._on_load_state(params.get("add"), params.get("remove")), ) self._channel.on( "navigated", lambda params: self._on_frame_navigated(params), ) def __repr__(self) -> str: return f"<Frame name={self.name} url={self.url!r}>" def _on_load_state( self, add: DocumentLoadState = None, remove: DocumentLoadState = None ) -> None: if add: self._load_states.add(add) self._event_emitter.emit("loadstate", add) elif remove and remove in self._load_states: self._load_states.remove(remove) def _on_frame_navigated(self, event: FrameNavigatedEvent) -> None: self._url = event["url"] self._name = event["name"] self._event_emitter.emit("navigated", event) if "error" not in event and hasattr(self, "_page") and self._page: self._page.emit("framenavigated", self) @property def page(self) -> "Page": return self._page async def goto( self, url: str, timeout: float = None, waitUntil: DocumentLoadState = None, referer: str = None, ) -> Optional[Response]: return cast( Optional[Response], from_nullable_channel( await self._channel.send("goto", locals_to_params(locals())) ), ) def _setup_navigation_wait_helper( self, 
wait_name: str, timeout: float = None ) -> WaitHelper: wait_helper = WaitHelper(self._page, f"frame.{wait_name}") wait_helper.reject_on_event( self._page, "close", Error("Navigation failed because page was closed!") ) wait_helper.reject_on_event( self._page, "crash", Error("Navigation failed because page crashed!") ) wait_helper.reject_on_event( self._page, "framedetached", Error("Navigating frame was detached!"), lambda frame: frame == self, ) if timeout is None: timeout = self._page._timeout_settings.navigation_timeout() wait_helper.reject_on_timeout(timeout, f"Timeout {timeout}ms exceeded.") return wait_helper def expect_navigation( self, url: URLMatch = None, wait_until: DocumentLoadState = None, timeout: float = None, ) -> EventContextManagerImpl[Response]: if not wait_until: wait_until = "load" if timeout is None: timeout = self._page._timeout_settings.navigation_timeout() deadline = monotonic_time() + timeout wait_helper = self._setup_navigation_wait_helper("expect_navigation", timeout) to_url = f' to "{url}"' if url else "" wait_helper.log(f"waiting for navigation{to_url} until '{wait_until}'") matcher = ( URLMatcher(self._page._browser_context._options.get("baseURL"), url) if url else None ) def predicate(event: Any) -> bool: # Any failed navigation results in a rejection. if event.get("error"): return True wait_helper.log(f' navigated to "{event["url"]}"') return not matcher or matcher.matches(event["url"]) wait_helper.wait_for_event( self._event_emitter, "navigated", predicate=predicate, ) async def continuation() -> Optional[Response]: event = await wait_helper.result() if "error" in event: raise Error(event["error"]) if wait_until not in self._load_states: t = deadline - monotonic_time() if t > 0: await self._wait_for_load_state_impl(state=wait_until, timeout=t) if "newDocument" in event and "request" in event["newDocument"]: request = from_channel(event["newDocument"]["request"]) return await request.response() return None return EventContextManagerImpl(asyncio.create_task(continuation())) async def wait_for_url( self, url: URLMatch, wait_until: DocumentLoadState = None, timeout: float = None, ) -> None: matcher = URLMatcher(self._page._browser_context._options.get("baseURL"), url) if matcher.matches(self.url): await self._wait_for_load_state_impl(state=wait_until, timeout=timeout) return async with self.expect_navigation( url=url, wait_until=wait_until, timeout=timeout ): pass async def wait_for_load_state( self, state: Literal["domcontentloaded", "load", "networkidle"] = None, timeout: float = None, ) -> None: return await self._wait_for_load_state_impl(state, timeout) async def _wait_for_load_state_impl( self, state: DocumentLoadState = None, timeout: float = None ) -> None: if not state: state = "load" if state not in ("load", "domcontentloaded", "networkidle", "commit"): raise Error( "state: expected one of (load|domcontentloaded|networkidle|commit)" ) if state in self._load_states: return wait_helper = self._setup_navigation_wait_helper("wait_for_load_state", timeout) def handle_load_state_event(actual_state: str) -> bool: wait_helper.log(f'"{actual_state}" event fired') return actual_state == state wait_helper.wait_for_event( self._event_emitter, "loadstate", handle_load_state_event, ) await wait_helper.result() async def frame_element(self) -> ElementHandle: return from_channel(await self._channel.send("frameElement")) async def evaluate(self, expression: str, arg: Serializable = None) -> Any: return parse_result( await self._channel.send( "evaluateExpression", dict( 
expression=expression, arg=serialize_argument(arg), ), ) ) async def evaluate_handle( self, expression: str, arg: Serializable = None ) -> JSHandle: return from_channel( await self._channel.send( "evaluateExpressionHandle", dict( expression=expression, arg=serialize_argument(arg), ), ) ) async def query_selector( self, selector: str, strict: bool = None ) -> Optional[ElementHandle]: return from_nullable_channel( await self._channel.send("querySelector", locals_to_params(locals())) ) async def query_selector_all(self, selector: str) -> List[ElementHandle]: return list( map( from_channel, await self._channel.send("querySelectorAll", dict(selector=selector)), ) ) async def wait_for_selector( self, selector: str, strict: bool = None, timeout: float = None, state: Literal["attached", "detached", "hidden", "visible"] = None, ) -> Optional[ElementHandle]: return from_nullable_channel( await self._channel.send("waitForSelector", locals_to_params(locals())) ) async def is_checked( self, selector: str, strict: bool = None, timeout: float = None ) -> bool: return await self._channel.send("isChecked", locals_to_params(locals())) async def is_disabled( self, selector: str, strict: bool = None, timeout: float = None ) -> bool: return await self._channel.send("isDisabled", locals_to_params(locals())) async def is_editable( self, selector: str, strict: bool = None, timeout: float = None ) -> bool: return await self._channel.send("isEditable", locals_to_params(locals())) async def is_enabled( self, selector: str, strict: bool = None, timeout: float = None ) -> bool: return await self._channel.send("isEnabled", locals_to_params(locals())) async def is_hidden( self, selector: str, strict: bool = None, timeout: float = None ) -> bool: return await self._channel.send("isHidden", locals_to_params(locals())) async def is_visible( self, selector: str, strict: bool = None, timeout: float = None ) -> bool: return await self._channel.send("isVisible", locals_to_params(locals())) async def dispatch_event( self, selector: str, type: str, eventInit: Dict = None, strict: bool = None, timeout: float = None, ) -> None: await self._channel.send( "dispatchEvent", locals_to_params( dict( selector=selector, type=type, eventInit=serialize_argument(eventInit), strict=strict, timeout=timeout, ), ), ) async def eval_on_selector( self, selector: str, expression: str, arg: Serializable = None, strict: bool = None, ) -> Any: return parse_result( await self._channel.send( "evalOnSelector", locals_to_params( dict( selector=selector, expression=expression, arg=serialize_argument(arg), strict=strict, ) ), ) ) async def eval_on_selector_all( self, selector: str, expression: str, arg: Serializable = None, ) -> Any: return parse_result( await self._channel.send( "evalOnSelectorAll", dict( selector=selector, expression=expression, arg=serialize_argument(arg), ), ) ) async def content(self) -> str: return await self._channel.send("content") async def set_content( self, html: str, timeout: float = None, waitUntil: DocumentLoadState = None, ) -> None: await self._channel.send("setContent", locals_to_params(locals())) @property def name(self) -> str: return self._name or "" @property def url(self) -> str: return self._url or "" @property def parent_frame(self) -> Optional["Frame"]: return self._parent_frame @property def child_frames(self) -> List["Frame"]: return self._child_frames.copy() def is_detached(self) -> bool: return self._detached async def add_script_tag( self, url: str = None, path: Union[str, Path] = None, content: str = None, 
type: str = None, ) -> ElementHandle: params = locals_to_params(locals()) if path: params["content"] = ( (await async_readfile(path)).decode() + "\n//# sourceURL=" + str(Path(path)) ) del params["path"] return from_channel(await self._channel.send("addScriptTag", params)) async def add_style_tag( self, url: str = None, path: Union[str, Path] = None, content: str = None ) -> ElementHandle: params = locals_to_params(locals()) if path: params["content"] = ( (await async_readfile(path)).decode() + "\n/*# sourceURL=" + str(Path(path)) + "*/" ) del params["path"] return from_channel(await self._channel.send("addStyleTag", params)) async def click( self, selector: str, modifiers: List[KeyboardModifier] = None, position: Position = None, delay: float = None, button: MouseButton = None, clickCount: int = None, timeout: float = None, force: bool = None, noWaitAfter: bool = None, strict: bool = None, trial: bool = None, ) -> None: await self._channel.send("click", locals_to_params(locals())) async def dblclick( self, selector: str, modifiers: List[KeyboardModifier] = None, position: Position = None, delay: float = None, button: MouseButton = None, timeout: float = None, force: bool = None, noWaitAfter: bool = None, strict: bool = None, trial: bool = None, ) -> None: await self._channel.send("dblclick", locals_to_params(locals())) async def tap( self, selector: str, modifiers: List[KeyboardModifier] = None, position: Position = None, timeout: float = None, force: bool = None, noWaitAfter: bool = None, strict: bool = None, trial: bool = None, ) -> None: await self._channel.send("tap", locals_to_params(locals())) async def fill( self, selector: str, value: str, timeout: float = None, noWaitAfter: bool = None, strict: bool = None, force: bool = None, ) -> None: await self._channel.send("fill", locals_to_params(locals())) def locator( self, selector: str, ) -> Locator: return Locator(self, selector) def frame_locator(self, selector: str) -> FrameLocator: return FrameLocator(self, selector) async def focus( self, selector: str, strict: bool = None, timeout: float = None ) -> None: await self._channel.send("focus", locals_to_params(locals())) async def text_content( self, selector: str, strict: bool = None, timeout: float = None ) -> Optional[str]: return await self._channel.send("textContent", locals_to_params(locals())) async def inner_text( self, selector: str, strict: bool = None, timeout: float = None ) -> str: return await self._channel.send("innerText", locals_to_params(locals())) async def inner_html( self, selector: str, strict: bool = None, timeout: float = None ) -> str: return await self._channel.send("innerHTML", locals_to_params(locals())) async def get_attribute( self, selector: str, name: str, strict: bool = None, timeout: float = None ) -> Optional[str]: return await self._channel.send("getAttribute", locals_to_params(locals())) async def hover( self, selector: str, modifiers: List[KeyboardModifier] = None, position: Position = None, timeout: float = None, force: bool = None, strict: bool = None, trial: bool = None, ) -> None: await self._channel.send("hover", locals_to_params(locals())) async def drag_and_drop( self, source: str, target: str, source_position: Position = None, target_position: Position = None, force: bool = None, noWaitAfter: bool = None, strict: bool = None, timeout: float = None, trial: bool = None, ) -> None: await self._channel.send("dragAndDrop", locals_to_params(locals())) async def select_option( self, selector: str, value: Union[str, List[str]] = None, index: 
Union[int, List[int]] = None, label: Union[str, List[str]] = None, element: Union["ElementHandle", List["ElementHandle"]] = None, timeout: float = None, noWaitAfter: bool = None, strict: bool = None, force: bool = None, ) -> List[str]: params = locals_to_params( dict( selector=selector, timeout=timeout, noWaitAfter=noWaitAfter, strict=strict, force=force, **convert_select_option_values(value, index, label, element), ) ) return await self._channel.send("selectOption", params) async def input_value( self, selector: str, strict: bool = None, timeout: float = None, ) -> str: return await self._channel.send("inputValue", locals_to_params(locals())) async def set_input_files( self, selector: str, files: Union[str, Path, FilePayload, List[Union[str, Path]], List[FilePayload]], strict: bool = None, timeout: float = None, noWaitAfter: bool = None, ) -> None: params = locals_to_params(locals()) params["files"] = await normalize_file_payloads(files) await self._channel.send("setInputFiles", params) async def type( self, selector: str, text: str, delay: float = None, strict: bool = None, timeout: float = None, noWaitAfter: bool = None, ) -> None: await self._channel.send("type", locals_to_params(locals())) async def press( self, selector: str, key: str, delay: float = None, strict: bool = None, timeout: float = None, noWaitAfter: bool = None, ) -> None: await self._channel.send("press", locals_to_params(locals())) async def check( self, selector: str, position: Position = None, timeout: float = None, force: bool = None, noWaitAfter: bool = None, strict: bool = None, trial: bool = None, ) -> None: await self._channel.send("check", locals_to_params(locals())) async def uncheck( self, selector: str, position: Position = None, timeout: float = None, force: bool = None, noWaitAfter: bool = None, strict: bool = None, trial: bool = None, ) -> None: await self._channel.send("uncheck", locals_to_params(locals())) async def wait_for_timeout(self, timeout: float) -> None: await self._channel.send("waitForTimeout", locals_to_params(locals())) async def wait_for_function( self, expression: str, arg: Serializable = None, timeout: float = None, polling: Union[float, Literal["raf"]] = None, ) -> JSHandle: params = locals_to_params(locals()) params["arg"] = serialize_argument(arg) return from_channel(await self._channel.send("waitForFunction", params)) async def title(self) -> str: return await self._channel.send("title") async def set_checked( self, selector: str, checked: bool, position: Position = None, timeout: float = None, force: bool = None, noWaitAfter: bool = None, strict: bool = None, trial: bool = None, ) -> None: if checked: await self.check( selector=selector, position=position, timeout=timeout, force=force, noWaitAfter=noWaitAfter, strict=strict, trial=trial, ) else: await self.uncheck( selector=selector, position=position, timeout=timeout, force=force, noWaitAfter=noWaitAfter, strict=strict, trial=trial, )
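

# A short usage sketch (not part of this internal module): the Frame implementation above
# is normally reached through Playwright's public async API (for example page.main_frame)
# rather than instantiated directly. The demo below reflects typical usage under that
# assumption.
if __name__ == "__main__":  # pragma: no cover
    import asyncio
    from playwright.async_api import async_playwright

    async def _demo() -> None:
        async with async_playwright() as p:
            browser = await p.chromium.launch()
            page = await browser.new_page()
            await page.goto("https://example.com")
            frame = page.main_frame  # backed by the Frame class implemented above
            print(await frame.title())
            await browser.close()

    asyncio.run(_demo())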
py
1a5b595889a5a4576dbae12b4d7bb559d141a92c
import json from unittest import TestCase from django.test.client import Client from mock import patch class ViewsDiffTest(TestCase): @patch('regcore_read.views.diff.storage') def test_get_none(self, storage): storage.for_diffs.get.return_value = None response = Client().get('/diff/lablab/oldold/newnew') self.assertEqual(404, response.status_code) @patch('regcore_read.views.diff.storage') def test_get_empty(self, storage): storage.for_diffs.get.return_value = {} response = Client().get('/diff/lablab/oldold/newnew') self.assertEqual(200, response.status_code) self.assertEqual({}, json.loads(response.content.decode('utf-8'))) @patch('regcore_read.views.diff.storage') def test_get_results(self, storage): storage.for_diffs.get.return_value = {'example': 'response'} response = Client().get('/diff/lablab/oldold/newnew') self.assertEqual(200, response.status_code) self.assertEqual({'example': 'response'}, json.loads(response.content.decode('utf-8')))
py
1a5b599f9f2e7d91e9f01ad8e16de6ba8c2c741f
from django.conf.urls import url # URLconf maps URL patterns (described as regular expressions) to views from . import views app_name = 'polls' urlpatterns = [ # ex: /polls/ url(r'^$', views.IndexView.as_view(), name='index'), # ex. /polls/5/ url(r'^(?P<pk>[0-9]+)/$', views.DetailView.as_view(), name='detail'), # ex. /polls/5/results/ url(r'^(?P<pk>[0-9]+)/results/$', views.ResultsView.as_view(), name='results'), # ex. /polls/5/vote/ url(r'^(?P<question_id>[0-9]+)/vote/$', views.vote, name='vote'), ]
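
# A small illustrative note (an assumption, not part of the original file): with the
# namespaced patterns above, URLs are typically built with reverse(), e.g.
#
#   from django.core.urlresolvers import reverse   # django.urls on newer versions
#   reverse('polls:detail', args=(5,))             # -> '/polls/5/'
#   reverse('polls:vote', args=(5,))               # -> '/polls/5/vote/'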
py
1a5b5b7e7fb577211b033496d160cd84805bf2aa
import collections
import datetime

try:
    from github import Github
except ImportError:
    raise ImportError('Install PyGithub from https://github.com/PyGithub/PyGithub or via pip')

API_TOKEN = None

if API_TOKEN is None:
    raise ValueError('Need to specify an API token')

p = Github(API_TOKEN)

last_release = datetime.datetime(year=2017, month=11, day=8)

# Collect everyone who commented on an issue/PR since the last release
authors = []
comments = p.get_repo('brian-team/brian2').get_issues_comments(since=last_release)
comment_counter = 0
for comment in comments:
    name = comment.user.name
    if name is None:
        authors.append('`@{login} <https://github.com/{login}>`_'.format(login=comment.user.login))
    else:
        authors.append(
            '{name} (`@{login} <https://github.com/{login}>`_)'.format(
                login=comment.user.login, name=name))
    comment_counter += 1
print('Counted {} comments'.format(comment_counter))

# ... and everyone who opened an issue/PR since the last release
issues = p.get_repo('brian-team/brian2').get_issues(since=last_release)
issue_counter = 0
for issue in issues:
    name = issue.user.name
    if name is None:
        authors.append('`@{login} <https://github.com/{login}>`_'.format(login=issue.user.login))
    else:
        authors.append(
            '{name} (`@{login} <https://github.com/{login}>`_)'.format(
                login=issue.user.login, name=name))
    issue_counter += 1
print('Counted {} issues'.format(issue_counter))

# Sort contributors by their number of contributions (descending)
counted = collections.Counter(authors)
sorted_authors = sorted(counted.items(), key=lambda item: item[1], reverse=True)
for name, contributions in sorted_authors:
    print('{:>4} {}'.format(contributions, name))
py
1a5b5e0aca013de7ea01ae00daeebd9888124364
"""Sable is a testing tool for SQL.""" __version__: str = "0.0.1"
py
1a5b5e4cbbf1bbb6b75d63c48b34238cb8eb74ee
relight = StaticLibrary( 'relight', sources = [ '.', 'baker', 'scene', 'rt' ], defines = [ 'RELIGHT_BUILD_LIBRARY' ] ) relight.linkExternal( Library( 'embree2', True ) )
py
1a5b5e614b4b6a653875ec81443496fe798c1269
#!/usr/bin/env python """Django's command-line utility for administrative tasks.""" import os import sys def main(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'aed_medical_29914.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main()
py
1a5b5f494b0a975da90fdd0ec56548763e40c629
c = get_config()
import os

# This file is copied to /etc/jupyter in the docker image.
# It only needs to point jupyter at the right course directory; another config file with
# any other configuration can live in the course home directory.
c.CourseDirectory.root = os.environ['COURSE_HOME_ON_CONTAINER']
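
# A hypothetical companion config (an assumption, not from the original image): the
# comment above mentions a second config file kept in the course home directory; it
# would typically carry course-level options, for example:
#
#   c = get_config()
#   c.CourseDirectory.course_id = 'example_course'   # hypothetical course id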
py
1a5b611c1c1b55c8ea2f5c6b75ae39dae21d7c82
#!/usr/bin/python # (c) 2018-2019, NetApp, Inc # GNU General Public License v3.0+ # (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'certified'} DOCUMENTATION = ''' module: na_ontap_qos_adaptive_policy_group short_description: NetApp ONTAP Adaptive Quality of Service policy group. extends_documentation_fragment: - netapp.na_ontap version_added: '2.9' author: NetApp Ansible Team (@joshedmonds) <[email protected]> description: - Create, destroy, modify, or rename an Adaptive QoS policy group on NetApp ONTAP. Module is based on the standard QoS policy group module. options: state: choices: ['present', 'absent'] description: - Whether the specified policy group should exist or not. default: 'present' type: str name: description: - The name of the policy group to manage. type: str required: true vserver: description: - Name of the vserver to use. type: str required: true from_name: description: - Name of the existing policy group to be renamed to name. type: str absolute_min_iops: description: - Absolute minimum IOPS defined by this policy. type: str expected_iops: description: - Minimum expected IOPS defined by this policy. type: str peak_iops: description: - Maximum possible IOPS per allocated or used TB|GB. type: str peak_iops_allocation: choices: ['allocated_space', 'used_space'] description: - Whether peak_iops is specified by allocated or used space. default: 'used_space' type: str force: type: bool default: False description: - Setting to 'true' forces the deletion of the workloads associated with the policy group along with the policy group. ''' EXAMPLES = """ - name: create adaptive qos policy group na_ontap_qos_adaptive_policy_group: state: present name: aq_policy_1 vserver: policy_vserver absolute_min_iops: 70IOPS expected_iops: 100IOPS/TB peak_iops: 250IOPS/TB peak_iops_allocation: allocated_space hostname: 10.193.78.30 username: admin password: netapp1! - name: modify adaptive qos policy group expected iops na_ontap_qos_adaptive_policy_group: state: present name: aq_policy_1 vserver: policy_vserver absolute_min_iops: 70IOPS expected_iops: 125IOPS/TB peak_iops: 250IOPS/TB peak_iops_allocation: allocated_space hostname: 10.193.78.30 username: admin password: netapp1! - name: modify adaptive qos policy group peak iops allocation na_ontap_qos_adaptive_policy_group: state: present name: aq_policy_1 vserver: policy_vserver absolute_min_iops: 70IOPS expected_iops: 125IOPS/TB peak_iops: 250IOPS/TB peak_iops_allocation: used_space hostname: 10.193.78.30 username: admin password: netapp1! - name: delete qos policy group na_ontap_qos_adaptive_policy_group: state: absent name: aq_policy_1 vserver: policy_vserver hostname: 10.193.78.30 username: admin password: netapp1! """ RETURN = """ """ import traceback import ansible.module_utils.netapp as netapp_utils from ansible.module_utils.netapp_module import NetAppModule from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native HAS_NETAPP_LIB = netapp_utils.has_netapp_lib() class NetAppOntapAdaptiveQosPolicyGroup(object): """ Create, delete, modify and rename a policy group. """ def __init__(self): """ Initialize the Ontap qos policy group class. 
""" self.argument_spec = netapp_utils.na_ontap_host_argument_spec() self.argument_spec.update(dict( state=dict(required=False, type='str', choices=['present', 'absent'], default='present'), name=dict(required=True, type='str'), from_name=dict(required=False, type='str'), vserver=dict(required=True, type='str'), absolute_min_iops=dict(required=False, type='str'), expected_iops=dict(required=False, type='str'), peak_iops=dict(required=False, type='str'), peak_iops_allocation=dict(choices=['allocated_space', 'used_space'], default='used_space'), force=dict(required=False, type='bool', default=False) )) self.module = AnsibleModule( argument_spec=self.argument_spec, supports_check_mode=True ) self.na_helper = NetAppModule() self.parameters = self.na_helper.set_parameters(self.module.params) if HAS_NETAPP_LIB is False: self.module.fail_json( msg="the python NetApp-Lib module is required") else: self.server = netapp_utils.setup_na_ontap_zapi( module=self.module) def get_policy_group(self, policy_group_name=None): """ Return details of a policy group. :param policy_group_name: policy group name :return: policy group details. :rtype: dict. """ if policy_group_name is None: policy_group_name = self.parameters['name'] policy_group_get_iter = netapp_utils.zapi.NaElement('qos-adaptive-policy-group-get-iter') policy_group_info = netapp_utils.zapi.NaElement('qos-adaptive-policy-group-info') policy_group_info.add_new_child('policy-group', policy_group_name) policy_group_info.add_new_child('vserver', self.parameters['vserver']) query = netapp_utils.zapi.NaElement('query') query.add_child_elem(policy_group_info) policy_group_get_iter.add_child_elem(query) result = self.server.invoke_successfully(policy_group_get_iter, True) policy_group_detail = None if result.get_child_by_name('num-records') and int(result.get_child_content('num-records')) == 1: policy_info = result.get_child_by_name('attributes-list').get_child_by_name('qos-adaptive-policy-group-info') policy_group_detail = { 'name': policy_info.get_child_content('policy-group'), 'vserver': policy_info.get_child_content('vserver'), 'absolute_min_iops': policy_info.get_child_content('absolute-min-iops'), 'expected_iops': policy_info.get_child_content('expected-iops'), 'peak_iops': policy_info.get_child_content('peak-iops'), 'peak_iops_allocation': policy_info.get_child_content('peak-iops-allocation') } return policy_group_detail def create_policy_group(self): """ create a policy group name. """ policy_group = netapp_utils.zapi.NaElement('qos-adaptive-policy-group-create') policy_group.add_new_child('policy-group', self.parameters['name']) policy_group.add_new_child('vserver', self.parameters['vserver']) if self.parameters.get('absolute_min_iops'): policy_group.add_new_child('absolute-min-iops', self.parameters['absolute_min_iops']) if self.parameters.get('expected_iops'): policy_group.add_new_child('expected-iops', self.parameters['expected_iops']) if self.parameters.get('peak_iops'): policy_group.add_new_child('peak-iops', self.parameters['peak_iops']) if self.parameters.get('peak_iops_allocation'): policy_group.add_new_child('peak-iops-allocation', self.parameters['peak_iops_allocation']) try: self.server.invoke_successfully(policy_group, True) except netapp_utils.zapi.NaApiError as error: self.module.fail_json(msg='Error creating adaptive qos policy group %s: %s' % (self.parameters['name'], to_native(error)), exception=traceback.format_exc()) def delete_policy_group(self, policy_group=None): """ delete an existing policy group. 
:param policy_group: policy group name. """ if policy_group is None: policy_group = self.parameters['name'] policy_group_obj = netapp_utils.zapi.NaElement('qos-adaptive-policy-group-delete') policy_group_obj.add_new_child('policy-group', policy_group) if self.parameters.get('force'): policy_group_obj.add_new_child('force', str(self.parameters['force'])) try: self.server.invoke_successfully(policy_group_obj, True) except netapp_utils.zapi.NaApiError as error: self.module.fail_json(msg='Error deleting adaptive qos policy group %s: %s' % (policy_group, to_native(error)), exception=traceback.format_exc()) def modify_policy_group(self): """ Modify policy group. """ policy_group_obj = netapp_utils.zapi.NaElement('qos-adaptive-policy-group-modify') policy_group_obj.add_new_child('policy-group', self.parameters['name']) if self.parameters.get('absolute_min_iops'): policy_group_obj.add_new_child('absolute-min-iops', self.parameters['absolute_min_iops']) if self.parameters.get('expected_iops'): policy_group_obj.add_new_child('expected-iops', self.parameters['expected_iops']) if self.parameters.get('peak_iops'): policy_group_obj.add_new_child('peak-iops', self.parameters['peak_iops']) if self.parameters.get('peak_iops_allocation'): policy_group_obj.add_new_child('peak-iops-allocation', self.parameters['peak_iops_allocation']) try: self.server.invoke_successfully(policy_group_obj, True) except netapp_utils.zapi.NaApiError as error: self.module.fail_json(msg='Error modifying adaptive qos policy group %s: %s' % (self.parameters['name'], to_native(error)), exception=traceback.format_exc()) def rename_policy_group(self): """ Rename policy group name. """ rename_obj = netapp_utils.zapi.NaElement('qos-adaptive-policy-group-rename') rename_obj.add_new_child('new-name', self.parameters['name']) rename_obj.add_new_child('policy-group-name', self.parameters['from_name']) try: self.server.invoke_successfully(rename_obj, True) except netapp_utils.zapi.NaApiError as error: self.module.fail_json(msg='Error renaming adaptive qos policy group %s: %s' % (self.parameters['from_name'], to_native(error)), exception=traceback.format_exc()) def modify_helper(self, modify): """ helper method to modify policy group. :param modify: modified attributes. 
""" for attribute in modify.keys(): if attribute in ['absolute_min_iops', 'expected_iops', 'peak_iops', 'peak_iops_allocation']: self.modify_policy_group() def apply(self): """ Run module based on playbook """ self.autosupport_log("na_ontap_qos_policy_group") current = self.get_policy_group() rename, cd_action = None, None if self.parameters.get('from_name'): rename = self.na_helper.is_rename_action(self.get_policy_group(self.parameters['from_name']), current) else: cd_action = self.na_helper.get_cd_action(current, self.parameters) modify = self.na_helper.get_modified_attributes(current, self.parameters) if self.na_helper.changed: if self.module.check_mode: pass else: if rename: self.rename_policy_group() if cd_action == 'create': self.create_policy_group() elif cd_action == 'delete': self.delete_policy_group() elif modify: self.modify_helper(modify) self.module.exit_json(changed=self.na_helper.changed) def autosupport_log(self, event_name): """ Create a log event against the provided vserver """ server = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=self.parameters['vserver']) netapp_utils.ems_log_event(event_name, server) def main(): '''Apply vserver operations from playbook''' qos_policy_group = NetAppOntapAdaptiveQosPolicyGroup() qos_policy_group.apply() if __name__ == '__main__': main()
py
1a5b617e159c7eed66cfed965fe05c84f9482ae6
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: envoy/config/core/v3/base.proto """Generated protocol buffer code.""" from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from envoy.config.core.v3 import address_pb2 as envoy_dot_config_dot_core_dot_v3_dot_address__pb2 from envoy.config.core.v3 import backoff_pb2 as envoy_dot_config_dot_core_dot_v3_dot_backoff__pb2 from envoy.config.core.v3 import http_uri_pb2 as envoy_dot_config_dot_core_dot_v3_dot_http__uri__pb2 from envoy.type.v3 import percent_pb2 as envoy_dot_type_dot_v3_dot_percent__pb2 from envoy.type.v3 import semantic_version_pb2 as envoy_dot_type_dot_v3_dot_semantic__version__pb2 from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 from xds.core.v3 import context_params_pb2 as xds_dot_core_dot_v3_dot_context__params__pb2 from envoy.annotations import deprecation_pb2 as envoy_dot_annotations_dot_deprecation__pb2 from udpa.annotations import migrate_pb2 as udpa_dot_annotations_dot_migrate__pb2 from udpa.annotations import status_pb2 as udpa_dot_annotations_dot_status__pb2 from udpa.annotations import versioning_pb2 as udpa_dot_annotations_dot_versioning__pb2 from validate import validate_pb2 as validate_dot_validate__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='envoy/config/core/v3/base.proto', package='envoy.config.core.v3', syntax='proto3', serialized_options=b'\n\"io.envoyproxy.envoy.config.core.v3B\tBaseProtoP\001ZBgithub.com/envoyproxy/go-control-plane/envoy/config/core/v3;corev3\272\200\310\321\006\002\020\002', create_key=_descriptor._internal_create_key, serialized_pb=b'\n\x1f\x65nvoy/config/core/v3/base.proto\x12\x14\x65nvoy.config.core.v3\x1a\"envoy/config/core/v3/address.proto\x1a\"envoy/config/core/v3/backoff.proto\x1a#envoy/config/core/v3/http_uri.proto\x1a\x1b\x65nvoy/type/v3/percent.proto\x1a$envoy/type/v3/semantic_version.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a xds/core/v3/context_params.proto\x1a#envoy/annotations/deprecation.proto\x1a\x1eudpa/annotations/migrate.proto\x1a\x1dudpa/annotations/status.proto\x1a!udpa/annotations/versioning.proto\x1a\x17validate/validate.proto\"]\n\x08Locality\x12\x0e\n\x06region\x18\x01 \x01(\t\x12\x0c\n\x04zone\x18\x02 \x01(\t\x12\x10\n\x08sub_zone\x18\x03 \x01(\t:!\x9a\xc5\x88\x1e\x1c\n\x1a\x65nvoy.api.v2.core.Locality\"\x91\x01\n\x0c\x42uildVersion\x12/\n\x07version\x18\x01 \x01(\x0b\x32\x1e.envoy.type.v3.SemanticVersion\x12)\n\x08metadata\x18\x02 \x01(\x0b\x32\x17.google.protobuf.Struct:%\x9a\xc5\x88\x1e \n\x1e\x65nvoy.api.v2.core.BuildVersion\"\xaf\x01\n\tExtension\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08\x63\x61tegory\x18\x02 \x01(\t\x12\x17\n\x0ftype_descriptor\x18\x03 \x01(\t\x12\x33\n\x07version\x18\x04 \x01(\x0b\x32\".envoy.config.core.v3.BuildVersion\x12\x10\n\x08\x64isabled\x18\x05 \x01(\x08:\"\x9a\xc5\x88\x1e\x1d\n\x1b\x65nvoy.api.v2.core.Extension\"\x8a\x05\n\x04Node\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0f\n\x07\x63luster\x18\x02 \x01(\t\x12)\n\x08metadata\x18\x03 
\x01(\x0b\x32\x17.google.protobuf.Struct\x12M\n\x12\x64ynamic_parameters\x18\x0c \x03(\x0b\x32\x31.envoy.config.core.v3.Node.DynamicParametersEntry\x12\x30\n\x08locality\x18\x04 \x01(\x0b\x32\x1e.envoy.config.core.v3.Locality\x12\x17\n\x0fuser_agent_name\x18\x06 \x01(\t\x12\x1c\n\x12user_agent_version\x18\x07 \x01(\tH\x00\x12\x46\n\x18user_agent_build_version\x18\x08 \x01(\x0b\x32\".envoy.config.core.v3.BuildVersionH\x00\x12\x33\n\nextensions\x18\t \x03(\x0b\x32\x1f.envoy.config.core.v3.Extension\x12\x17\n\x0f\x63lient_features\x18\n \x03(\t\x12G\n\x13listening_addresses\x18\x0b \x03(\x0b\x32\x1d.envoy.config.core.v3.AddressB\x0b\x18\x01\x92\xc7\x86\xd8\x04\x03\x33.0\x1aT\n\x16\x44ynamicParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.xds.core.v3.ContextParams:\x02\x38\x01:\x1d\x9a\xc5\x88\x1e\x18\n\x16\x65nvoy.api.v2.core.NodeB\x19\n\x17user_agent_version_typeJ\x04\x08\x05\x10\x06R\rbuild_version\"\xf4\x02\n\x08Metadata\x12K\n\x0f\x66ilter_metadata\x18\x01 \x03(\x0b\x32\x32.envoy.config.core.v3.Metadata.FilterMetadataEntry\x12V\n\x15typed_filter_metadata\x18\x02 \x03(\x0b\x32\x37.envoy.config.core.v3.Metadata.TypedFilterMetadataEntry\x1aN\n\x13\x46ilterMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.google.protobuf.Struct:\x02\x38\x01\x1aP\n\x18TypedFilterMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any:\x02\x38\x01:!\x9a\xc5\x88\x1e\x1c\n\x1a\x65nvoy.api.v2.core.Metadata\"l\n\rRuntimeUInt32\x12\x15\n\rdefault_value\x18\x02 \x01(\r\x12\x1c\n\x0bruntime_key\x18\x03 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01:&\x9a\xc5\x88\x1e!\n\x1f\x65nvoy.api.v2.core.RuntimeUInt32\"]\n\x0eRuntimePercent\x12-\n\rdefault_value\x18\x01 \x01(\x0b\x32\x16.envoy.type.v3.Percent\x12\x1c\n\x0bruntime_key\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01\"l\n\rRuntimeDouble\x12\x15\n\rdefault_value\x18\x01 \x01(\x01\x12\x1c\n\x0bruntime_key\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01:&\x9a\xc5\x88\x1e!\n\x1f\x65nvoy.api.v2.core.RuntimeDouble\"\x9c\x01\n\x12RuntimeFeatureFlag\x12;\n\rdefault_value\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.BoolValueB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01\x12\x1c\n\x0bruntime_key\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01:+\x9a\xc5\x88\x1e&\n$envoy.api.v2.core.RuntimeFeatureFlag\"5\n\x0eQueryParameter\x12\x14\n\x03key\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01\x12\r\n\x05value\x18\x02 \x01(\t\"s\n\x0bHeaderValue\x12\x1e\n\x03key\x18\x01 \x01(\tB\x11\xfa\x42\x0er\x0c\x10\x01(\x80\x80\x01\xc0\x01\x01\xc8\x01\x00\x12\x1e\n\x05value\x18\x02 \x01(\tB\x0f\xfa\x42\x0cr\n(\x80\x80\x01\xc0\x01\x02\xc8\x01\x00:$\x9a\xc5\x88\x1e\x1f\n\x1d\x65nvoy.api.v2.core.HeaderValue\"\xeb\x02\n\x11HeaderValueOption\x12;\n\x06header\x18\x01 \x01(\x0b\x32!.envoy.config.core.v3.HeaderValueB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01\x12*\n\x06\x61ppend\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12[\n\rappend_action\x18\x03 \x01(\x0e\x32:.envoy.config.core.v3.HeaderValueOption.HeaderAppendActionB\x08\xfa\x42\x05\x82\x01\x02\x10\x01\"d\n\x12HeaderAppendAction\x12\x1b\n\x17\x41PPEND_IF_EXISTS_OR_ADD\x10\x00\x12\x11\n\rADD_IF_ABSENT\x10\x01\x12\x1e\n\x1aOVERWRITE_IF_EXISTS_OR_ADD\x10\x02:*\x9a\xc5\x88\x1e%\n#envoy.api.v2.core.HeaderValueOption\"c\n\tHeaderMap\x12\x32\n\x07headers\x18\x01 \x03(\x0b\x32!.envoy.config.core.v3.HeaderValue:\"\x9a\xc5\x88\x1e\x1d\n\x1b\x65nvoy.api.v2.core.HeaderMap\")\n\x10WatchedDirectory\x12\x15\n\x04path\x18\x01 
\x01(\tB\x07\xfa\x42\x04r\x02\x10\x01\"\xba\x01\n\nDataSource\x12\x1b\n\x08\x66ilename\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01H\x00\x12\x16\n\x0cinline_bytes\x18\x02 \x01(\x0cH\x00\x12\x17\n\rinline_string\x18\x03 \x01(\tH\x00\x12\'\n\x14\x65nvironment_variable\x18\x04 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01H\x00:#\x9a\xc5\x88\x1e\x1e\n\x1c\x65nvoy.api.v2.core.DataSourceB\x10\n\tspecifier\x12\x03\xf8\x42\x01\"\xba\x01\n\x0bRetryPolicy\x12=\n\x0eretry_back_off\x18\x01 \x01(\x0b\x32%.envoy.config.core.v3.BackoffStrategy\x12\x46\n\x0bnum_retries\x18\x02 \x01(\x0b\x32\x1c.google.protobuf.UInt32ValueB\x13\xf2\x98\xfe\x8f\x05\r\n\x0bmax_retries:$\x9a\xc5\x88\x1e\x1f\n\x1d\x65nvoy.api.v2.core.RetryPolicy\"\xca\x01\n\x10RemoteDataSource\x12\x39\n\x08http_uri\x18\x01 \x01(\x0b\x32\x1d.envoy.config.core.v3.HttpUriB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01\x12\x17\n\x06sha256\x18\x02 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01\x12\x37\n\x0cretry_policy\x18\x03 \x01(\x0b\x32!.envoy.config.core.v3.RetryPolicy:)\x9a\xc5\x88\x1e$\n\"envoy.api.v2.core.RemoteDataSource\"\xba\x01\n\x0f\x41syncDataSource\x12\x31\n\x05local\x18\x01 \x01(\x0b\x32 .envoy.config.core.v3.DataSourceH\x00\x12\x38\n\x06remote\x18\x02 \x01(\x0b\x32&.envoy.config.core.v3.RemoteDataSourceH\x00:(\x9a\xc5\x88\x1e#\n!envoy.api.v2.core.AsyncDataSourceB\x10\n\tspecifier\x12\x03\xf8\x42\x01\"\x9d\x01\n\x0fTransportSocket\x12\x15\n\x04name\x18\x01 \x01(\tB\x07\xfa\x42\x04r\x02\x10\x01\x12,\n\x0ctyped_config\x18\x03 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00:(\x9a\xc5\x88\x1e#\n!envoy.api.v2.core.TransportSocketB\r\n\x0b\x63onfig_typeJ\x04\x08\x02\x10\x03R\x06\x63onfig\"\xa5\x01\n\x18RuntimeFractionalPercent\x12\x41\n\rdefault_value\x18\x01 \x01(\x0b\x32 .envoy.type.v3.FractionalPercentB\x08\xfa\x42\x05\x8a\x01\x02\x10\x01\x12\x13\n\x0bruntime_key\x18\x02 \x01(\t:1\x9a\xc5\x88\x1e,\n*envoy.api.v2.core.RuntimeFractionalPercent\"I\n\x0c\x43ontrolPlane\x12\x12\n\nidentifier\x18\x01 \x01(\t:%\x9a\xc5\x88\x1e \n\x1e\x65nvoy.api.v2.core.ControlPlane*(\n\x0fRoutingPriority\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x08\n\x04HIGH\x10\x01*\x89\x01\n\rRequestMethod\x12\x16\n\x12METHOD_UNSPECIFIED\x10\x00\x12\x07\n\x03GET\x10\x01\x12\x08\n\x04HEAD\x10\x02\x12\x08\n\x04POST\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\x0b\n\x07\x43ONNECT\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x12\t\n\x05TRACE\x10\x08\x12\t\n\x05PATCH\x10\t*>\n\x10TrafficDirection\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07INBOUND\x10\x01\x12\x0c\n\x08OUTBOUND\x10\x02\x42}\n\"io.envoyproxy.envoy.config.core.v3B\tBaseProtoP\x01ZBgithub.com/envoyproxy/go-control-plane/envoy/config/core/v3;corev3\xba\x80\xc8\xd1\x06\x02\x10\x02\x62\x06proto3' , dependencies=[envoy_dot_config_dot_core_dot_v3_dot_address__pb2.DESCRIPTOR,envoy_dot_config_dot_core_dot_v3_dot_backoff__pb2.DESCRIPTOR,envoy_dot_config_dot_core_dot_v3_dot_http__uri__pb2.DESCRIPTOR,envoy_dot_type_dot_v3_dot_percent__pb2.DESCRIPTOR,envoy_dot_type_dot_v3_dot_semantic__version__pb2.DESCRIPTOR,google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_struct__pb2.DESCRIPTOR,google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR,xds_dot_core_dot_v3_dot_context__params__pb2.DESCRIPTOR,envoy_dot_annotations_dot_deprecation__pb2.DESCRIPTOR,udpa_dot_annotations_dot_migrate__pb2.DESCRIPTOR,udpa_dot_annotations_dot_status__pb2.DESCRIPTOR,udpa_dot_annotations_dot_versioning__pb2.DESCRIPTOR,validate_dot_validate__pb2.DESCRIPTOR,]) _ROUTINGPRIORITY = _descriptor.EnumDescriptor( name='RoutingPriority', 
full_name='envoy.config.core.v3.RoutingPriority', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='DEFAULT', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='HIGH', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=4296, serialized_end=4336, ) _sym_db.RegisterEnumDescriptor(_ROUTINGPRIORITY) RoutingPriority = enum_type_wrapper.EnumTypeWrapper(_ROUTINGPRIORITY) _REQUESTMETHOD = _descriptor.EnumDescriptor( name='RequestMethod', full_name='envoy.config.core.v3.RequestMethod', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='METHOD_UNSPECIFIED', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='GET', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='HEAD', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='POST', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PUT', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DELETE', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONNECT', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='OPTIONS', index=7, number=7, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TRACE', index=8, number=8, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PATCH', index=9, number=9, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=4339, serialized_end=4476, ) _sym_db.RegisterEnumDescriptor(_REQUESTMETHOD) RequestMethod = enum_type_wrapper.EnumTypeWrapper(_REQUESTMETHOD) _TRAFFICDIRECTION = _descriptor.EnumDescriptor( name='TrafficDirection', full_name='envoy.config.core.v3.TrafficDirection', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='UNSPECIFIED', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INBOUND', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='OUTBOUND', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=4478, serialized_end=4540, ) _sym_db.RegisterEnumDescriptor(_TRAFFICDIRECTION) TrafficDirection = enum_type_wrapper.EnumTypeWrapper(_TRAFFICDIRECTION) DEFAULT = 0 HIGH = 1 METHOD_UNSPECIFIED = 0 GET = 1 HEAD = 2 POST = 3 PUT = 4 DELETE = 5 CONNECT = 6 OPTIONS = 7 TRACE = 
8 PATCH = 9 UNSPECIFIED = 0 INBOUND = 1 OUTBOUND = 2 _HEADERVALUEOPTION_HEADERAPPENDACTION = _descriptor.EnumDescriptor( name='HeaderAppendAction', full_name='envoy.config.core.v3.HeaderValueOption.HeaderAppendAction', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='APPEND_IF_EXISTS_OR_ADD', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ADD_IF_ABSENT', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='OVERWRITE_IF_EXISTS_OR_ADD', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=2831, serialized_end=2931, ) _sym_db.RegisterEnumDescriptor(_HEADERVALUEOPTION_HEADERAPPENDACTION) _LOCALITY = _descriptor.Descriptor( name='Locality', full_name='envoy.config.core.v3.Locality', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='region', full_name='envoy.config.core.v3.Locality.region', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='zone', full_name='envoy.config.core.v3.Locality.zone', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='sub_zone', full_name='envoy.config.core.v3.Locality.sub_zone', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036\034\n\032envoy.api.v2.core.Locality', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=516, serialized_end=609, ) _BUILDVERSION = _descriptor.Descriptor( name='BuildVersion', full_name='envoy.config.core.v3.BuildVersion', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='version', full_name='envoy.config.core.v3.BuildVersion.version', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata', full_name='envoy.config.core.v3.BuildVersion.metadata', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036 \n\036envoy.api.v2.core.BuildVersion', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=612, serialized_end=757, ) _EXTENSION = _descriptor.Descriptor( name='Extension', full_name='envoy.config.core.v3.Extension', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='name', full_name='envoy.config.core.v3.Extension.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='category', full_name='envoy.config.core.v3.Extension.category', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='type_descriptor', full_name='envoy.config.core.v3.Extension.type_descriptor', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='version', full_name='envoy.config.core.v3.Extension.version', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='disabled', full_name='envoy.config.core.v3.Extension.disabled', index=4, number=5, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036\035\n\033envoy.api.v2.core.Extension', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=760, serialized_end=935, ) _NODE_DYNAMICPARAMETERSENTRY = _descriptor.Descriptor( name='DynamicParametersEntry', full_name='envoy.config.core.v3.Node.DynamicParametersEntry', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key', full_name='envoy.config.core.v3.Node.DynamicParametersEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='envoy.config.core.v3.Node.DynamicParametersEntry.value', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'8\001', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1425, serialized_end=1509, ) _NODE = _descriptor.Descriptor( name='Node', full_name='envoy.config.core.v3.Node', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='id', full_name='envoy.config.core.v3.Node.id', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='cluster', full_name='envoy.config.core.v3.Node.cluster', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata', full_name='envoy.config.core.v3.Node.metadata', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='dynamic_parameters', full_name='envoy.config.core.v3.Node.dynamic_parameters', index=3, number=12, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='locality', full_name='envoy.config.core.v3.Node.locality', index=4, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='user_agent_name', full_name='envoy.config.core.v3.Node.user_agent_name', index=5, number=6, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='user_agent_version', full_name='envoy.config.core.v3.Node.user_agent_version', index=6, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='user_agent_build_version', full_name='envoy.config.core.v3.Node.user_agent_build_version', index=7, number=8, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='extensions', full_name='envoy.config.core.v3.Node.extensions', index=8, number=9, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='client_features', full_name='envoy.config.core.v3.Node.client_features', index=9, number=10, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='listening_addresses', full_name='envoy.config.core.v3.Node.listening_addresses', index=10, number=11, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\030\001\222\307\206\330\004\0033.0', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[_NODE_DYNAMICPARAMETERSENTRY, ], enum_types=[ ], serialized_options=b'\232\305\210\036\030\n\026envoy.api.v2.core.Node', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='user_agent_version_type', full_name='envoy.config.core.v3.Node.user_agent_version_type', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=938, serialized_end=1588, ) _METADATA_FILTERMETADATAENTRY = _descriptor.Descriptor( name='FilterMetadataEntry', full_name='envoy.config.core.v3.Metadata.FilterMetadataEntry', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key', full_name='envoy.config.core.v3.Metadata.FilterMetadataEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='envoy.config.core.v3.Metadata.FilterMetadataEntry.value', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'8\001', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1768, serialized_end=1846, ) _METADATA_TYPEDFILTERMETADATAENTRY = _descriptor.Descriptor( name='TypedFilterMetadataEntry', full_name='envoy.config.core.v3.Metadata.TypedFilterMetadataEntry', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key', full_name='envoy.config.core.v3.Metadata.TypedFilterMetadataEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, 
default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='envoy.config.core.v3.Metadata.TypedFilterMetadataEntry.value', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'8\001', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1848, serialized_end=1928, ) _METADATA = _descriptor.Descriptor( name='Metadata', full_name='envoy.config.core.v3.Metadata', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='filter_metadata', full_name='envoy.config.core.v3.Metadata.filter_metadata', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='typed_filter_metadata', full_name='envoy.config.core.v3.Metadata.typed_filter_metadata', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[_METADATA_FILTERMETADATAENTRY, _METADATA_TYPEDFILTERMETADATAENTRY, ], enum_types=[ ], serialized_options=b'\232\305\210\036\034\n\032envoy.api.v2.core.Metadata', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1591, serialized_end=1963, ) _RUNTIMEUINT32 = _descriptor.Descriptor( name='RuntimeUInt32', full_name='envoy.config.core.v3.RuntimeUInt32', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='default_value', full_name='envoy.config.core.v3.RuntimeUInt32.default_value', index=0, number=2, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='runtime_key', full_name='envoy.config.core.v3.RuntimeUInt32.runtime_key', index=1, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036!\n\037envoy.api.v2.core.RuntimeUInt32', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=1965, serialized_end=2073, ) _RUNTIMEPERCENT = _descriptor.Descriptor( name='RuntimePercent', full_name='envoy.config.core.v3.RuntimePercent', filename=None, 
file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='default_value', full_name='envoy.config.core.v3.RuntimePercent.default_value', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='runtime_key', full_name='envoy.config.core.v3.RuntimePercent.runtime_key', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2075, serialized_end=2168, ) _RUNTIMEDOUBLE = _descriptor.Descriptor( name='RuntimeDouble', full_name='envoy.config.core.v3.RuntimeDouble', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='default_value', full_name='envoy.config.core.v3.RuntimeDouble.default_value', index=0, number=1, type=1, cpp_type=5, label=1, has_default_value=False, default_value=float(0), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='runtime_key', full_name='envoy.config.core.v3.RuntimeDouble.runtime_key', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036!\n\037envoy.api.v2.core.RuntimeDouble', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2170, serialized_end=2278, ) _RUNTIMEFEATUREFLAG = _descriptor.Descriptor( name='RuntimeFeatureFlag', full_name='envoy.config.core.v3.RuntimeFeatureFlag', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='default_value', full_name='envoy.config.core.v3.RuntimeFeatureFlag.default_value', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\005\212\001\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='runtime_key', full_name='envoy.config.core.v3.RuntimeFeatureFlag.runtime_key', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], 
serialized_options=b'\232\305\210\036&\n$envoy.api.v2.core.RuntimeFeatureFlag', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2281, serialized_end=2437, ) _QUERYPARAMETER = _descriptor.Descriptor( name='QueryParameter', full_name='envoy.config.core.v3.QueryParameter', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key', full_name='envoy.config.core.v3.QueryParameter.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='envoy.config.core.v3.QueryParameter.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2439, serialized_end=2492, ) _HEADERVALUE = _descriptor.Descriptor( name='HeaderValue', full_name='envoy.config.core.v3.HeaderValue', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='key', full_name='envoy.config.core.v3.HeaderValue.key', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\016r\014\020\001(\200\200\001\300\001\001\310\001\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='value', full_name='envoy.config.core.v3.HeaderValue.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\014r\n(\200\200\001\300\001\002\310\001\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036\037\n\035envoy.api.v2.core.HeaderValue', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2494, serialized_end=2609, ) _HEADERVALUEOPTION = _descriptor.Descriptor( name='HeaderValueOption', full_name='envoy.config.core.v3.HeaderValueOption', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='header', full_name='envoy.config.core.v3.HeaderValueOption.header', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\005\212\001\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='append', full_name='envoy.config.core.v3.HeaderValueOption.append', index=1, number=2, type=11, cpp_type=10, 
label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='append_action', full_name='envoy.config.core.v3.HeaderValueOption.append_action', index=2, number=3, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\005\202\001\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _HEADERVALUEOPTION_HEADERAPPENDACTION, ], serialized_options=b'\232\305\210\036%\n#envoy.api.v2.core.HeaderValueOption', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2612, serialized_end=2975, ) _HEADERMAP = _descriptor.Descriptor( name='HeaderMap', full_name='envoy.config.core.v3.HeaderMap', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='headers', full_name='envoy.config.core.v3.HeaderMap.headers', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036\035\n\033envoy.api.v2.core.HeaderMap', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=2977, serialized_end=3076, ) _WATCHEDDIRECTORY = _descriptor.Descriptor( name='WatchedDirectory', full_name='envoy.config.core.v3.WatchedDirectory', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='path', full_name='envoy.config.core.v3.WatchedDirectory.path', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=3078, serialized_end=3119, ) _DATASOURCE = _descriptor.Descriptor( name='DataSource', full_name='envoy.config.core.v3.DataSource', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='filename', full_name='envoy.config.core.v3.DataSource.filename', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='inline_bytes', full_name='envoy.config.core.v3.DataSource.inline_bytes', index=1, number=2, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='inline_string', full_name='envoy.config.core.v3.DataSource.inline_string', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='environment_variable', full_name='envoy.config.core.v3.DataSource.environment_variable', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036\036\n\034envoy.api.v2.core.DataSource', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='specifier', full_name='envoy.config.core.v3.DataSource.specifier', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[], serialized_options=b'\370B\001'), ], serialized_start=3122, serialized_end=3308, ) _RETRYPOLICY = _descriptor.Descriptor( name='RetryPolicy', full_name='envoy.config.core.v3.RetryPolicy', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='retry_back_off', full_name='envoy.config.core.v3.RetryPolicy.retry_back_off', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='num_retries', full_name='envoy.config.core.v3.RetryPolicy.num_retries', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\362\230\376\217\005\r\n\013max_retries', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036\037\n\035envoy.api.v2.core.RetryPolicy', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=3311, serialized_end=3497, ) _REMOTEDATASOURCE = _descriptor.Descriptor( name='RemoteDataSource', full_name='envoy.config.core.v3.RemoteDataSource', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='http_uri', full_name='envoy.config.core.v3.RemoteDataSource.http_uri', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\005\212\001\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='sha256', full_name='envoy.config.core.v3.RemoteDataSource.sha256', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='retry_policy', full_name='envoy.config.core.v3.RemoteDataSource.retry_policy', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036$\n\"envoy.api.v2.core.RemoteDataSource', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=3500, serialized_end=3702, ) _ASYNCDATASOURCE = _descriptor.Descriptor( name='AsyncDataSource', full_name='envoy.config.core.v3.AsyncDataSource', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='local', full_name='envoy.config.core.v3.AsyncDataSource.local', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='remote', full_name='envoy.config.core.v3.AsyncDataSource.remote', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036#\n!envoy.api.v2.core.AsyncDataSource', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ _descriptor.OneofDescriptor( name='specifier', full_name='envoy.config.core.v3.AsyncDataSource.specifier', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[], serialized_options=b'\370B\001'), ], serialized_start=3705, serialized_end=3891, ) _TRANSPORTSOCKET = _descriptor.Descriptor( name='TransportSocket', full_name='envoy.config.core.v3.TransportSocket', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='name', full_name='envoy.config.core.v3.TransportSocket.name', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\004r\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='typed_config', full_name='envoy.config.core.v3.TransportSocket.typed_config', index=1, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036#\n!envoy.api.v2.core.TransportSocket', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ 
_descriptor.OneofDescriptor( name='config_type', full_name='envoy.config.core.v3.TransportSocket.config_type', index=0, containing_type=None, create_key=_descriptor._internal_create_key, fields=[]), ], serialized_start=3894, serialized_end=4051, ) _RUNTIMEFRACTIONALPERCENT = _descriptor.Descriptor( name='RuntimeFractionalPercent', full_name='envoy.config.core.v3.RuntimeFractionalPercent', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='default_value', full_name='envoy.config.core.v3.RuntimeFractionalPercent.default_value', index=0, number=1, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=b'\372B\005\212\001\002\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='runtime_key', full_name='envoy.config.core.v3.RuntimeFractionalPercent.runtime_key', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036,\n*envoy.api.v2.core.RuntimeFractionalPercent', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=4054, serialized_end=4219, ) _CONTROLPLANE = _descriptor.Descriptor( name='ControlPlane', full_name='envoy.config.core.v3.ControlPlane', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='identifier', full_name='envoy.config.core.v3.ControlPlane.identifier', index=0, number=1, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=b'\232\305\210\036 \n\036envoy.api.v2.core.ControlPlane', is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=4221, serialized_end=4294, ) _BUILDVERSION.fields_by_name['version'].message_type = envoy_dot_type_dot_v3_dot_semantic__version__pb2._SEMANTICVERSION _BUILDVERSION.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT _EXTENSION.fields_by_name['version'].message_type = _BUILDVERSION _NODE_DYNAMICPARAMETERSENTRY.fields_by_name['value'].message_type = xds_dot_core_dot_v3_dot_context__params__pb2._CONTEXTPARAMS _NODE_DYNAMICPARAMETERSENTRY.containing_type = _NODE _NODE.fields_by_name['metadata'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT _NODE.fields_by_name['dynamic_parameters'].message_type = _NODE_DYNAMICPARAMETERSENTRY _NODE.fields_by_name['locality'].message_type = _LOCALITY _NODE.fields_by_name['user_agent_build_version'].message_type = _BUILDVERSION _NODE.fields_by_name['extensions'].message_type = _EXTENSION _NODE.fields_by_name['listening_addresses'].message_type = envoy_dot_config_dot_core_dot_v3_dot_address__pb2._ADDRESS _NODE.oneofs_by_name['user_agent_version_type'].fields.append( _NODE.fields_by_name['user_agent_version']) 
_NODE.fields_by_name['user_agent_version'].containing_oneof = _NODE.oneofs_by_name['user_agent_version_type'] _NODE.oneofs_by_name['user_agent_version_type'].fields.append( _NODE.fields_by_name['user_agent_build_version']) _NODE.fields_by_name['user_agent_build_version'].containing_oneof = _NODE.oneofs_by_name['user_agent_version_type'] _METADATA_FILTERMETADATAENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT _METADATA_FILTERMETADATAENTRY.containing_type = _METADATA _METADATA_TYPEDFILTERMETADATAENTRY.fields_by_name['value'].message_type = google_dot_protobuf_dot_any__pb2._ANY _METADATA_TYPEDFILTERMETADATAENTRY.containing_type = _METADATA _METADATA.fields_by_name['filter_metadata'].message_type = _METADATA_FILTERMETADATAENTRY _METADATA.fields_by_name['typed_filter_metadata'].message_type = _METADATA_TYPEDFILTERMETADATAENTRY _RUNTIMEPERCENT.fields_by_name['default_value'].message_type = envoy_dot_type_dot_v3_dot_percent__pb2._PERCENT _RUNTIMEFEATUREFLAG.fields_by_name['default_value'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE _HEADERVALUEOPTION.fields_by_name['header'].message_type = _HEADERVALUE _HEADERVALUEOPTION.fields_by_name['append'].message_type = google_dot_protobuf_dot_wrappers__pb2._BOOLVALUE _HEADERVALUEOPTION.fields_by_name['append_action'].enum_type = _HEADERVALUEOPTION_HEADERAPPENDACTION _HEADERVALUEOPTION_HEADERAPPENDACTION.containing_type = _HEADERVALUEOPTION _HEADERMAP.fields_by_name['headers'].message_type = _HEADERVALUE _DATASOURCE.oneofs_by_name['specifier'].fields.append( _DATASOURCE.fields_by_name['filename']) _DATASOURCE.fields_by_name['filename'].containing_oneof = _DATASOURCE.oneofs_by_name['specifier'] _DATASOURCE.oneofs_by_name['specifier'].fields.append( _DATASOURCE.fields_by_name['inline_bytes']) _DATASOURCE.fields_by_name['inline_bytes'].containing_oneof = _DATASOURCE.oneofs_by_name['specifier'] _DATASOURCE.oneofs_by_name['specifier'].fields.append( _DATASOURCE.fields_by_name['inline_string']) _DATASOURCE.fields_by_name['inline_string'].containing_oneof = _DATASOURCE.oneofs_by_name['specifier'] _DATASOURCE.oneofs_by_name['specifier'].fields.append( _DATASOURCE.fields_by_name['environment_variable']) _DATASOURCE.fields_by_name['environment_variable'].containing_oneof = _DATASOURCE.oneofs_by_name['specifier'] _RETRYPOLICY.fields_by_name['retry_back_off'].message_type = envoy_dot_config_dot_core_dot_v3_dot_backoff__pb2._BACKOFFSTRATEGY _RETRYPOLICY.fields_by_name['num_retries'].message_type = google_dot_protobuf_dot_wrappers__pb2._UINT32VALUE _REMOTEDATASOURCE.fields_by_name['http_uri'].message_type = envoy_dot_config_dot_core_dot_v3_dot_http__uri__pb2._HTTPURI _REMOTEDATASOURCE.fields_by_name['retry_policy'].message_type = _RETRYPOLICY _ASYNCDATASOURCE.fields_by_name['local'].message_type = _DATASOURCE _ASYNCDATASOURCE.fields_by_name['remote'].message_type = _REMOTEDATASOURCE _ASYNCDATASOURCE.oneofs_by_name['specifier'].fields.append( _ASYNCDATASOURCE.fields_by_name['local']) _ASYNCDATASOURCE.fields_by_name['local'].containing_oneof = _ASYNCDATASOURCE.oneofs_by_name['specifier'] _ASYNCDATASOURCE.oneofs_by_name['specifier'].fields.append( _ASYNCDATASOURCE.fields_by_name['remote']) _ASYNCDATASOURCE.fields_by_name['remote'].containing_oneof = _ASYNCDATASOURCE.oneofs_by_name['specifier'] _TRANSPORTSOCKET.fields_by_name['typed_config'].message_type = google_dot_protobuf_dot_any__pb2._ANY _TRANSPORTSOCKET.oneofs_by_name['config_type'].fields.append( _TRANSPORTSOCKET.fields_by_name['typed_config']) 
_TRANSPORTSOCKET.fields_by_name['typed_config'].containing_oneof = _TRANSPORTSOCKET.oneofs_by_name['config_type'] _RUNTIMEFRACTIONALPERCENT.fields_by_name['default_value'].message_type = envoy_dot_type_dot_v3_dot_percent__pb2._FRACTIONALPERCENT DESCRIPTOR.message_types_by_name['Locality'] = _LOCALITY DESCRIPTOR.message_types_by_name['BuildVersion'] = _BUILDVERSION DESCRIPTOR.message_types_by_name['Extension'] = _EXTENSION DESCRIPTOR.message_types_by_name['Node'] = _NODE DESCRIPTOR.message_types_by_name['Metadata'] = _METADATA DESCRIPTOR.message_types_by_name['RuntimeUInt32'] = _RUNTIMEUINT32 DESCRIPTOR.message_types_by_name['RuntimePercent'] = _RUNTIMEPERCENT DESCRIPTOR.message_types_by_name['RuntimeDouble'] = _RUNTIMEDOUBLE DESCRIPTOR.message_types_by_name['RuntimeFeatureFlag'] = _RUNTIMEFEATUREFLAG DESCRIPTOR.message_types_by_name['QueryParameter'] = _QUERYPARAMETER DESCRIPTOR.message_types_by_name['HeaderValue'] = _HEADERVALUE DESCRIPTOR.message_types_by_name['HeaderValueOption'] = _HEADERVALUEOPTION DESCRIPTOR.message_types_by_name['HeaderMap'] = _HEADERMAP DESCRIPTOR.message_types_by_name['WatchedDirectory'] = _WATCHEDDIRECTORY DESCRIPTOR.message_types_by_name['DataSource'] = _DATASOURCE DESCRIPTOR.message_types_by_name['RetryPolicy'] = _RETRYPOLICY DESCRIPTOR.message_types_by_name['RemoteDataSource'] = _REMOTEDATASOURCE DESCRIPTOR.message_types_by_name['AsyncDataSource'] = _ASYNCDATASOURCE DESCRIPTOR.message_types_by_name['TransportSocket'] = _TRANSPORTSOCKET DESCRIPTOR.message_types_by_name['RuntimeFractionalPercent'] = _RUNTIMEFRACTIONALPERCENT DESCRIPTOR.message_types_by_name['ControlPlane'] = _CONTROLPLANE DESCRIPTOR.enum_types_by_name['RoutingPriority'] = _ROUTINGPRIORITY DESCRIPTOR.enum_types_by_name['RequestMethod'] = _REQUESTMETHOD DESCRIPTOR.enum_types_by_name['TrafficDirection'] = _TRAFFICDIRECTION _sym_db.RegisterFileDescriptor(DESCRIPTOR) Locality = _reflection.GeneratedProtocolMessageType('Locality', (_message.Message,), { 'DESCRIPTOR' : _LOCALITY, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.Locality) }) _sym_db.RegisterMessage(Locality) BuildVersion = _reflection.GeneratedProtocolMessageType('BuildVersion', (_message.Message,), { 'DESCRIPTOR' : _BUILDVERSION, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.BuildVersion) }) _sym_db.RegisterMessage(BuildVersion) Extension = _reflection.GeneratedProtocolMessageType('Extension', (_message.Message,), { 'DESCRIPTOR' : _EXTENSION, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.Extension) }) _sym_db.RegisterMessage(Extension) Node = _reflection.GeneratedProtocolMessageType('Node', (_message.Message,), { 'DynamicParametersEntry' : _reflection.GeneratedProtocolMessageType('DynamicParametersEntry', (_message.Message,), { 'DESCRIPTOR' : _NODE_DYNAMICPARAMETERSENTRY, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.Node.DynamicParametersEntry) }) , 'DESCRIPTOR' : _NODE, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.Node) }) _sym_db.RegisterMessage(Node) _sym_db.RegisterMessage(Node.DynamicParametersEntry) Metadata = _reflection.GeneratedProtocolMessageType('Metadata', (_message.Message,), { 'FilterMetadataEntry' : _reflection.GeneratedProtocolMessageType('FilterMetadataEntry', (_message.Message,), { 'DESCRIPTOR' : 
_METADATA_FILTERMETADATAENTRY, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.Metadata.FilterMetadataEntry) }) , 'TypedFilterMetadataEntry' : _reflection.GeneratedProtocolMessageType('TypedFilterMetadataEntry', (_message.Message,), { 'DESCRIPTOR' : _METADATA_TYPEDFILTERMETADATAENTRY, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.Metadata.TypedFilterMetadataEntry) }) , 'DESCRIPTOR' : _METADATA, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.Metadata) }) _sym_db.RegisterMessage(Metadata) _sym_db.RegisterMessage(Metadata.FilterMetadataEntry) _sym_db.RegisterMessage(Metadata.TypedFilterMetadataEntry) RuntimeUInt32 = _reflection.GeneratedProtocolMessageType('RuntimeUInt32', (_message.Message,), { 'DESCRIPTOR' : _RUNTIMEUINT32, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.RuntimeUInt32) }) _sym_db.RegisterMessage(RuntimeUInt32) RuntimePercent = _reflection.GeneratedProtocolMessageType('RuntimePercent', (_message.Message,), { 'DESCRIPTOR' : _RUNTIMEPERCENT, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.RuntimePercent) }) _sym_db.RegisterMessage(RuntimePercent) RuntimeDouble = _reflection.GeneratedProtocolMessageType('RuntimeDouble', (_message.Message,), { 'DESCRIPTOR' : _RUNTIMEDOUBLE, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.RuntimeDouble) }) _sym_db.RegisterMessage(RuntimeDouble) RuntimeFeatureFlag = _reflection.GeneratedProtocolMessageType('RuntimeFeatureFlag', (_message.Message,), { 'DESCRIPTOR' : _RUNTIMEFEATUREFLAG, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.RuntimeFeatureFlag) }) _sym_db.RegisterMessage(RuntimeFeatureFlag) QueryParameter = _reflection.GeneratedProtocolMessageType('QueryParameter', (_message.Message,), { 'DESCRIPTOR' : _QUERYPARAMETER, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.QueryParameter) }) _sym_db.RegisterMessage(QueryParameter) HeaderValue = _reflection.GeneratedProtocolMessageType('HeaderValue', (_message.Message,), { 'DESCRIPTOR' : _HEADERVALUE, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.HeaderValue) }) _sym_db.RegisterMessage(HeaderValue) HeaderValueOption = _reflection.GeneratedProtocolMessageType('HeaderValueOption', (_message.Message,), { 'DESCRIPTOR' : _HEADERVALUEOPTION, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.HeaderValueOption) }) _sym_db.RegisterMessage(HeaderValueOption) HeaderMap = _reflection.GeneratedProtocolMessageType('HeaderMap', (_message.Message,), { 'DESCRIPTOR' : _HEADERMAP, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.HeaderMap) }) _sym_db.RegisterMessage(HeaderMap) WatchedDirectory = _reflection.GeneratedProtocolMessageType('WatchedDirectory', (_message.Message,), { 'DESCRIPTOR' : _WATCHEDDIRECTORY, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.WatchedDirectory) }) _sym_db.RegisterMessage(WatchedDirectory) DataSource = _reflection.GeneratedProtocolMessageType('DataSource', (_message.Message,), { 
'DESCRIPTOR' : _DATASOURCE, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.DataSource) }) _sym_db.RegisterMessage(DataSource) RetryPolicy = _reflection.GeneratedProtocolMessageType('RetryPolicy', (_message.Message,), { 'DESCRIPTOR' : _RETRYPOLICY, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.RetryPolicy) }) _sym_db.RegisterMessage(RetryPolicy) RemoteDataSource = _reflection.GeneratedProtocolMessageType('RemoteDataSource', (_message.Message,), { 'DESCRIPTOR' : _REMOTEDATASOURCE, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.RemoteDataSource) }) _sym_db.RegisterMessage(RemoteDataSource) AsyncDataSource = _reflection.GeneratedProtocolMessageType('AsyncDataSource', (_message.Message,), { 'DESCRIPTOR' : _ASYNCDATASOURCE, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.AsyncDataSource) }) _sym_db.RegisterMessage(AsyncDataSource) TransportSocket = _reflection.GeneratedProtocolMessageType('TransportSocket', (_message.Message,), { 'DESCRIPTOR' : _TRANSPORTSOCKET, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.TransportSocket) }) _sym_db.RegisterMessage(TransportSocket) RuntimeFractionalPercent = _reflection.GeneratedProtocolMessageType('RuntimeFractionalPercent', (_message.Message,), { 'DESCRIPTOR' : _RUNTIMEFRACTIONALPERCENT, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.RuntimeFractionalPercent) }) _sym_db.RegisterMessage(RuntimeFractionalPercent) ControlPlane = _reflection.GeneratedProtocolMessageType('ControlPlane', (_message.Message,), { 'DESCRIPTOR' : _CONTROLPLANE, '__module__' : 'envoy.config.core.v3.base_pb2' # @@protoc_insertion_point(class_scope:envoy.config.core.v3.ControlPlane) }) _sym_db.RegisterMessage(ControlPlane) DESCRIPTOR._options = None _LOCALITY._options = None _BUILDVERSION._options = None _EXTENSION._options = None _NODE_DYNAMICPARAMETERSENTRY._options = None _NODE.fields_by_name['listening_addresses']._options = None _NODE._options = None _METADATA_FILTERMETADATAENTRY._options = None _METADATA_TYPEDFILTERMETADATAENTRY._options = None _METADATA._options = None _RUNTIMEUINT32.fields_by_name['runtime_key']._options = None _RUNTIMEUINT32._options = None _RUNTIMEPERCENT.fields_by_name['runtime_key']._options = None _RUNTIMEDOUBLE.fields_by_name['runtime_key']._options = None _RUNTIMEDOUBLE._options = None _RUNTIMEFEATUREFLAG.fields_by_name['default_value']._options = None _RUNTIMEFEATUREFLAG.fields_by_name['runtime_key']._options = None _RUNTIMEFEATUREFLAG._options = None _QUERYPARAMETER.fields_by_name['key']._options = None _HEADERVALUE.fields_by_name['key']._options = None _HEADERVALUE.fields_by_name['value']._options = None _HEADERVALUE._options = None _HEADERVALUEOPTION.fields_by_name['header']._options = None _HEADERVALUEOPTION.fields_by_name['append_action']._options = None _HEADERVALUEOPTION._options = None _HEADERMAP._options = None _WATCHEDDIRECTORY.fields_by_name['path']._options = None _DATASOURCE.oneofs_by_name['specifier']._options = None _DATASOURCE.fields_by_name['filename']._options = None _DATASOURCE.fields_by_name['environment_variable']._options = None _DATASOURCE._options = None _RETRYPOLICY.fields_by_name['num_retries']._options = None _RETRYPOLICY._options = None 
_REMOTEDATASOURCE.fields_by_name['http_uri']._options = None _REMOTEDATASOURCE.fields_by_name['sha256']._options = None _REMOTEDATASOURCE._options = None _ASYNCDATASOURCE.oneofs_by_name['specifier']._options = None _ASYNCDATASOURCE._options = None _TRANSPORTSOCKET.fields_by_name['name']._options = None _TRANSPORTSOCKET._options = None _RUNTIMEFRACTIONALPERCENT.fields_by_name['default_value']._options = None _RUNTIMEFRACTIONALPERCENT._options = None _CONTROLPLANE._options = None # @@protoc_insertion_point(module_scope)
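
# --- Hedged usage sketch (editorial addition; not emitted by protoc) ---
# A minimal illustration of how the generated message classes above are used
# with the standard protobuf Python runtime. All field values below are
# hypothetical placeholders, and only APIs common to every generated message
# class (keyword construction, attribute access, SerializeToString,
# FromString) are relied on.
if __name__ == '__main__':
    node = Node(id='node-1', cluster='edge-cluster', user_agent_name='envoy')
    node.locality.region = 'us-east-1'  # nested Locality submessage is created on assignment

    header_opt = HeaderValueOption(
        header=HeaderValue(key='x-request-id', value='abc123'),
        append_action=HeaderValueOption.ADD_IF_ABSENT)

    payload = node.SerializeToString()    # wire-format bytes
    roundtrip = Node.FromString(payload)  # parse them back
    assert roundtrip.cluster == 'edge-cluster'
    print(header_opt)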
py
1a5b618d552e44460529e663b1da1728ee3d8e8d
""" Normalization class for Matplotlib that can be used to produce colorbars. """ import inspect import warnings import numpy as np from numpy import ma from .interval import (PercentileInterval, AsymmetricPercentileInterval, ManualInterval, MinMaxInterval, BaseInterval) from .stretch import (LinearStretch, SqrtStretch, PowerStretch, LogStretch, AsinhStretch, BaseStretch) from ..utils.exceptions import AstropyDeprecationWarning try: import matplotlib # pylint: disable=W0611 from matplotlib.colors import Normalize from matplotlib import pyplot as plt except ImportError: class Normalize: def __init__(self, *args, **kwargs): raise ImportError('matplotlib is required in order to use this ' 'class.') __all__ = ['ImageNormalize', 'simple_norm', 'imshow_norm'] __doctest_requires__ = {'*': ['matplotlib']} class ImageNormalize(Normalize): """ Normalization class to be used with Matplotlib. Parameters ---------- data : `~numpy.ndarray`, optional The image array. This input is used only if ``interval`` is also input. ``data`` and ``interval`` are used to compute the vmin and/or vmax values only if ``vmin`` or ``vmax`` are not input. interval : `~astropy.visualization.BaseInterval` subclass instance, optional The interval object to apply to the input ``data`` to determine the ``vmin`` and ``vmax`` values. This input is used only if ``data`` is also input. ``data`` and ``interval`` are used to compute the vmin and/or vmax values only if ``vmin`` or ``vmax`` are not input. vmin, vmax : float, optional The minimum and maximum levels to show for the data. The ``vmin`` and ``vmax`` inputs override any calculated values from the ``interval`` and ``data`` inputs. stretch : `~astropy.visualization.BaseStretch` subclass instance The stretch object to apply to the data. The default is `~astropy.visualization.LinearStretch`. clip : bool, optional If `True`, data values outside the [0:1] range are clipped to the [0:1] range. invalid : `None` or float, optional Value to assign NaN values generated by this class. NaNs in the input ``data`` array are not changed. For matplotlib normalization, the ``invalid`` value should map to the matplotlib colormap "under" value (i.e., any finite value < 0). If `None`, then NaN values are not replaced. This keyword has no effect if ``clip=True``. 
""" def __init__(self, data=None, interval=None, vmin=None, vmax=None, stretch=LinearStretch(), clip=False, invalid=-1.0): # this super call checks for matplotlib super().__init__(vmin=vmin, vmax=vmax, clip=clip) self.vmin = vmin self.vmax = vmax if stretch is None: raise ValueError('stretch must be input') if not isinstance(stretch, BaseStretch): raise TypeError('stretch must be an instance of a BaseStretch ' 'subclass') self.stretch = stretch if interval is not None and not isinstance(interval, BaseInterval): raise TypeError('interval must be an instance of a BaseInterval ' 'subclass') self.interval = interval self.inverse_stretch = stretch.inverse self.clip = clip self.invalid = invalid # Define vmin and vmax if not None and data was input if data is not None: self._set_limits(data) def _set_limits(self, data): if self.vmin is not None and self.vmax is not None: return # Define vmin and vmax from the interval class if not None if self.interval is None: if self.vmin is None: self.vmin = np.min(data[np.isfinite(data)]) if self.vmax is None: self.vmax = np.max(data[np.isfinite(data)]) else: _vmin, _vmax = self.interval.get_limits(data) if self.vmin is None: self.vmin = _vmin if self.vmax is None: self.vmax = _vmax def __call__(self, values, clip=None, invalid=None): """ Transform values using this normalization. Parameters ---------- values : array_like The input values. clip : bool, optional If `True`, values outside the [0:1] range are clipped to the [0:1] range. If `None` then the ``clip`` value from the `ImageNormalize` instance is used (the default of which is `False`). invalid : `None` or float, optional Value to assign NaN values generated by this class. NaNs in the input ``data`` array are not changed. For matplotlib normalization, the ``invalid`` value should map to the matplotlib colormap "under" value (i.e., any finite value < 0). If `None`, then the `ImageNormalize` instance value is used. This keyword has no effect if ``clip=True``. 
""" if clip is None: clip = self.clip if invalid is None: invalid = self.invalid if isinstance(values, ma.MaskedArray): if clip: mask = False else: mask = values.mask values = values.filled(self.vmax) else: mask = False # Make sure scalars get broadcast to 1-d if np.isscalar(values): values = np.array([values], dtype=float) else: # copy because of in-place operations after values = np.array(values, copy=True, dtype=float) # Define vmin and vmax if not None self._set_limits(values) # Normalize based on vmin and vmax np.subtract(values, self.vmin, out=values) np.true_divide(values, self.vmax - self.vmin, out=values) # Clip to the 0 to 1 range if clip: values = np.clip(values, 0., 1., out=values) # Stretch values if self.stretch._supports_invalid_kw: values = self.stretch(values, out=values, clip=False, invalid=invalid) else: values = self.stretch(values, out=values, clip=False) # Convert to masked array for matplotlib return ma.array(values, mask=mask) def inverse(self, values, invalid=None): # Find unstretched values in range 0 to 1 if self.inverse_stretch._supports_invalid_kw: values_norm = self.inverse_stretch(values, clip=False, invalid=invalid) else: values_norm = self.inverse_stretch(values, clip=False) # Scale to original range return values_norm * (self.vmax - self.vmin) + self.vmin def simple_norm(data, stretch='linear', power=1.0, asinh_a=0.1, min_cut=None, max_cut=None, min_percent=None, max_percent=None, percent=None, clip=False, log_a=1000, invalid=-1.0): """ Return a Normalization class that can be used for displaying images with Matplotlib. This function enables only a subset of image stretching functions available in `~astropy.visualization.mpl_normalize.ImageNormalize`. This function is used by the ``astropy.visualization.scripts.fits2bitmap`` script. Parameters ---------- data : `~numpy.ndarray` The image array. stretch : {'linear', 'sqrt', 'power', log', 'asinh'}, optional The stretch function to apply to the image. The default is 'linear'. power : float, optional The power index for ``stretch='power'``. The default is 1.0. asinh_a : float, optional For ``stretch='asinh'``, the value where the asinh curve transitions from linear to logarithmic behavior, expressed as a fraction of the normalized image. Must be in the range between 0 and 1. The default is 0.1. min_cut : float, optional The pixel value of the minimum cut level. Data values less than ``min_cut`` will set to ``min_cut`` before stretching the image. The default is the image minimum. ``min_cut`` overrides ``min_percent``. max_cut : float, optional The pixel value of the maximum cut level. Data values greater than ``min_cut`` will set to ``min_cut`` before stretching the image. The default is the image maximum. ``max_cut`` overrides ``max_percent``. min_percent : float, optional The percentile value used to determine the pixel value of minimum cut level. The default is 0.0. ``min_percent`` overrides ``percent``. max_percent : float, optional The percentile value used to determine the pixel value of maximum cut level. The default is 100.0. ``max_percent`` overrides ``percent``. percent : float, optional The percentage of the image values used to determine the pixel values of the minimum and maximum cut levels. The lower cut level will set at the ``(100 - percent) / 2`` percentile, while the upper cut level will be set at the ``(100 + percent) / 2`` percentile. The default is 100.0. ``percent`` is ignored if either ``min_percent`` or ``max_percent`` is input. 
clip : bool, optional If `True`, data values outside the [0:1] range are clipped to the [0:1] range. log_a : float, optional The log index for ``stretch='log'``. The default is 1000. invalid : `None` or float, optional Value to assign NaN values generated by the normalization. NaNs in the input ``data`` array are not changed. For matplotlib normalization, the ``invalid`` value should map to the matplotlib colormap "under" value (i.e., any finite value < 0). If `None`, then NaN values are not replaced. This keyword has no effect if ``clip=True``. Returns ------- result : `ImageNormalize` instance An `ImageNormalize` instance that can be used for displaying images with Matplotlib. """ if percent is not None: interval = PercentileInterval(percent) elif min_percent is not None or max_percent is not None: interval = AsymmetricPercentileInterval(min_percent or 0., max_percent or 100.) elif min_cut is not None or max_cut is not None: interval = ManualInterval(min_cut, max_cut) else: interval = MinMaxInterval() if stretch == 'linear': stretch = LinearStretch() elif stretch == 'sqrt': stretch = SqrtStretch() elif stretch == 'power': stretch = PowerStretch(power) elif stretch == 'log': stretch = LogStretch(log_a) elif stretch == 'asinh': stretch = AsinhStretch(asinh_a) else: raise ValueError(f'Unknown stretch: {stretch}.') vmin, vmax = interval.get_limits(data) return ImageNormalize(vmin=vmin, vmax=vmax, stretch=stretch, clip=clip, invalid=invalid) # used in imshow_norm _norm_sig = inspect.signature(ImageNormalize) def imshow_norm(data, ax=None, imshow_only_kwargs={}, **kwargs): """ A convenience function to call matplotlib's `matplotlib.pyplot.imshow` function, using an `ImageNormalize` object as the normalization. Parameters ---------- data : 2D or 3D array_like - see `~matplotlib.pyplot.imshow` The data to show. Can be whatever `~matplotlib.pyplot.imshow` and `ImageNormalize` both accept. ax : None or `~matplotlib.axes.Axes`, optional If None, use pyplot's imshow. Otherwise, calls ``imshow`` method of the supplied axes. imshow_only_kwargs : dict, optional Deprecated since Astropy v4.1. Note that settting both ``norm`` and ``vmin/vmax`` is deprecated in ``matplotlib >= 3.3``. Arguments to be passed directly to `~matplotlib.pyplot.imshow` without first trying `ImageNormalize`. This is only for keywords that have the same name in both `ImageNormalize` and `~matplotlib.pyplot.imshow` - if you want to set the `~matplotlib.pyplot.imshow` keywords only, supply them in this dictionary. kwargs : dict, optional All other keyword arguments are parsed first by the `ImageNormalize` initializer, then to `~matplotlib.pyplot.imshow`. Returns ------- result : tuple A tuple containing the `~matplotlib.image.AxesImage` generated by `~matplotlib.pyplot.imshow` as well as the `ImageNormalize` instance. Notes ----- The ``norm`` matplotlib keyword is not supported. Examples -------- .. 
plot:: :include-source: import numpy as np import matplotlib.pyplot as plt from astropy.visualization import (imshow_norm, MinMaxInterval, SqrtStretch) # Generate and display a test image image = np.arange(65536).reshape((256, 256)) fig = plt.figure() ax = fig.add_subplot(1, 1, 1) im, norm = imshow_norm(image, ax, origin='lower', interval=MinMaxInterval(), stretch=SqrtStretch()) fig.colorbar(im) """ if imshow_only_kwargs: warnings.warn('imshow_only_kwargs is deprecated since v4.1 and will ' 'be removed in a future version.', AstropyDeprecationWarning) if 'X' in kwargs: raise ValueError('Cannot give both ``X`` and ``data``') if 'norm' in kwargs: raise ValueError('There is no point in using imshow_norm if you give ' 'the ``norm`` keyword - use imshow directly if you ' 'want that.') imshow_kwargs = dict(kwargs) norm_kwargs = {'data': data} for pname in _norm_sig.parameters: if pname in kwargs: norm_kwargs[pname] = imshow_kwargs.pop(pname) for k, v in imshow_only_kwargs.items(): if k not in _norm_sig.parameters: # the below is not strictly "has to be true", but is here so that # users don't start using both imshow_only_kwargs *and* keyword # arguments to this function, as that makes for more confusing # user code raise ValueError('You provided a keyword to imshow_only_kwargs ' '({}) that is not a keyword for ImageNormalize. ' 'This is not supported. Instead you should ' 'pass the keyword directly into imshow_norm' .format(k)) imshow_kwargs[k] = v imshow_kwargs['norm'] = ImageNormalize(**norm_kwargs) if ax is None: imshow_result = plt.imshow(data, **imshow_kwargs) else: imshow_result = ax.imshow(data, **imshow_kwargs) return imshow_result, imshow_kwargs['norm']
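

# --- Usage sketch (not part of the original module) -------------------------
# A minimal, illustrative example of `simple_norm`, assuming matplotlib is
# installed; the image data below is made up for demonstration only.
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#     from astropy.visualization import simple_norm
#
#     image = np.random.poisson(5., (128, 128))
#     norm = simple_norm(image, stretch='sqrt', percent=99.)
#     plt.imshow(image, norm=norm, origin='lower')
#     plt.colorbar()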
py
1a5b637838bc15d15bbddad6550f716c9943c8fd
import logging

from airflow import DAG
from operators.candles_aggregation import CandleAggregation
from datetime import datetime, timedelta

logger = logging.getLogger(__name__)

# Task-level defaults only; DAG-level settings (max_active_runs, catchup)
# belong on the DAG constructor, not in default_args.
default_args = {
    'start_date': datetime(2020, 12, 23),
    'owner': 'airflow',
    'retries': 3,
    'retry_delay': timedelta(minutes=1),
}

with DAG(dag_id='trading_candles_aggregation',
         schedule_interval="@monthly",
         default_args=default_args,
         max_active_runs=1,
         catchup=True) as dag:
    aggregated_candles = CandleAggregation(task_id='candles_aggregation', provide_context=True, scope='month')
py
1a5b63fbab97ad1075b7c07e032d16089a70c6b1
"""Tests for the intent helpers.""" import unittest import voluptuous as vol from homeassistant.core import State from homeassistant.helpers import (intent, config_validation as cv) import pytest class MockIntentHandler(intent.IntentHandler): """Provide a mock intent handler.""" def __init__(self, slot_schema): """Initialize the mock handler.""" self.slot_schema = slot_schema def test_async_match_state(): """Test async_match_state helper.""" state1 = State('light.kitchen', 'on') state2 = State('switch.kitchen', 'on') state = intent.async_match_state(None, 'kitch', [state1, state2]) assert state is state1 class TestIntentHandler(unittest.TestCase): """Test the Home Assistant event helpers.""" def test_async_validate_slots(self): """Test async_validate_slots of IntentHandler.""" handler1 = MockIntentHandler({ vol.Required('name'): cv.string, }) with pytest.raises(vol.error.MultipleInvalid): handler1.async_validate_slots({}) with pytest.raises(vol.error.MultipleInvalid): handler1.async_validate_slots({'name': 1}) with pytest.raises(vol.error.MultipleInvalid): handler1.async_validate_slots({'name': 'kitchen'}) handler1.async_validate_slots({'name': {'value': 'kitchen'}}) handler1.async_validate_slots({ 'name': {'value': 'kitchen'}, 'probability': {'value': '0.5'} })
py
1a5b64372bb266142f76a3381bea927c9161be8f
""" Django settings for noticias project. Generated by 'django-admin startproject' using Django 3.0.8. For more information on this file, see https://docs.djangoproject.com/en/3.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.0/ref/settings/ """ import os import django_heroku # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'ai0li9j3kaz11w&ta1n)tz+j3jyjisv_+iv1(hjgw)jl4!n*na' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'haystack', 'sitio', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'noticias.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'noticias.wsgi.application' # Database # https://docs.djangoproject.com/en/3.0/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } if os.environ.get('SEARCHBOX_URL'): # estoy corriendo en heroku HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine', 'URL': os.environ.get('SEARCHBOX_URL'), 'INDEX_NAME': 'documents', }, } else: # estoy corriendo en mi maquina (o en heroku y me olvide de agregar # el addon de searchbox) HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine', 'PATH': os.path.join(BASE_DIR, 'whoosh_index'), }, } # Password validation # https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/3.0/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.0/howto/static-files/ STATIC_URL = '/static/' MEDIA_ROOT = os.path.join(BASE_DIR, 'media') MEDIA_URL = '/media/' # usar settings de heroku cuando estamos en heroku django_heroku.settings(locals())
py
1a5b64a641a33f5535bf75232b5a157fc21405a5
#! /usr/bin/env python # -*- coding: utf-8 -*- # # evaluate_mcd.py # Copyright (C) 2020 Wen-Chin HUANG # # Distributed under terms of the MIT license. # import sys import argparse import logging import numpy as np import scipy from fastdtw import fastdtw from joblib import Parallel, delayed from pathlib import Path import soundfile as sf from sprocket.speech import FeatureExtractor from crank.net.trainer.dataset import read_feature from crank.utils import load_yaml, open_featsscp from crank.utils import low_cut_filter def get_world_features(wavpath, spk, conf, spkr_conf): x, fs = sf.read(str(wavpath)) x = np.array(x, dtype=np.float) x = low_cut_filter(x, fs, cutoff=70) fe = FeatureExtractor( analyzer="world", fs=conf["feature"]["fs"], fftl=conf["feature"]["fftl"], shiftms=conf["feature"]["shiftms"], minf0=spkr_conf[spk]["minf0"], maxf0=spkr_conf[spk]["maxf0"], ) cv_f0, _, _ = fe.analyze(x) cv_mcep = fe.mcep( dim=conf["feature"]["mcep_dim"], alpha=conf["feature"]["mcep_alpha"] ) return cv_mcep, cv_f0 def calculate(cv_path, gt_file_list, conf, spkr_conf): basename = cv_path.stem number, orgspk, tarspk = basename.split("_") tarspk = tarspk.split("-")[-1] orgspk = orgspk.split("-")[-1] # get converted features. If mcep, from h5; else waveform if conf["output_feat_type"] == "mcep": cv_mcep = read_feature(cv_path, "feat") cv_f0 = read_feature(cv_path, "f0") else: cv_mcep, cv_f0 = get_world_features(cv_path, tarspk, conf, spkr_conf) # get ground truth features gt_mcep = read_feature(gt_file_list[f"{tarspk}_{number}"], "mcep") gt_f0 = read_feature(gt_file_list[f"{tarspk}_{number}"], "f0") # non-silence parts gt_idx = np.where(gt_f0 > 0)[0] gt_mcep = gt_mcep[gt_idx] cv_idx = np.where(cv_f0 > 0)[0] cv_mcep = cv_mcep[cv_idx] # DTW _, path = fastdtw(cv_mcep, gt_mcep, dist=scipy.spatial.distance.euclidean) twf = np.array(path).T cv_mcep_dtw = cv_mcep[twf[0]] gt_mcep_dtw = gt_mcep[twf[1]] # MCD diff2sum = np.sum((cv_mcep_dtw - gt_mcep_dtw) ** 2, 1) mcd = np.mean(10.0 / np.log(10.0) * np.sqrt(2 * diff2sum), 0) return f"{orgspk}-{tarspk}-{number}", mcd def main(): parser = argparse.ArgumentParser(description="calculate MCD.") parser.add_argument("--conf", type=str, help="configuration file") parser.add_argument("--spkr_conf", type=str, help="speaker configuration file") parser.add_argument( "--featdir", type=str, help="root directory of ground truth h5", ) parser.add_argument("--outwavdir", type=str, help="converted waveform directory") parser.add_argument( "--out", type=str, help="if omitted, then output to sys.stdout", ) parser.add_argument("--n_jobs", default=1, type=int, help="number of parallel jobs") args = parser.parse_args() # logging info logging.basicConfig( level=logging.INFO, stream=sys.stdout, format="%(asctime)s (%(module)s:%(lineno)d) " "%(levelname)s: %(message)s", ) # load configure files conf = load_yaml(args.conf) spkr_conf = load_yaml(args.spkr_conf) # load converted files. 
If mcep, use h5; else, waveform if conf["output_feat_type"] == "mcep": converted_files = sorted(list(Path(args.outwavdir).glob("*.h5"))) else: converted_files = sorted(list(Path(args.outwavdir).rglob("*.wav"))) logging.info(f"number of utterances = {len(converted_files)}") # load ground truth scp featdir = Path(args.featdir) / conf["feature"]["label"] gt_feats = open_featsscp(featdir / "eval" / "feats.scp") if args.out is None: out = sys.stdout else: out = open(args.out, "w", encoding="utf-8") MCD_list = Parallel(args.n_jobs)( [ delayed(calculate)(cv_path, gt_feats, conf, spkr_conf) for cv_path in converted_files ] ) # summarize by pair pairwise_MCD = {} for k, v in MCD_list: orgspk, tarspk, _ = k.split("-") pair = orgspk + " " + tarspk if pair not in pairwise_MCD: pairwise_MCD[pair] = [] pairwise_MCD[pair].append(v) for k in sorted(pairwise_MCD.keys()): mcd_list = pairwise_MCD[k] mean_mcd = float(sum(mcd_list) / len(mcd_list)) out.write(f"{k} {mean_mcd:.3f}\n") if __name__ == "__main__": main()
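
# Example invocation (a sketch only; the config and directory paths below are
# placeholders, not files shipped with this script):
#
#     python evaluate_mcd.py --conf conf/config.yml --spkr_conf conf/spkr.yml \
#         --featdir data/features --outwavdir exp/converted_wav \
#         --out exp/mcd_summary.txt --n_jobs 4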
py
1a5b64b2b90ecfc9b2a340297245d31554eed3d4
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages with open('README.md') as readme_file: readme = readme_file.read() requirements = [ "brainio @ git+https://github.com/brain-score/brainio", "brain-score @ git+https://github.com/brain-score/brain-score", "h5py", "Pillow", "numpy", "tqdm", "torch", "torchvision", "tensorflow==1.15", "keras==2.3.1", "scikit-learn", "result_caching @ git+https://github.com/brain-score/result_caching", ] setup( name='model-tools', version='0.1.0', description="Tools for predictive models of brain processing.", long_description=readme, author="Martin Schrimpf", author_email='[email protected]', url='https://github.com/brain-score/model-tools', packages=find_packages(exclude=['tests']), include_package_data=True, install_requires=requirements, license="MIT license", zip_safe=False, keywords='brain-score', classifiers=[ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 3.7', ], test_suite='tests', )
py
1a5b65a14b17261185d633f79ae56164efc710ea
import os, sys # No Files if len(sys.argv) < 2: print("Must supply file name(s)!") exit() # Iterate Files file_list = iter(sys.argv) next(file_list) for file in file_list: # File Path file_path = os.getcwd() + "/" + file # File Exists if os.path.exists(file_path): # Update Timestamp os.utime(file_path) # Create File else: with open(file_path, "w"): pass
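
# The script mirrors the Unix `touch` command: it updates the modification
# time of existing files and creates empty files otherwise.
# Example (script and file names are illustrative):
#
#     python this_script.py notes.txt data.csv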
py
1a5b65ddae18790de67579667afbe4258eea3c8a
# -*- coding: utf-8 -*- """ Created on Sat Jan 9 15:36:04 2021 @author: TT User """ import numpy as np import matplotlib.pyplot as plt from time import gmtime, strftime #STRF time go praj vremeto vo string; GMtime go praj vremeto od pochetokot na epohata vo OBJEKT from scipy.signal import butter, lfilter #%% LOG_DIR = "logs/" PLOT_DIR = "plots/" class QRSDetectorOffline(object): """ Python Offline ECG QRS Detector based on the Pan-Tomkins algorithm. The QRS complex corresponds to the depolarization of the right and left ventricles of the human heart. It is the most visually obvious part of the ECG signal. QRS complex detection is essential for time-domain ECG signal analyses, namely heart rate variability. It makes it possible to compute inter-beat interval (RR interval) values that correspond to the time between two consecutive R peaks. Thus, a QRS complex detector is an ECG-based heart contraction detector. Offline version detects QRS complexes in a pre-recorded ECG signal dataset (e.g. stored in .csv format). This implementation of a QRS Complex Detector is by no means a certified medical tool and should not be used in health monitoring. It was created and used for experimental purposes in psychophysiology and psychology. MIT License THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ #%% def __init__(self, ecg_data_path, verbose=True, log_data=False, plot_data=False, show_plot=False): """ QRSDetectorOffline class initialisation method. :param string ecg_data_path: path to the ECG dataset :param bool verbose: flag for printing the results :param bool log_data: flag for logging the results :param bool plot_data: flag for plotting the results to a file :param bool show_plot: flag for showing generated results plot - will not show anything if plot is not generated """ # Configuration parameters. self.ecg_data_path = ecg_data_path self.signal_frequency = 250 # Set ECG device frequency in samples per second here. self.filter_lowcut = 0.01 #self.filter_highcut = 15.0 self.filter_highcut = 0.99 self.filter_order = 1 self.integration_window = 15 # Change proportionally when adjusting frequency (in samples). self.findpeaks_limit = 0.35 self.findpeaks_spacing = 50 # Change proportionally when adjusting frequency (in samples). self.refractory_period = 120 # Change proportionally when adjusting frequency (in samples). self.qrs_peak_filtering_factor = 0.125 self.noise_peak_filtering_factor = 0.125 self.qrs_noise_diff_weight = 0.25 # Loaded ECG data. self.ecg_data_raw = None # Measured and calculated values. self.filtered_ecg_measurements = None self.differentiated_ecg_measurements = None self.squared_ecg_measurements = None self.integrated_ecg_measurements = None self.detected_peaks_indices = None self.detected_peaks_values = None self.qrs_peak_value = 0.0 self.noise_peak_value = 0.0 self.threshold_value = 0.0 # Detection results. self.qrs_peaks_indices = np.array([], dtype=int) self.noise_peaks_indices = np.array([], dtype=int) # Final ECG data and QRS detection results array - samples with detected QRS are marked with 1 value. self.ecg_data_detected = None # Run whole detector flow. 
self.load_ecg_data() self.detect_peaks() self.detect_qrs() if verbose: self.print_detection_data() if log_data: self.log_path = "{:s}QRS_offline_detector_log_{:s}.csv".format(LOG_DIR, strftime("%Y_%m_%d_%H_%M_%S", gmtime())) self.log_detection_data() if plot_data: self.plot_path = "{:s}QRS_offline_detector_plot_{:s}.png".format(PLOT_DIR, strftime("%Y_%m_%d_%H_%M_%S", gmtime())) self.plot_detection_data(show_plot=show_plot) #%% """Loading ECG measurements data methods.""" def load_ecg_data(self): """ Method loading ECG data set from a file. """ self.ecg_data_raw = np.loadtxt(self.ecg_data_path, skiprows=1, delimiter=',') #%% """ECG measurements data processing methods.""" def detect_peaks(self): """ Method responsible for extracting peaks from loaded ECG measurements data through measurements processing. """ # Extract measurements from loaded ECG data. ecg_measurements = self.ecg_data_raw[:, 1] #Go zema merenjeto, poshto nultiot element e TIMESTAMP # Measurements filtering - 0-15 Hz band pass filter. self.filtered_ecg_measurements = self.bandpass_filter(ecg_measurements, lowcut=self.filter_lowcut, highcut=self.filter_highcut, signal_freq=self.signal_frequency, filter_order=self.filter_order) self.filtered_ecg_measurements[:5] = self.filtered_ecg_measurements[5] # Derivative - provides QRS slope information. self.differentiated_ecg_measurements = np.ediff1d(self.filtered_ecg_measurements) # Squaring - intensifies values received in derivative. self.squared_ecg_measurements = self.differentiated_ecg_measurements ** 2 # Moving-window integration. self.integrated_ecg_measurements = np.convolve(self.squared_ecg_measurements, np.ones(self.integration_window)) # Fiducial mark - peak detection on integrated measurements. self.detected_peaks_indices = self.findpeaks(data=self.integrated_ecg_measurements, limit=self.findpeaks_limit, spacing=self.findpeaks_spacing) self.detected_peaks_values = self.integrated_ecg_measurements[self.detected_peaks_indices] #%% """QRS detection methods.""" def detect_qrs(self): """ Method responsible for classifying detected ECG measurements peaks either as noise or as QRS complex (heart beat). """ for detected_peak_index, detected_peaks_value in zip(self.detected_peaks_indices, self.detected_peaks_values): try: last_qrs_index = self.qrs_peaks_indices[-1] except IndexError: last_qrs_index = 0 # After a valid QRS complex detection, there is a 200 ms refractory period before next one can be detected. if detected_peak_index - last_qrs_index > self.refractory_period or not self.qrs_peaks_indices.size: # Peak must be classified either as a noise peak or a QRS peak. # To be classified as a QRS peak it must exceed dynamically set threshold value. if detected_peaks_value > self.threshold_value: self.qrs_peaks_indices = np.append(self.qrs_peaks_indices, detected_peak_index) # Adjust QRS peak value used later for setting QRS-noise threshold. self.qrs_peak_value = self.qrs_peak_filtering_factor * detected_peaks_value + \ (1 - self.qrs_peak_filtering_factor) * self.qrs_peak_value else: self.noise_peaks_indices = np.append(self.noise_peaks_indices, detected_peak_index) # Adjust noise peak value used later for setting QRS-noise threshold. self.noise_peak_value = self.noise_peak_filtering_factor * detected_peaks_value + \ (1 - self.noise_peak_filtering_factor) * self.noise_peak_value # Adjust QRS-noise threshold value based on previously detected QRS or noise peaks value. 
self.threshold_value = self.noise_peak_value + \ self.qrs_noise_diff_weight * (self.qrs_peak_value - self.noise_peak_value) # Create array containing both input ECG measurements data and QRS detection indication column. # We mark QRS detection with '1' flag in 'qrs_detected' log column ('0' otherwise). measurement_qrs_detection_flag = np.zeros([len(self.ecg_data_raw[:, 1]), 1]) measurement_qrs_detection_flag[self.qrs_peaks_indices] = 1 self.ecg_data_detected = np.append(self.ecg_data_raw, measurement_qrs_detection_flag, 1) #%% """Results reporting methods.""" def print_detection_data(self): """ Method responsible for printing the results. """ print("qrs peaks indices") print(self.qrs_peaks_indices) print("noise peaks indices") print(self.noise_peaks_indices) def log_detection_data(self): """ Method responsible for logging measured ECG and detection results to a file. """ with open(self.log_path, "wb") as fin: fin.write(b"timestamp,ecg_measurement,qrs_detected\n") np.savetxt(fin, self.ecg_data_detected, delimiter=",") def plot_detection_data(self, show_plot=False): """ Method responsible for plotting detection results. :param bool show_plot: flag for plotting the results and showing plot """ def plot_data(axis, data, title='', fontsize=10): axis.set_title(title, fontsize=fontsize) axis.grid(which='both', axis='both', linestyle='--') axis.plot(data, color="salmon", zorder=1) def plot_points(axis, values, indices): axis.scatter(x=indices, y=values[indices], c="black", s=50, zorder=2) plt.close('all') fig, axarr = plt.subplots(6, sharex=True, figsize=(15, 18)) plot_data(axis=axarr[0], data=self.ecg_data_raw[:, 1], title='Raw ECG measurements') plot_data(axis=axarr[1], data=self.filtered_ecg_measurements, title='Filtered ECG measurements') plot_data(axis=axarr[2], data=self.differentiated_ecg_measurements, title='Differentiated ECG measurements') plot_data(axis=axarr[3], data=self.squared_ecg_measurements, title='Squared ECG measurements') plot_data(axis=axarr[4], data=self.integrated_ecg_measurements, title='Integrated ECG measurements with QRS peaks marked (black)') plot_points(axis=axarr[4], values=self.integrated_ecg_measurements, indices=self.qrs_peaks_indices) plot_data(axis=axarr[5], data=self.ecg_data_detected[:, 1], title='Raw ECG measurements with QRS peaks marked (black)') plot_points(axis=axarr[5], values=self.ecg_data_detected[:, 1], indices=self.qrs_peaks_indices) plt.tight_layout() fig.savefig(self.plot_path) if show_plot: plt.show() plt.close() #%% """Tools methods.""" def bandpass_filter(self, data, lowcut, highcut, signal_freq, filter_order): """ Method responsible for creating and applying Butterworth filter. 
:param deque data: raw data :param float lowcut: filter lowcut frequency value :param float highcut: filter highcut frequency value :param int signal_freq: signal frequency in samples per second (Hz) :param int filter_order: filter order :return array: filtered data """ nyquist_freq = 0.5 * signal_freq #nyquist=fs/2 low = lowcut / nyquist_freq high = highcut / nyquist_freq #b, a = butter(filter_order, [low, high], btype="band",output='ba') b,a = butter(filter_order,[low, high], btype="band") #b, a = butter(filter_order, [0,1], btype="band") y = lfilter(b, a, data) return y #%% def findpeaks(self, data, spacing=1, limit=None): len = data.size x = np.zeros(len + 2 * spacing) x[:spacing] = data[0] - 1.e-6 x[-spacing:] = data[-1] - 1.e-6 x[spacing:spacing + len] = data peak_candidate = np.zeros(len) peak_candidate[:] = True for s in range(spacing): start = spacing - s - 1 h_b = x[start: start + len] # before start = spacing h_c = x[start: start + len] # central start = spacing + s + 1 h_a = x[start: start + len] # after peak_candidate = np.logical_and(peak_candidate, np.logical_and(h_c > h_b, h_c > h_a)) ind = np.argwhere(peak_candidate) ind = ind.reshape(ind.size) if limit is not None: ind = ind[data[ind] > limit] return ind #%% if __name__ == "__main__": qrs_detector = QRSDetectorOffline(ecg_data_path="ecg_data_1.csv", verbose=True, log_data=True, plot_data=True, show_plot=False)
py
1a5b661f5b710eeccdf60e3c9b2c36d3b430ea3a
import yagot @yagot.garbage_checked() def test_selfref_dict(): d1 = dict() d1['self'] = d1
py
1a5b66439b187a8f3e6f76ddf869de09c4e672de
#!/usr/bin/env python2 """mailerdaemon - classes to parse mailer-daemon messages""" import rfc822 import calendar import re import os import sys Unparseable = 'mailerdaemon.Unparseable' class ErrorMessage(rfc822.Message): def __init__(self, fp): rfc822.Message.__init__(self, fp) self.sub = '' def is_warning(self): sub = self.getheader('Subject') if not sub: return 0 sub = sub.lower() if sub.startswith('waiting mail'): return 1 if 'warning' in sub: return 1 self.sub = sub return 0 def get_errors(self): for p in EMPARSERS: self.rewindbody() try: return p(self.fp, self.sub) except Unparseable: pass raise Unparseable # List of re's or tuples of re's. # If a re, it should contain at least a group (?P<email>...) which # should refer to the email address. The re can also contain a group # (?P<reason>...) which should refer to the reason (error message). # If no reason is present, the emparse_list_reason list is used to # find a reason. # If a tuple, the tuple should contain 2 re's. The first re finds a # location, the second re is repeated one or more times to find # multiple email addresses. The second re is matched (not searched) # where the previous match ended. # The re's are compiled using the re module. emparse_list_list = [ 'error: (?P<reason>unresolvable): (?P<email>.+)', ('----- The following addresses had permanent fatal errors -----\n', '(?P<email>[^ \n].*)\n( .*\n)?'), 'remote execution.*\n.*rmail (?P<email>.+)', ('The following recipients did not receive your message:\n\n', ' +(?P<email>.*)\n(The following recipients did not receive your message:\n\n)?'), '------- Failure Reasons --------\n\n(?P<reason>.*)\n(?P<email>.*)', '^<(?P<email>.*)>:\n(?P<reason>.*)', '^(?P<reason>User mailbox exceeds allowed size): (?P<email>.+)', '^5\\d{2} <(?P<email>[^\n>]+)>\\.\\.\\. (?P<reason>.+)', '^Original-Recipient: rfc822;(?P<email>.*)', '^did not reach the following recipient\\(s\\):\n\n(?P<email>.*) on .*\n +(?P<reason>.*)', '^ <(?P<email>[^\n>]+)> \\.\\.\\. (?P<reason>.*)', '^Report on your message to: (?P<email>.*)\nReason: (?P<reason>.*)', '^Your message was not delivered to +(?P<email>.*)\n +for the following reason:\n +(?P<reason>.*)', '^ was not +(?P<email>[^ \n].*?) *\n.*\n.*\n.*\n because:.*\n +(?P<reason>[^ \n].*?) *\n', ] # compile the re's in the list and store them in-place. for i in range(len(emparse_list_list)): x = emparse_list_list[i] if type(x) is type(''): x = re.compile(x, re.MULTILINE) else: xl = [] for x in x: xl.append(re.compile(x, re.MULTILINE)) x = tuple(xl) del xl emparse_list_list[i] = x del x del i # list of re's used to find reasons (error messages). # if a string, "<>" is replaced by a copy of the email address. # The expressions are searched for in order. After the first match, # no more expressions are searched for. So, order is important. emparse_list_reason = [ r'^5\d{2} <>\.\.\. (?P<reason>.*)', '<>\.\.\. 
(?P<reason>.*)', re.compile(r'^<<< 5\d{2} (?P<reason>.*)', re.MULTILINE), re.compile('===== stderr was =====\nrmail: (?P<reason>.*)'), re.compile('^Diagnostic-Code: (?P<reason>.*)', re.MULTILINE), ] emparse_list_from = re.compile('^From:', re.IGNORECASE|re.MULTILINE) def emparse_list(fp, sub): data = fp.read() res = emparse_list_from.search(data) if res is None: from_index = len(data) else: from_index = res.start(0) errors = [] emails = [] reason = None for regexp in emparse_list_list: if type(regexp) is type(()): res = regexp[0].search(data, 0, from_index) if res is not None: try: reason = res.group('reason') except IndexError: pass while 1: res = regexp[1].match(data, res.end(0), from_index) if res is None: break emails.append(res.group('email')) break else: res = regexp.search(data, 0, from_index) if res is not None: emails.append(res.group('email')) try: reason = res.group('reason') except IndexError: pass break if not emails: raise Unparseable if not reason: reason = sub if reason[:15] == 'returned mail: ': reason = reason[15:] for regexp in emparse_list_reason: if type(regexp) is type(''): for i in range(len(emails)-1,-1,-1): email = emails[i] exp = re.compile(re.escape(email).join(regexp.split('<>')), re.MULTILINE) res = exp.search(data) if res is not None: errors.append(' '.join((email.strip()+': '+res.group('reason')).split())) del emails[i] continue res = regexp.search(data) if res is not None: reason = res.group('reason') break for email in emails: errors.append(' '.join((email.strip()+': '+reason).split())) return errors EMPARSERS = [emparse_list, ] def sort_numeric(a, b): a = int(a) b = int(b) if a < b: return -1 elif a > b: return 1 else: return 0 def parsedir(dir, modify): os.chdir(dir) pat = re.compile('^[0-9]*$') errordict = {} errorfirst = {} errorlast = {} nok = nwarn = nbad = 0 # find all numeric file names and sort them files = filter(lambda fn, pat=pat: pat.match(fn) is not None, os.listdir('.')) files.sort(sort_numeric) for fn in files: # Lets try to parse the file. fp = open(fn) m = ErrorMessage(fp) sender = m.getaddr('From') print '%s\t%-40s\t'%(fn, sender[1]), if m.is_warning(): fp.close() print 'warning only' nwarn = nwarn + 1 if modify: os.rename(fn, ','+fn) ## os.unlink(fn) continue try: errors = m.get_errors() except Unparseable: print '** Not parseable' nbad = nbad + 1 fp.close() continue print len(errors), 'errors' # Remember them for e in errors: try: mm, dd = m.getdate('date')[1:1+2] date = '%s %02d' % (calendar.month_abbr[mm], dd) except: date = '??????' if not errordict.has_key(e): errordict[e] = 1 errorfirst[e] = '%s (%s)' % (fn, date) else: errordict[e] = errordict[e] + 1 errorlast[e] = '%s (%s)' % (fn, date) fp.close() nok = nok + 1 if modify: os.rename(fn, ','+fn) ## os.unlink(fn) print '--------------' print nok, 'files parsed,',nwarn,'files warning-only,', print nbad,'files unparseable' print '--------------' list = [] for e in errordict.keys(): list.append((errordict[e], errorfirst[e], errorlast[e], e)) list.sort() for num, first, last, e in list: print '%d %s - %s\t%s' % (num, first, last, e) def main(): modify = 0 if len(sys.argv) > 1 and sys.argv[1] == '-d': modify = 1 del sys.argv[1] if len(sys.argv) > 1: for folder in sys.argv[1:]: parsedir(folder, modify) else: parsedir('/ufs/jack/Mail/errorsinbox', modify) if __name__ == '__main__' or sys.argv[0] == __name__: main()
py
1a5b666ae10705517fd6c8ae4a11e8108cb80b2a
""" Use Python Math Programming (pympl) to use glpk through python See: /home/crobin/dev/src/pympl-4.2 ~/dev/lib/python2.7/site-packages Cyril Robin -- LAAS-CNRS -- 2014 TODO Descriptif """ from pymprog import * from pprint import pprint import itertools from constant import * class GLPKSolver: 'Class embedding the GLPK solvers' """ Init the solver. """ def __init__(self): self.cost_penalty = COST_PENALTY """ Solve the problem as a position-based TOP. Update the plan of the team of Robots according to the best solution found before the time out, and return the solver status ('undef' or 'feas' or 'opt' for 'no solution found', 'found one feasible solution', and 'found optimal solution' respectively. One may provide available comlinks, otherwise the solver will not consider the necessity of communications while planning.""" def solve_position_top(self, team, utility_map, observable_points, period, available_comlinks = None ): # Utility = do not take sensor model into account # Solely consider the current position utility def computed_utility( robot, position): return utility_map.image[position] return self._solve_top(computed_utility, team, period, available_comlinks) """ Solve the problem as a perception-based TOP. Update the plan of the team of Robots according to the best solution found before the time out, and return the solver status ('undef' or 'feas' or 'opt' for 'no solution found', 'found one feasible solution', and 'found optimal solution' respectively. One may provide available comlinks, otherwise the solver will not consider the necessity of communications while planning.""" def solve_perception_top(self, team, utility_map, observable_points, period, available_comlinks = None ): # Utility = weighted sum of observed areas def computed_utility( robot, position): return sum( utility_map.image[observed] * \ robot.sensor(position,observed) \ for observed in observable_points ) return self._solve_top(computed_utility, team, period, available_comlinks) """ Solve a top problem as a flow formulation. The utility function is given as an argument (mandatory). Update the plan of the team of Robots according to the best solution found before the time out, and return the solver status ('undef' or 'feas' or 'opt' for 'no solution found', 'found one feasible solution', and 'found optimal solution' respectively. One may provide available comlinks, otherwise the solver will not consider the necessity of communications while planning.""" def _solve_top(self, computed_utility, team, period, \ available_comlinks = None ): # DATA: define useful sets R = team N = {r:r.points for r in R} M = [(r,p) for r in R for p in N[r]] E = [(r,p,q) for r in R for p in N[r] for q in N[r] ] T = period # Maximal cost allowed # Utility = weighted sum of observed areas u = { (r,p): computed_utility(r,p) for r in R for p in N[r] } # Comlink model (Boolean-like, == 1 if there is an effective comlink) if available_comlinks: """ There is a comlink if both ends can etablished a link. At least one is needed.""" def check_comlink(r,p): res = [ r.comlink(p,q) * partner.comlink(q,p) \ for (partner,q) in available_comlinks ] res.append(1) return min(res) g = { (r,p): check_comlink(r,p) for r in R for p in N[r] } else: # When there is no available comlink, assume global com is available if VERBOSITY_LEVEL > 1: print "!WARNING! [Planning] No comlink available: assume full communication links instead." 
g = { (r,p): 1 for r in R for p in N[r] } # Pymprog init and option pb = model('Team Orienteering Problem through flow formulation') # see http://pymprog.sourceforge.net/solvopt.html for options # or http://www.cnd.mcgill.ca/~ivan/it_ineq_script/python%20LP%20solvers/pympl.4.2/Doc/solvopt.rst #pb.solvopt(method='exact', verbosity=2) # seems less efficient pb.solvopt(tm_lim=SOLVER_TIME_OUT,verbosity=2) if VERBOSITY_LEVEL > 2: print "Solver options: {}".format( pb.solvopt() ) # DECISION VARIABLES # x[r,p,q] is a boolean variable that indicates if robot r goes from p to q x = pb.var(E, 'x', bool) # NETWORK CONSTRAINTS (INCLUDING FINISH CONSTRAINTS) # Only use valid path links pb.st( [ x[r,p,q] == 0 for r,p,q in E \ if not p in r.paths or not q in r.paths[p] ], 'check path validity') # each robot leaves its own starting position (once!) pb.st( [ sum( x[r,r.pose[0:2],q] for q in N[r] ) == 1 for r in R ], \ 'leave starting postion' ) # nw define the last position, which is unique nw = pb.var( M, 'nw', bool) # == 1 iff final node, 0 otherwise pb.st( [ sum( nw[r,p] for p in N[r] ) == 1 for r in R ], 'unique final pose' ) pb.st( [ nw[r,p] <= g[r,p] for r in R for p in N[r] ], 'Must communicate at the final pose' ) # robots entering an accessible node must leave the same node but the final one pb.st( [ sum( x[r,q,p] for q in N[r] ) \ - sum( x[r,p,q] for q in N[r] ) \ - nw[r,p] \ == 0 for r,p in M if p != r.pose[0:2] ], 'enter' ) # Go to the position only once pb.st( [ sum( x[r,q,p] for q in N[r] ) <= 1 for r in R for p in N[r] ], 'enter once' ) # MTZ Subtour Eliminating Constraints z = pb.var( M, 'z', int) #integer >=0 pb.st( [ z[r,p] >= 0 for r,p in M ]) pb.st( [ z[r,r.pose[0:2]] == 0 for r in R ]) pb.st( [ z[r,p] <= len(N[r]) for r,p in M ]) pb.st( [ z[r,p] - z[r,q] + 1 <= ( len(N[r]) ) * (1 - x[r,p,q]) for r,p,q in E ] ) # Cost limit plan_cost = pb.var(R, 'plan cost', float) pb.st( [ plan_cost[r] <= T for r in R ], 'maximal cost allowed') pb.st( [ sum( r.cost(p,q)*x[r,p,q] for p in N[r] for q in N[r]) <= plan_cost[r] for r in R ], 'compute plan cost') if VERBOSITY_LEVEL > 2: print "[Planning] GLPK: init done. Solving..." # OBJECTIVE pb.max( sum( u[r,j]*x[r,i,j] - self.cost_penalty*plan_cost[r] for r,i,j in E), 'utility' ) pb.solve() #solve the TOP problem if VERBOSITY_LEVEL > 1: print "[Planning] GLPK Solver status:",pb.status() # Report Karush-Kuhn-Tucker optimality conditions (= error bounds) if VERBOSITY_LEVEL > 2: print pb.reportKKT() # Retrieve solution for r in R: r.plan = [] curr = r.pose[0:2] r.plan.append(curr) for s in N[r]: for p,q in itertools.product(N[r],N[r]): if p == curr and x[r,p,q].primal == 1: curr = q r.plan.append(curr) if VERBOSITY_LEVEL > 2: print "[Planning] {} ({} chekpoints) : {}".format(r.name,len(r.plan),r.plan) print r.plan if VERBOSITY_LEVEL > 1: if pb.status() == 'undef': print "!WARNING! NO SOLUTION found by the GLPK solver so far." else: print "[Planning] Gathered utility = %.2f" % sum( u[r,j]*x[r,i,j].primal for r,i,j in E) print "[Planning] for a Global cost = %.2f " % sum(plan_cost[r].primal for r in R ) # Return status: # - feas = solution found (but no necessary optimal) # - undef = no solution so far return pb.status() """ Solve a SP problem as a flow formulation. Update the plan of the team of Robots according to the best solution found before the time out, and return the solver status ('undef' or 'feas' or 'opt' for 'no solution found', 'found one feasible solution', and 'found optimal solution' respectively. 
One may provide available comlinks, otherwise the solver will not consider the necessity of communications while planning.""" def solve_sp(self, team, utility_map, observable_points, period, available_comlinks = None ): # DATA: define useful sets R = team N = { r:r.points for r in R } M = [ (r,p) for r in R for p in N[r] ] E = [ (r,p,q) for r in R for p in N[r] for q in N[r] ] Q = [ q for q in observable_points ] V = [ (q,m) for q in Q for m in M ] T = period # Maximal cost allowed # Utility of the observable areas def get_utility( observed ): return int(utility_map.image[observed]) # int is needed for pymprog u = { q: get_utility(q) for q in Q } # Comlink model (Boolean-like, == 1 if there is an effective comlink) if available_comlinks: """ There is a comlink if both ends can etablished a link. At least one is needed.""" def check_comlink(r,p): res = [ r.comlink(p,q) * partner.comlink(q,p) \ for (partner,q) in available_comlinks ] res.append(1) return min(res) g = { (r,p): check_comlink(r,p) for r in R for p in N[r] } else: # When there is no available comlink, assume global com is available if VERBOSITY_LEVEL > 1: print "!WARNING! [Planning] No comlink available: assume full communication links instead." g = { (r,p): 1 for r in R for p in N[r] } # Pymprog init and option pb = model('Perception Team Orienteering Problem through flow formulation') # see http://pymprog.sourceforge.net/solvopt.html for options # or http://www.cnd.mcgill.ca/~ivan/it_ineq_script/python%20LP%20solvers/pympl.4.2/Doc/solvopt.rst #pb.solvopt(method='exact', verbosity=2) # seems less efficient pb.solvopt(tm_lim=SOLVER_TIME_OUT,verbosity=2) if VERBOSITY_LEVEL > 2: print "Solver options: {}".format( pb.solvopt() ) # DECISION VARIABLES # x[r,p,q] is a boolean variable that indicates if robot r goes from p to q x = pb.var(E, 'x', bool) # y[q] is a variable in [0,1] that indicates the quality of the best # observation of q by the team y = pb.var(Q, 'y', float) pb.st( [ 0 <= y[q] <= 1 for q in Q ], 'observations') # NETWORK CONSTRAINTS (INCLUDING FINISH CONSTRAINTS) # Only use valid path links pb.st( [ x[r,p,q] == 0 for r,p,q in E \ if not p in r.paths or not q in r.paths[p] ], 'check path validity') # each robot leaves its own starting position (once!) 
pb.st( [ sum( x[r,r.pose[0:2],q] for q in N[r] ) == 1 for r in R ], \ 'leave starting postion' ) # nw define the last position, which is unique nw = pb.var( M, 'nw', bool) # == 1 iff final node, 0 otherwise pb.st( [ sum( nw[r,p] for p in N[r] ) == 1 for r in R ], 'unique final pose' ) pb.st( [ nw[r,p] <= g[r,p] for r in R for p in N[r] ], 'Must communicate at the final pose' ) # robots entering an accessible node must leave the same node but the final one pb.st( [ sum( x[r,q,p] for q in N[r] ) \ - sum( x[r,p,q] for q in N[r] ) \ - nw[r,p] \ == 0 for r,p in M if p != r.pose[0:2] ], 'enter' ) # Go to the position only once pb.st( [ sum( x[r,q,p] for q in N[r] ) <= 1 for r in R for p in N[r] ], 'enter once' ) # MTZ Subtour Eliminating Constraints z = pb.var( M, 'z', int) #integer >=0 pb.st( [ z[r,p] >= 0 for r,p in M ]) pb.st( [ z[r,r.pose[0:2]] == 0 for r in R ]) pb.st( [ z[r,p] <= len(N[r]) for r,p in M ]) pb.st( [ z[r,p] - z[r,q] + 1 <= ( len(N[r]) ) * (1 - x[r,p,q]) for r,p,q in E ] ) # Cost limit plan_cost = pb.var(R, 'plan cost', float) pb.st( [ plan_cost[r] <= T for r in R ], 'maximal cost allowed') pb.st( [ sum( r.cost(p,q)*x[r,p,q] for p in N[r] for q in N[r]) <= plan_cost[r] for r in R ], 'compute plan cost') # OBJECTIVE = BEST OBSERVATIONS # Find the best observation of the observables points (aka compute y) # One wants to maximize the y[q], and to be equal to the best observation of q so far # Below is a linearization of this max function # A "Big C" used to linearize the original pb (max function with max objective) C = 9999 # New binary variables reguired by for the linearization, that, for a # given observable areas q, indicates the best observation so far (= a # couple (robot r, from position p) ) v = pb.var( V, 'v', bool) pb.st( [ sum( v[q,m] for m in M) == 1 for q in Q ], 'Best observation is unique') # = determine which v is equal to one (= which one the best observation) pb.st( [ y[q] <= ( sum(x[r,p2,p] * r.sensor(p,q) for p2 in N[r] ) + C*(1 - v[q,(r,p)]) ) for q in Q for (r,p) in M ], 'Define best observation') if VERBOSITY_LEVEL > 2: print "[Planning] GLPK: init done. Solving..." # OBJECTIVE # Maximize the utility gathered along the path pb.max( sum( u[q]*y[q] for q in Q) - sum( self.cost_penalty*plan_cost[r] for r in R), 'utility' ) pb.solve() #solve the SP problem if VERBOSITY_LEVEL > 1: print "[Planning] GLPK Solver status:",pb.status() # Report Karush-Kuhn-Tucker optimality conditions (= error bounds) if VERBOSITY_LEVEL > 2: print pb.reportKKT() # Retrieve solution for r in R: r.plan = [] curr = r.pose[0:2] r.plan.append(curr) for s in N[r]: for p,q in itertools.product(N[r],N[r]): if p == curr and x[r,p,q].primal == 1: curr = q r.plan.append(curr) if VERBOSITY_LEVEL > 2: print "[Planning] {} ({} chekpoints) : {}".format(r.name,len(r.plan),r.plan) print r.plan if VERBOSITY_LEVEL > 1: if pb.status() == 'undef': print "!WARNING! NO SOLUTION found by the GLPK solver so far." else: print "[Planning] Gathered utility = %.2f (out of %.2f)" % ( sum(u[q]*y[q].primal for q in Q) , sum( u[q] for q in Q) ) print "[Planning] for a Global cost = %.2f " % sum(plan_cost[r].primal for r in R ) # Return status: # - opt = an optimal integer solution has been found ! # - feas = solution found (but no necessary optimal) # - undef = no solution so far return pb.status()
py
1a5b66aca005982bb362cf03568dee41fb17da16
# Generated by Django 2.0.6 on 2018-07-10 00:25 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('students', '0054_auto_20180708_0818'), ] operations = [ migrations.RemoveField( model_name='parents', name='parentsname', ), migrations.RemoveField( model_name='customuser', name='name', ), migrations.RemoveField( model_name='customuser', name='religion', ), migrations.RemoveField( model_name='studentbio', name='parentsnameinfo', ), migrations.AddField( model_name='customuser', name='fathersname', field=models.CharField(blank=True, max_length=64, verbose_name='Complete Fathers Name'), ), migrations.AddField( model_name='customuser', name='fathersreligion', field=models.CharField(blank=True, choices=[('B', 'Buddhist'), ('M', 'Muslim'), ('C', 'Catholic'), ('I', 'INC')], default='C', help_text='Please select Religion', max_length=30), ), migrations.AddField( model_name='customuser', name='mothersname', field=models.CharField(blank=True, max_length=64, verbose_name='Complete Mothers Name'), ), migrations.AddField( model_name='customuser', name='mothersreligion', field=models.CharField(blank=True, choices=[('B', 'Buddhist'), ('M', 'Muslim'), ('C', 'Catholic'), ('I', 'INC')], default='C', help_text='Please select Religion', max_length=30), ), migrations.AlterField( model_name='customuser', name='applicationtype', field=models.CharField(blank=True, choices=[('CA', 'CASA Program'), ('SH', 'Senior High School Program'), ('JH', 'Junior High School Program'), ('GS', 'Grade School Program'), ('SPED', 'Special Education Program')], default='CA', help_text='Choose Application Program', max_length=10), ), migrations.AlterField( model_name='customuser', name='civilstatus', field=models.CharField(blank=True, choices=[('W', 'Widowed'), ('SP', 'Single Parent'), ('M', 'Married'), ('D', 'Divorcee')], default='M', help_text='Please select Civit Status', max_length=20), ), migrations.AlterField( model_name='students', name='groupinfo', field=models.CharField(blank=True, choices=[('TEA', 'TEACH AM'), ('G2', 'GRADE 2'), ('G4', 'GRADE 4'), ('G5', 'GRADE 5'), ('G6', 'GRADE 6'), ('G7', 'GRADE 7'), ('G3', 'GRADE 3'), ('G1', 'GRADE 1'), ('G9', 'GRADE 9'), ('TEP-GR2', 'TEACH PM GRADE 3'), ('TEP-GR1', 'TEACH PM GRADE 1'), ('CA', 'CASA AFTERNOON 1:30'), ('TEP', 'TEACH PM'), ('G10', 'GRADE 10'), ('TEP-GR2', 'TEACH PM GRADE 2'), ('G8', 'GRADE 8'), ('CM', 'CASA AM'), ('PG', 'PLAY GROUP')], default='CM', help_text='Choose Group for Students', max_length=10), ), migrations.AlterField( model_name='teachers', name='rolegroup', field=models.CharField(blank=True, choices=[('S', 'STAFF'), ('SH', 'SCHOOL HEAD'), ('F', 'FACULTY')], default='F', help_text='Please choose Role / Duty', max_length=10), ), migrations.DeleteModel( name='Parents', ), ]
py
1a5b66df8d8e9d86209391587e4a5f1e3c276314
# coding=utf8 from models import c3d_model from keras.optimizers import SGD import numpy as np import cv2 import datetime import os import configparser os.environ["CUDA_VISIBLE_DEVICES"] = "1" def main(video_stream): # read config.txt root_dir=os.path.abspath(os.path.dirname(__file__)) #获取当前文件所在的目录 configpath = os.path.join(root_dir, "config.txt") config = configparser.ConfigParser() config.read(configpath) classInd_path = config.get("C3D", "classInd_path") weights_path = config.get("C3D", "weights_path") lr = config.get("C3D", "lr") momentum = config.get("C3D", "momentum") image_read = config.get("image", "image_read") image_write = config.get("image", "image_write") video_image = config.get("choose", "video_image") with open(classInd_path, 'r') as f: class_names = f.readlines() f.close() # init model num = 1 camera_ids =video_stream.keys() cap_write ={} model = c3d_model() sgd = SGD(lr=float(lr), momentum=float(momentum), nesterov=True) model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy']) model.summary() model.load_weights(weights_path, by_name=True) def multi_detecion(clip, frame): inputs = np.array(clip).astype(np.float32) inputs = np.expand_dims(inputs, axis=0) inputs[..., 0] -= 99.9 inputs[..., 1] -= 92.1 inputs[..., 2] -= 82.6 inputs[..., 0] /= 65.8 inputs[..., 1] /= 62.3 inputs[..., 2] /= 60.3 inputs = inputs[:, :, 8:120, 30:142, :] inputs = np.transpose(inputs, (0, 2, 3, 1, 4)) pred = model.predict(inputs) label = np.argmax(pred[0]) cv2.putText(frame, class_names[label].split(' ')[-1].strip(), (20, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 255), 1) cv2.putText(frame, "prob: %.4f" % pred[0][label], (20, 40), cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 0, 255), 1) clip.pop(0) return (frame) for i in camera_ids: cap_write['cap_'+i] =cv2.VideoCapture(video_stream[i][1]) size_1 = (int(cap_write['cap_'+i].get(cv2.CAP_PROP_FRAME_WIDTH)), int(cap_write['cap_'+i].get(cv2.CAP_PROP_FRAME_HEIGHT))) fps_1 = cap_write['cap_'+i].get(cv2.CAP_PROP_FPS) cap_write["write_" + i]= cv2.VideoWriter(video_stream[i][2], cv2.VideoWriter_fourcc('m', 'p', '4', 'v'), fps_1, size_1) if video_image == 'video': while True: if num % 2 == 0: camera = 'camera_1' else: camera = 'camera_2' ret_1, frame_1 = cap_write['cap_'+str(camera)].read() if ret_1: tmp = cv2.cvtColor(frame_1, cv2.COLOR_BGR2RGB) video_stream[camera][0].append(cv2.resize(tmp, (171, 128))) if len(video_stream[camera][0]) == 16: frame_1 = multi_detecion(video_stream[camera][0], frame_1) print("16") cap_write['write_'+str(camera)].write(frame_1) print (camera+"success") num =num + 1 elif video_image == 'image': fileList = os.listdir(image_read) fileList.reverse() clip = [] for fileName in fileList: frame = cv2.imread(image_read + '/' + fileName) clip.append(cv2.resize(frame, (171, 128))) if len(clip) == 16: frame = multi_detecion(clip, frame) cv2.imwrite(image_write + '/' + str(num) + ".jpg", frame) print("write success") num = num+1 else: print("choose image or video") #for i in camera_ids: # cap_write['cap_' + i].release() # print('release'+i) if __name__ == '__main__': video_stream = {'camera_1': [], 'camera_2': [[],'/home/shixi/C3D-keras/datasets/ucf101/abnormal_event/abnormal-event_100.avi','results/abnormal_test.mp4' ]} video_stream['camera_1'].append([]) video_stream['camera_1'].append('/home/shixi/C3D-keras/videos/shooting.mpg') video_stream['camera_1'].append('results/normal_test.mp4') main(video_stream)
py
1a5b67efbd0da28ef9d852a284747c94ff4e998e
""" @Author :Furqan Khan @Email :[email protected] @Date :1/2/2017 Objective : The purpose of this file /module /Class is to stop an on going discover scan It will take the process id from the web service which would inturn get the id from the project id and will then kill the process with recursively kiling all its child processes """ import main_class_based_backup as main import os import ConfigParser import time import psutil import subprocess import sys import psutil import threading NmapScanObj=main.NmapScan() targethosts=sys.argv[1] path=sys.argv[2] targetports=sys.argv[3] scan_type=sys.argv[4] switch=sys.argv[5] project_id=sys.argv[6] mode=sys.argv[7] assessment_id=sys.argv[8] app_id=sys.argv[9] print "Inside Stopper.PY \n\n" print (targethosts,path,targetports,scan_type,switch,project_id,mode,assessment_id,app_id) p_id=0 p_id=NmapScanObj.IPtable.get_processId(project_id) process_id=int(p_id) if process_id: try: process = psutil.Process(process_id) print str(process) for proc in process.children(recursive=True): try: print( "Killing Process with id -->"+str(proc)) proc.kill() print( "Killed Process with id -->"+str(proc)) except Exception ,excep: print "Exception while killing but ignoring and continuing to kill "+str(excep) #self.process.terminate() try: process = psutil.Process(process_id) if process: process.kill() thread=threading.current_thread() thread.join(60) #commands_executed.append('Process killed--.timeout') except: print("Parent Process already KIlled") except Exception ,ee: print("Exception caught in th-controllor--"+str(ee)) else: print "No process id associated with scan !!!" """class Stopper(): def __init__(targethosts,path,targetports,scan_type,switch,project_id,mode,assessment_id,app_id): self.NmapScanObj=main.NmapScan() print "Inside Stopper.PY \n\n" def stop(self): p_id=self.NmapScanObj.IPtable.get_processId(project_id) process_id=int(p_id) if process_id: try: process = psutil.Process(process_id) print str(process) for proc in process.children(recursive=True): print( "Killing Process with id -->"+str(proc)) proc.kill() print( "Killed Process with id -->"+str(proc)) #self.process.terminate() try: process = psutil.Process(process_id) if process: process.kill() thread=threading.current_thread() thread.join(60) #commands_executed.append('Process killed--.timeout') except: print("Parent Process already KIlled") except Exception ,ee: print("Exception caught in th-controllor--"+str(ee)) else: print "No process id associated with scan !!!" """
py
1a5b681ba9c665384c4fed65089df6f73ce0f0fb
import os import jwt from functools import wraps from flask import request, make_response, jsonify,abort def verify_tokens(): """ Method to verify that auth token is valid """ token = None if 'Authorization' in request.headers: token = request.headers['Authorization'] if not token: abort(make_response(jsonify({"Message": "You need to login"}), 401)) try: data = jwt.decode(token, os.getenv('JWT_SECRET_KEY', default='SdaHv342nx!jknr837bjwd?c,lsajjjhw673hdsbgeh')) return data["email"], data["user_id"] except: abort(make_response(jsonify({ "Message":"The token is invalid" }), 403))
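
# Usage sketch: verify_tokens() reads the Authorization header, so it only
# works inside a Flask request context. The helper below shows one possible
# wiring; the '/api/v1/profile' path, the payload keys and the `app` argument
# are illustrative and not defined elsewhere in this project.
def example_protected_route(app):
    @app.route('/api/v1/profile')
    def profile():
        email, user_id = verify_tokens()
        return jsonify({"Email": email, "UserId": user_id}), 200
    return app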
py
1a5b68717c9592eda0e56515f2c82b7e29ff5f94
import zhdate from nonebot import on_command, CommandSession, permission, log from .get_divination_of_thing import get_divination_of_thing from omega_miya.plugins.Group_manage.group_permissions import * __plugin_name__ = '求签' __plugin_usage__ = r'''【求签】 使用这个命令可以对任何事求运势, 包括且不限于吃饭、睡懒觉、DD 用法: /求签 [所求之事]''' # on_command 装饰器将函数声明为一个命令处理器 @on_command('maybe', aliases='求签', only_to_me=False, permission=permission.EVERYBODY) async def maybe(session: CommandSession): group_id = session.event.group_id user_id = session.event.user_id session_type = session.event.detail_type if session_type == 'group': if not has_command_permissions(group_id): await session.send('本群组没有执行命令的权限呢QAQ') log.logger.info(f'{__name__}: 群组: {group_id} 没有命令权限, 已中止命令执行') return elif session_type == 'private': await session.send('本命令不支持在私聊中使用QAQ') log.logger.info(f'{__name__}: 用户: {session.event.user_id} 在{session_type}中使用了命令, 已中止命令执行') return else: log.logger.info(f'{__name__}: 用户: {session.event.user_id} 在{session_type}环境中使用了命令, 已中止命令执行') return # 从会话状态(session.state)中获取事项, 如果当前不存在, 则询问用户 divination = session.get('divination', prompt='你想问什么事呢?') try: # 求签者昵称, 优先使用群昵称 divination_user = session.event['sender']['card'] if not divination_user: divination_user = session.event['sender']['nickname'] # 求签 divination_result = await get_divination_of_thing(divination=divination, divination_user=user_id) # 向用户发送结果 date_luna = zhdate.ZhDate.today().chinese() msg = f'今天是{date_luna}\n{divination_user}所求事项: 【{divination}】\n\n结果: 【{divination_result}】' await session.send(msg) except Exception as e: log.logger.warning(f'{__name__}: 群组: {group_id}, 用户: {session.event.user_id} 试图使用命令maybe时发生了错误: {e}') # args_parser 装饰器将函数声明为命令的参数解析器 # 命令解析器用于将用户输入的参数解析成命令真正需要的数据 @maybe.args_parser async def _(session: CommandSession): group_id = session.event.group_id session_type = session.event.detail_type if session_type == 'group': if not has_command_permissions(group_id): return elif session_type == 'private': return else: return # 去掉消息首尾的空白符 stripped_arg = session.current_arg_text.strip() if session.is_first_run: # 该命令第一次运行(第一次进入命令会话) if stripped_arg: # 第一次运行参数不为空 session.state['divination'] = stripped_arg return if not stripped_arg: # 用户没有发送有效的字符(而是发送了空白字符), 则提示重新输入 # 这里 session.pause() 将会发送消息并暂停当前会话(该行后面的代码不会被运行) session.pause('你还没告诉我你想问什么事呢~') # 如果当前正在向用户询问更多信息(例如本例中的要查询的城市), 且用户输入有效, 则放入会话状态 session.state[session.current_key] = stripped_arg
py
1a5b68ffe29119f855a727c0389c0bc94563d7d9
import os import base64 import hashlib import datetime # parse an ISO formatted timestamp string, converting it to a python datetime object; # note: this function is also defined in server code def parse_json_datetime(json_timestamp): assert json_timestamp.endswith('Z') format = '' if '.' in json_timestamp: format = '%Y-%m-%dT%H:%M:%S.%f' else: format = '%Y-%m-%dT%H:%M:%S' if json_timestamp.endswith(' Z'): format += ' Z' else: format += 'Z' return datetime.datetime.strptime(json_timestamp, format) # build an auth_code string by hashing a secret key def build_auth_code(secret_key): nonce = base64.b64encode(os.urandom(32)).decode() key_hash = base64.b64encode(hashlib.sha512((nonce + ';' + secret_key).encode()).digest()).decode() key_part = secret_key[:3] + secret_key[-3:] return key_part + ';' + nonce + ';' + key_hash
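
# Usage sketch: round-trip one ISO timestamp and derive an auth code from a
# made-up secret key; the literal values below are illustrative only.
if __name__ == '__main__':
    parsed = parse_json_datetime('2021-03-04T10:15:30.250Z')
    print('parsed timestamp: %s' % parsed.isoformat())
    print('auth code: %s' % build_auth_code('0123456789abcdef'))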
py
1a5b6b28641088bae0cd8bcc6820436766fb2736
import os class Config(object): """Parent configuration class.""" DEBUG = False TESTING = False SECRET_KEY = os.getenv('SECRET') class DevelopmentConfig(Config): """Configurations for Development.""" DEBUG = True TESTING = True class TestingConfig(Config): """Configurations for Testing, with a separate test database.""" TESTING = True DEBUG = True class ProductionConfig(Config): """Configurations for Production.""" DEBUG = False TESTING = False app_config = { 'development': DevelopmentConfig, 'testing': TestingConfig, 'production': ProductionConfig }
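
def create_app_example(config_name='development'):
    """Application-factory sketch showing how app_config is typically consumed.

    This helper is illustrative only and is not referenced elsewhere in the
    project; the Flask import is kept local so importing this module stays
    side-effect free.
    """
    from flask import Flask
    app = Flask(__name__)
    app.config.from_object(app_config[config_name])
    return app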
py
1a5b6b9764c6240beb6608dfe2764bcdd24bdf21
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for ReduceJoin op from string_ops.""" import itertools import numpy as np from six.moves import xrange # pylint: disable=redefined-builtin from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.framework import test_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import string_ops from tensorflow.python.platform import test def _input_array(num_dims): """Creates an ndarray where each element is the binary of its linear index. Args: num_dims: The number of dimensions to create. Returns: An ndarray of shape [2] * num_dims. """ formatter = "{:0%db}" % num_dims strings = [formatter.format(i) for i in xrange(2**num_dims)] return np.array(strings, dtype="S%d" % num_dims).reshape([2] * num_dims) def _joined_array(num_dims, reduce_dim): """Creates an ndarray with the result from reduce_join on input_array. Args: num_dims: The number of dimensions of the original input array. reduce_dim: The dimension to reduce. Returns: An ndarray of shape [2] * (num_dims - 1). """ formatter = "{:0%db}" % (num_dims - 1) result = np.zeros(shape=[2] * (num_dims - 1), dtype="S%d" % (2 * num_dims)) flat = result.ravel() for i in xrange(2**(num_dims - 1)): dims = formatter.format(i) flat[i] = "".join([(dims[:reduce_dim] + "%d" + dims[reduce_dim:]) % j for j in xrange(2)]) return result class UnicodeTestCase(test.TestCase): """Test case with Python3-compatible string comparator.""" def assertAllEqualUnicode(self, truth, actual): self.assertAllEqual( np.array(truth).astype("U"), np.array(actual).astype("U")) class ReduceJoinTestHelperTest(UnicodeTestCase): """Tests for helper functions.""" def testInputArray(self): num_dims = 3 truth = ["{:03b}".format(i) for i in xrange(2**num_dims)] output_array = _input_array(num_dims).reshape([-1]) self.assertAllEqualUnicode(truth, output_array) def testJoinedArray(self): num_dims = 3 truth_dim_zero = [["000100", "001101"], ["010110", "011111"]] truth_dim_one = [["000010", "001011"], ["100110", "101111"]] truth_dim_two = [["000001", "010011"], ["100101", "110111"]] output_array_dim_zero = _joined_array(num_dims, reduce_dim=0) output_array_dim_one = _joined_array(num_dims, reduce_dim=1) output_array_dim_two = _joined_array(num_dims, reduce_dim=2) self.assertAllEqualUnicode(truth_dim_zero, output_array_dim_zero) self.assertAllEqualUnicode(truth_dim_one, output_array_dim_one) self.assertAllEqualUnicode(truth_dim_two, output_array_dim_two) class ReduceJoinTest(UnicodeTestCase): def _testReduceJoin(self, input_array, truth, truth_shape, axis, keep_dims=False, separator=""): """Compares the output of reduce_join to an expected result. Args: input_array: The string input to be joined. truth: An array or np.array of the expected result. truth_shape: An array or np.array of the expected shape. 
axis: The indices to reduce over. keep_dims: Whether or not to retain reduced dimensions. separator: The separator to use for joining. """ with self.cached_session(): output = string_ops.reduce_join( inputs=input_array, axis=axis, keep_dims=keep_dims, separator=separator) output_array = self.evaluate(output) self.assertAllEqualUnicode(truth, output_array) self.assertAllEqual(truth_shape, output.get_shape()) def _testMultipleReduceJoin(self, input_array, axis, separator=" "): """Tests reduce_join for one input and multiple axes. Does so by comparing the output to that from nested reduce_string_joins. The correctness of single-dimension reduce_join is verified by other tests below using _testReduceJoin. Args: input_array: The input to test. axis: The indices to reduce. separator: The separator to use when joining. """ with self.cached_session(): output = string_ops.reduce_join( inputs=input_array, axis=axis, keep_dims=False, separator=separator) output_keep_dims = string_ops.reduce_join( inputs=input_array, axis=axis, keep_dims=True, separator=separator) truth = input_array for index in axis: truth = string_ops.reduce_join( inputs=truth, axis=index, keep_dims=True, separator=separator) if not axis: truth = constant_op.constant(truth) truth_squeezed = array_ops.squeeze(truth, axis=axis) output_array = self.evaluate(output) output_keep_dims_array = self.evaluate(output_keep_dims) truth_array = self.evaluate(truth) truth_squeezed_array = self.evaluate(truth_squeezed) self.assertAllEqualUnicode(truth_array, output_keep_dims_array) self.assertAllEqualUnicode(truth_squeezed_array, output_array) self.assertAllEqual(truth.get_shape(), output_keep_dims.get_shape()) self.assertAllEqual(truth_squeezed.get_shape(), output.get_shape()) def testRankOne(self): input_array = ["this", "is", "a", "test"] truth = "thisisatest" truth_shape = [] self._testReduceJoin(input_array, truth, truth_shape, axis=0) def testRankTwo(self): input_array = [["this", "is", "a", "test"], ["please", "do", "not", "panic"]] truth_dim_zero = ["thisplease", "isdo", "anot", "testpanic"] truth_shape_dim_zero = [4] truth_dim_one = ["thisisatest", "pleasedonotpanic"] truth_shape_dim_one = [2] self._testReduceJoin( input_array, truth_dim_zero, truth_shape_dim_zero, axis=0) self._testReduceJoin( input_array, truth_dim_one, truth_shape_dim_one, axis=1) expected_val = "thisisatestpleasedonotpanic" expected_shape = [] self._testReduceJoin(input_array, expected_val, expected_shape, axis=None) # Using axis=[] is a no-op. 
expected_val = input_array expected_shape = [2, 4] self._testReduceJoin(input_array, expected_val, expected_shape, axis=[]) def testRankFive(self): input_array = _input_array(num_dims=5) truths = [_joined_array(num_dims=5, reduce_dim=i) for i in xrange(5)] truth_shape = [2] * 4 for i in xrange(5): self._testReduceJoin(input_array, truths[i], truth_shape, axis=i) def testNegative(self): input_array = _input_array(num_dims=5) truths = [_joined_array(num_dims=5, reduce_dim=i) for i in xrange(5)] truth_shape = [2] * 4 for i in xrange(5): self._testReduceJoin(input_array, truths[i], truth_shape, axis=i - 5) def testSingletonDimension(self): input_arrays = [ _input_array(num_dims=5).reshape([2] * i + [1] + [2] * (5 - i)) for i in xrange(6) ] truth = _input_array(num_dims=5) truth_shape = [2] * 5 for i in xrange(6): self._testReduceJoin(input_arrays[i], truth, truth_shape, axis=i) def testSeparator(self): input_array = [["this", "is", "a", "test"], ["please", "do", "not", "panic"]] truth_dim_zero = ["this please", "is do", "a not", "test panic"] truth_shape_dim_zero = [4] truth_dim_one = ["this is a test", "please do not panic"] truth_shape_dim_one = [2] self._testReduceJoin( input_array, truth_dim_zero, truth_shape_dim_zero, axis=0, separator=" ") self._testReduceJoin( input_array, truth_dim_one, truth_shape_dim_one, axis=1, separator=" ") @test_util.run_deprecated_v1 def testUnknownShape(self): input_array = [["a"], ["b"]] truth = ["ab"] truth_shape = None with self.cached_session(): placeholder = array_ops.placeholder(dtypes.string, name="placeholder") reduced = string_ops.reduce_join(placeholder, axis=0) output_array = reduced.eval(feed_dict={placeholder.name: input_array}) self.assertAllEqualUnicode(truth, output_array) self.assertAllEqual(truth_shape, reduced.get_shape()) @test_util.run_deprecated_v1 def testUnknownIndices(self): input_array = [["this", "is", "a", "test"], ["please", "do", "not", "panic"]] truth_dim_zero = ["thisplease", "isdo", "anot", "testpanic"] truth_dim_one = ["thisisatest", "pleasedonotpanic"] truth_shape = None with self.cached_session(): placeholder = array_ops.placeholder(dtypes.int32, name="placeholder") reduced = string_ops.reduce_join(input_array, axis=placeholder) output_array_dim_zero = reduced.eval(feed_dict={placeholder.name: [0]}) output_array_dim_one = reduced.eval(feed_dict={placeholder.name: [1]}) self.assertAllEqualUnicode(truth_dim_zero, output_array_dim_zero) self.assertAllEqualUnicode(truth_dim_one, output_array_dim_one) self.assertAllEqual(truth_shape, reduced.get_shape()) def testKeepDims(self): input_array = [["this", "is", "a", "test"], ["please", "do", "not", "panic"]] truth_dim_zero = [["thisplease", "isdo", "anot", "testpanic"]] truth_shape_dim_zero = [1, 4] truth_dim_one = [["thisisatest"], ["pleasedonotpanic"]] truth_shape_dim_one = [2, 1] self._testReduceJoin( input_array, truth_dim_zero, truth_shape_dim_zero, axis=0, keep_dims=True) self._testReduceJoin( input_array, truth_dim_one, truth_shape_dim_one, axis=1, keep_dims=True) expected_val = [["thisisatestpleasedonotpanic"]] expected_shape = [1, 1] self._testReduceJoin( constant_op.constant(input_array), expected_val, expected_shape, keep_dims=True, axis=None) # Using axis=[] is a no-op. expected_val = input_array expected_shape = [2, 4] self._testReduceJoin( input_array, expected_val, expected_shape, keep_dims=True, axis=[]) def testMultiIndex(self): num_dims = 3 input_array = _input_array(num_dims=num_dims) # Also tests []. 
for i in xrange(num_dims + 1): for permutation in itertools.permutations(xrange(num_dims), i): self._testMultipleReduceJoin(input_array, axis=permutation) @test_util.run_deprecated_v1 def testInvalidReductionIndices(self): with self.cached_session(): with self.assertRaisesRegex(ValueError, "Invalid reduction dim"): string_ops.reduce_join(inputs="", axis=0) with self.assertRaisesRegex(ValueError, "Invalid reduction dimension -3"): string_ops.reduce_join(inputs=[[""]], axis=-3) with self.assertRaisesRegex(ValueError, "Invalid reduction dimension 2"): string_ops.reduce_join(inputs=[[""]], axis=2) with self.assertRaisesRegex(ValueError, "Invalid reduction dimension -3"): string_ops.reduce_join(inputs=[[""]], axis=[0, -3]) with self.assertRaisesRegex(ValueError, "Invalid reduction dimension 2"): string_ops.reduce_join(inputs=[[""]], axis=[0, 2]) def testZeroDims(self): with self.cached_session(): inputs = np.zeros([0, 1], dtype=str) # Reduction that drops the dim of size 0. output = string_ops.reduce_join(inputs=inputs, axis=0) self.assertAllEqualUnicode([""], self.evaluate(output)) # Reduction that keeps the dim of size 0. output = string_ops.reduce_join(inputs=inputs, axis=1) output_shape = self.evaluate(output).shape self.assertAllEqual([0], output_shape) @test_util.run_deprecated_v1 def testInvalidArgsUnknownShape(self): with self.cached_session(): placeholder = array_ops.placeholder(dtypes.string, name="placeholder") index_too_high = string_ops.reduce_join(placeholder, axis=1) duplicate_index = string_ops.reduce_join(placeholder, axis=[-1, 1]) with self.assertRaisesOpError("Invalid reduction dimension 1"): index_too_high.eval(feed_dict={placeholder.name: [""]}) with self.assertRaisesOpError("Duplicate reduction dimension 1"): duplicate_index.eval(feed_dict={placeholder.name: [[""]]}) @test_util.run_deprecated_v1 def testInvalidArgsUnknownIndices(self): with self.cached_session(): placeholder = array_ops.placeholder(dtypes.int32, name="placeholder") reduced = string_ops.reduce_join(["test", "test2"], axis=placeholder) with self.assertRaisesOpError("reduction dimension -2"): reduced.eval(feed_dict={placeholder.name: -2}) with self.assertRaisesOpError("reduction dimension 2"): reduced.eval(feed_dict={placeholder.name: 2}) def testDeprecatedArgs(self): foobar = constant_op.constant(["foobar"]) # Old names: keep_dims and reduction_indices output = string_ops.reduce_join( ["foo", "bar"], reduction_indices=0, keep_dims=True) self.assertAllEqual(foobar, output) # New names keepdims and axis. output = string_ops.reduce_join(["foo", "bar"], axis=0, keepdims=True) self.assertAllEqual(foobar, output) if __name__ == "__main__": test.main()
py
1a5b6bad9bcbb6adcb56c03f3d4654eb5c48c1f2
import remit.settings as settings from django.template.loader import render_to_string def clean_phonenumber(number): try: number = number.replace('-', '') number = number.replace(' ', '') number = number.replace(',', '') except Exception, e: debug(e, 'Error cleaning phonenumber %s' % number, 'sms') return number def debug(error, message='', efile=''): from remit.utils import debug return debug(error, message, efile) def nexmo_sms(message, to): from nexmo.libpynexmo.nexmomessage import NexmoMessage title = settings.NEXMO_FROM to = clean_phonenumber(to) try: if to[0] == 1: title = '12134657620' except Exception, e: debug(e, 'send sms error', 'sms') params = { 'api_key': settings.NEXMO_USERNAME, 'api_secret': settings.NEXMO_PASSWORD, 'from': title, 'to': '%s%s' % ('+', to), 'text': message, } # print params sms = NexmoMessage(params) response = sms.send_request() return response def twilio_sms(to, message): from twilio.rest import TwilioRestClient response = False to = clean_phonenumber(to) try: if not to[0] == '+': to = '%s%s' % ('+', to) except Exception, e: debug(e, 'Error sending twilio sms', 'sms') client = TwilioRestClient( settings.TWILIO_ACCOUNT_SID, settings.TWILIO_AUTH_TOKEN) response = client.messages.create( body=message, to=to, from_='+16092574786') try: debug(e, 'Twilio sms response %s' % response, 'sms') except Exception, e: pass return response
py
1a5b6bf9625c885c31cf64ebc39e724aee459578
# -*- coding: UTF-8 -*- from struct import pack, unpack_from SPL_STANDARD_MESSAGE_STRUCT_HEADER = "SPL " SPL_STANDARD_MESSAGE_STRUCT_VERSION = 6 SPL_STANDARD_MESSAGE_DATA_SIZE = 780 SPL_STANDARD_MESSAGE_MAX_NUM_OF_PLAYERS = 5 SIZE_SPL_MESSAGE = 852 SIZE_ROBOT = 64 SIZE_BALL = 36 SENSOR_DATA_SIZE = 660 class Message: def __init__(self, data=None): self.timestamp = 0 if data is not None: self.parseData(data) def parseData(self, data): vals = unpack_from("i", data) self.timestamp = vals[0] def __str__(self): return " (Message at %d) " % (self.timestamp) class Ball: def __init__(self, data=None): self.message = Message() self.id = -1 self.posX = 0.0 self.posY = 0.0 self.confidence = 0.0 self.motionX = 0.0 self.motionY = 0.0 self.motionConfidence = 0.0 self.localConfidence = 0.0 if data is not None: self.parseData(data) def parseData(self, data): self.message = Message(data[:4]) vals = unpack_from("i7f", data[4:]) self.id = vals[0] self.posX = vals[1] self.posY = vals[2] self.confidence = vals[3] self.motionX = vals[4] self.motionY = vals[5] self.motionConfidence = vals[6] self.localConfidence = vals[7] def __str__(self): msg = "Ball at (%0.2f, %0.2f) @ %0.2f with motion (%0.2f, %0.2f) @ %0.2f" % (self.posX, self.posY, self.confidence, self.motionX, self.motionY, self.motionConfidence) return msg + str(self.message) class Robot: def __init__(self, data=None): if data is not None: self.message = Message(data[:4]) vals = unpack_from("2if2i4fi4fi", data[4:]) self.robotId = vals[0] self.role = vals[1] self.fallenSince = vals[2] self.active = vals[3] try: self.alphaDeg = int(vals[4]) self.alpha = float(vals[5]) except Exception, e: self.alphaDeg = 0 self.alpha = 0.0 print "warning: failed to parse angle: %s!" % str(e) self.posX = vals[6] self.posY = vals[7] self.confidence = vals[8] self.GTposAlphaDeg = vals[9] self.GTposAlpha = vals[10] self.GTposX = vals[11] self.GTposY = vals[12] self.GTconfidence = vals[13] self.GTtimestamp = vals[14] else: self.message = Message() self.robotId = -1 self.role = -1 self.fallenSince = 0.0 self.active = 0 self.alphaDeg = 0 self.alpha = 0.0 self.posX = 0.0 self.posY = 0.0 self.confidence = 0.0 self.GTposAlphaDeg = 0 self.GTposAlpha = 0.0 self.GTposX = 0.0 self.GTposY = 0.0 self.GTconfidence = 0.0 self.GTtimestamp = 0 def setFromString(self, s): # new Robot id=0 t=3179 c=1 @ -3.21, -3, 1.568(rad)rad GROUNDTRUTH: 0,0,0(rad),Conf: 0 # Robot id=2 s=2 team=3 t=6130 c=0 @ 2.14, -2.47, 2.04rad GROUNDTRUTH: 0,0,0,Conf: 0 v = s.split(" ") #new Robot if v[2][0] == "t": self.message = Message() self.message.timestamp = int(v[2][2:]) self.robotId = int(v[1][3:]) self.posX = float(v[5][:-1]) self.posY = float(v[6][:-1]) self.alpha = float(v[7][:-8]) gt = v[9].split(",") self.GTposX = float(gt[0]) self.GTposY = float(gt[1]) self.GTposAlpha = float(gt[2][:-5]) self.confidence = float(v[3][2:]) #old Robot else: self.message.timestamp = int(v[4][2:]) self.robotId = int(v[1][3:]) self.posX = float(v[7][:-1]) self.posY = float(v[8][:-1]) self.alpha = float(v[9][:-3]) gt = v[11].split(",") self.GTposX = float(gt[0]) self.GTposY = float(gt[1]) self.GTposAlpha = float(gt[2]) self.confidence = float(v[5][2:]) def c_str(self): msg = "Robot id=%d s=%d team=%d t=%d c=%f @ %f, %f, %frad GROUNDTRUTH: %f,%f,%f,Conf: %f" % \ (self.robotId, \ self.message.sender, \ self.message.teamNumber, \ self.message.timestamp, \ self.confidence, \ self.posX, \ self.posY, \ self.alpha, \ self.GTposX, \ self.GTposY, \ self.GTposAlpha, \ self.GTconfidence) return msg def __str__(self): msg = "Robot #%d (%0.2f, 
%0.2f) @ %0.2f with conf %0.2f, (%0.2f, %0.2f) @ %0.2f " % \ (self.robotId, \ self.posX, \ self.posY, \ self.alpha, \ self.confidence,\ self.GTposX, \ self.GTposY, \ self.GTposAlpha) return msg + str(self.message) class SPLStandardMessage: def __init__(self, data): if len(data) < SIZE_SPL_MESSAGE: print "size does not match spl standard message format!" return self.bembelbotsMessage = False raw = data[:(SIZE_SPL_MESSAGE-SPL_STANDARD_MESSAGE_DATA_SIZE)] #print len(raw) vals = unpack_from("4sBbbb3f2f2ff2f2f5bbhhbbh", raw) #print vals self.header = vals[0] if self.header != SPL_STANDARD_MESSAGE_STRUCT_HEADER: print "packet does not look like a spl standard message!" return self.version = vals[1] if self.version != SPL_STANDARD_MESSAGE_STRUCT_VERSION: print "spl message has wrong version!" return self.playerNum = vals[2] self.team = vals[3] self.fallen = vals[4] self.posX = vals[5] self.posY = vals[6] self.alpha = vals[7] self.walkX = vals[8] self.walkY = vals[9] self.shootX = vals[10] self.shootY = vals[11] self.ballAge = vals[12] self.ballX = vals[13] self.ballY = vals[14] self.motionX = vals[15] self.motionY = vals[16] self.suggestions = vals[17:17+SPL_STANDARD_MESSAGE_MAX_NUM_OF_PLAYERS] self.intention = vals[22] self.walkSpeed = vals[23] self.kickDist = vals[24] self.posConfidence = vals[25] self.sideConfidence = vals[26] self.dataSize = vals[27] #print #print "protocol version: %d" % self.version #print "player number: %d" % self.playerNum #print "team number %d" % self.team #print "fallen down? %d" % self.fallen #print "pos (%f,%f) @ %f" % (self.posX, self.posY, self.alpha) #print "ball age %d " % self.ballAge #print "ball (%f, %f)" % (self.ballX, self.ballY) #print "motion (%f, %f)" % (self.motionX, self.motionY) #print "additional data size %d" % self.dataSize #print blob = data[len(raw)-2:] if self.dataSize > 3 and blob[0] == 'B' and blob[1] == 'B' and blob[2] == ' ': try: self.bbRobot = Robot(blob[3:3+SIZE_ROBOT]) self.bbBall = Ball(blob[3+SIZE_ROBOT:]) self.bembelbotsMessage = True except Exception, e: print "failed to parse additional info\n%s" % str(e) class SensorData(): def __init__(self, data): if len(data) != SENSOR_DATA_SIZE: return lenName = 24 lenSensors = 144 lenEulers = 6 all = unpack_from("24c144f6f6f2i", data) robotNameD = all[0:lenName] robotName = "" for i in robotNameD: if ord(i) == 0: break else: robotName += i self.robotName = robotName p = lenName self.sensors = all[p:p+lenSensors] p += lenSensors self.eulersTop = all[p:p+lenEulers] p += lenEulers self.eulersBottom = all[p:p+lenEulers] self.timestamp = all[-2] self.tick = all[-1]
py
1a5b6d423281274536b8f4365b92d2e03a931b05
""" ESC/POS Commands (Constants) """ # Feed control sequences CTL_LF = '\x0a' # Print and line feed CTL_FF = '\x0c' # Form feed CTL_CR = '\x0d' # Carriage return CTL_HT = '\x09' # Horizontal tab CTL_VT = '\x0b' # Vertical tab # Printer hardware HW_INIT = '\x1b\x40' # Clear data in buffer and reset modes HW_SELECT = '\x1b\x3d\x01' # Printer select HW_RESET = '\x1b\x3f\x0a\x00' # Reset printer hardware # Cash Drawer CD_KICK_2 = '\x1b\x70\x00' # Sends a pulse to pin 2 [] CD_KICK_5 = '\x1b\x70\x01' # Sends a pulse to pin 5 [] # Paper PAPER_FULL_CUT = '\x1d\x56\x00' # Full cut paper PAPER_PART_CUT = '\x1d\x56\x01' # Partial cut paper # Text format BARCODE_TXT_OFF = '\x1d\x48\x00' # HRI barcode chars OFF BARCODE_TXT_ABV = '\x1d\x48\x01' # HRI barcode chars above BARCODE_TXT_BLW = '\x1d\x48\x02' # HRI barcode chars below BARCODE_TXT_BTH = '\x1d\x48\x03' # HRI barcode chars both above and below BARCODE_FONT_A = '\x1d\x66\x00' # Font type A for HRI barcode chars BARCODE_FONT_B = '\x1d\x66\x01' # Font type B for HRI barcode chars BARCODE_HEIGHT = '\x1d\x68\x64' # Barcode Height [1-255] BARCODE_WIDTH = '\x1d\x77\x03' # Barcode Width [2-6] BARCODE_UPC_A = '\x1d\x6b\x00' # Barcode type UPC-A BARCODE_UPC_E = '\x1d\x6b\x01' # Barcode type UPC-E BARCODE_EAN13 = '\x1d\x6b\x02' # Barcode type EAN13 BARCODE_EAN8 = '\x1d\x6b\x03' # Barcode type EAN8 BARCODE_CODE39 = '\x1d\x6b\x04' # Barcode type CODE39 BARCODE_ITF = '\x1d\x6b\x05' # Barcode type ITF BARCODE_NW7 = '\x1d\x6b\x06' # Barcode type NW7 # Image format S_RASTER_N = '\x1d\x76\x30\x00' # Set raster image normal size S_RASTER_2W = '\x1d\x76\x30\x01' # Set raster image double width S_RASTER_2H = '\x1d\x76\x30\x02' # Set raster image double height S_RASTER_Q = '\x1d\x76\x30\x03' # Set raster image quadruple RESET = '\x1b\x40' TEXT_STYLE = { 'bold': { 0: '\x1b\x45\x00', # Bold font OFF 1: '\x1b\x45\x01', # Bold font ON }, 'underline': { None: '\x1b\x2d\x00', # Underline font OFF 1: '\x1b\x2d\x01', # Underline font 1-dot ON 2: '\x1b\x2d\x02', # Underline font 2-dot ON }, 'size': { 'normal': '\x1b\x21\x00', # Normal text '2h': '\x1b\x21\x10', # Double height text '2w': '\x1b\x21\x20', # Double width text '2x': '\x1b\x21\x30', # Quad area text }, 'font': { 'a': '\x1b\x4d\x00', # Font type A 'b': '\x1b\x4d\x01', # Font type B 'c': '\x1b\x4d\x02', # Font type C (may not support) }, 'align': { 'left': '\x1b\x61\x00', # Left justification 'right': '\x1b\x61\x02', # Right justification 'center': '\x1b\x61\x01', # Centering }, 'inverted': { False: '\x1d\x42\x00', # Inverted mode ON True: '\x1d\x42\x01', # Inverted mode OFF }, 'color': { 1: '\x1b\x72\x00', # Select 1st printing color 2: '\x1b\x72\x00', # Select 2nd printing color } } PAGE_CP_SET_COMMAND = '\x1b\x74' PAGE_CP_CODE = { 'cp437' : 0, # 'katakana' : 1, 'cp850' : 2, 'cp860' : 3, 'cp863' : 4, 'cp865' : 5, 'cp1251' : 6, 'cp866' : 7, 'mac_cyrillic': 8, 'cp775' : 9, 'cp1253' : 10, 'cp737' : 11, 'cp857' : 12, 'iso8859_9' : 13, 'cp864' : 14, 'cp862' : 15, 'iso8859_2' : 16, 'cp1253' : 17, 'cp1250' : 18, 'cp858' : 19, 'cp1254' : 20, # 'TIS_14' : 21, # 'TIS_17' : 22, # 'TIS_11' : 23, 'cp737' : 24, 'cp1257' : 25, 'cp847' : 26, # 'cp720' : 27, 'cp885' : 28, 'cp857' : 29, 'cp1250' : 30, 'cp775' : 31, 'cp1254' : 32, # '' : 33, 'cp1256' : 34, 'cp1258' : 35, 'iso8859_2' : 36, 'iso8859_3' : 37, 'iso8859_4' : 38, 'iso8859_5' : 39, 'iso8859_6' : 40, 'iso8859_7' : 41, 'iso8859_8' : 42, 'iso8859_9' : 43, 'iso8859_15' : 44, # '???' : 45, 'cp856' : 46, 'cp874' : 47, }
py
1a5b6d78c3e8068a28ad2556f6fc1cdc8ce1133d
class Node(object): def __init__(self, name, which): self.name = name self.which = which self.next = next self.timestamp = 0 class AnimalShelter(object): def __init__(self): self.first_cat = None self.first_dog = None self.last_cat = None self.last_dog = None self.counter = 0 def enqueue(self, name, which): self.counter += 1 node = Node(name, which) node.timestamp = self.counter if which == 'cat': if not self.first_cat: self.first_cat = node if self.last_cat: self.last_cat.next = node self.last_cat = node if which == 'dog': if not self.first_dog: self.first_dog = node if self.last_dog: self.last_dog.next = node self.last_dog = node def dequeueDog(self): if self.first_dog: node = self.first_dog self.first_dog = node.next return str(node.name) raise Exception('No Dogs!') def dequeueCat(self): if self.first_cat: node = self.first_cat self.first_cat = node.next return str(node.name) raise Exception('No Cats!') def dequeueAny(self): nodecat = self.first_cat nodedog = self.first_dog if nodecat and not nodedog: return self.dequeueCat() elif nodedog and not nodecat: return self.dequeueDog() elif nodedog and nodecat: if nodedog.timestamp < nodecat.timestamp: return self.dequeueDog() else: return self.dequeueCat() raise Exception('No Animals!') def main(): qs = AnimalShelter() qs.enqueue('bob', 'cat') qs.enqueue('mia', 'cat') qs.enqueue('yoda', 'dog') qs.enqueue('wolf', 'dog') assert(qs.dequeueDog() == 'yoda') assert(qs.dequeueCat() == 'bob') print(qs.dequeueAny() == 'mia') if __name__ == '__main__': main()
py
1a5b6dd6c9d5f88cf4569b742a4deba0ce1455a7
"""The customComp component."""
py
1a5b6f431b64f8ce5ad77fdb539ad3be5edbdbf7
import cv2 as cv import numpy as np import ctypes def Mbox(title, text, style): return ctypes.windll.user32.MessageBoxW(0, text, title, style) # https://docs.opencv.org/3.4/dc/d9b/classcv_1_1ppf__match__3d_1_1ICP.html def rotation(theta): tx, ty, tz = theta Rx = np.array([[1, 0, 0], [0, np.cos(tx), -np.sin(tx)], [0, np.sin(tx), np.cos(tx)]]) Ry = np.array([[np.cos(ty), 0, -np.sin(ty)], [0, 1, 0], [np.sin(ty), 0, np.cos(ty)]]) Rz = np.array([[np.cos(tz), -np.sin(tz), 0], [np.sin(tz), np.cos(tz), 0], [0, 0, 1]]) return np.dot(Rx, np.dot(Ry, Rz)) width = 20 height = 10 max_deg = np.pi / 12 cloud, rotated_cloud = [None]*3, [None]*3 retval, residual, pose = [None]*3, [None]*3, [None]*3 noise = np.random.normal(0.0, 0.1, height * width * 3).reshape((-1, 3)) noise2 = np.random.normal(0.0, 1.0, height * width) x, y = np.meshgrid( range(-width//2, width//2), range(-height//2, height//2), sparse=False, indexing='xy' ) z = np.zeros((height, width)) cloud[0] = np.dstack((x, y, z)).reshape((-1, 3)).astype(np.float32) cloud[1] = noise.astype(np.float32) + cloud[0] cloud[2] = cloud[1] cloud[2][:, 2] += noise2.astype(np.float32) R = rotation([ 0, #np.random.uniform(-max_deg, max_deg), np.random.uniform(-max_deg, max_deg), 0, #np.random.uniform(-max_deg, max_deg) ]) t = np.zeros((3, 1)) Rt = np.vstack(( np.hstack((R, t)), np.array([0, 0, 0, 1]) )).astype(np.float32) icp = cv.ppf_match_3d_ICP(100) I = np.eye(4) print("Unaligned error:\t%.6f" % np.linalg.norm(I - Rt)) sprintfStr = "Unaligned error:\t%.6f\n" % np.linalg.norm(I - Rt) for i in range(3): rotated_cloud[i] = np.matmul(Rt[0:3,0:3], cloud[i].T).T + Rt[:3,3].T retval[i], residual[i], pose[i] = icp.registerModelToScene(rotated_cloud[i], cloud[i]) print("ICP error:\t\t%.6f" % np.linalg.norm(I - np.matmul(pose[0], Rt))) sprintfStr += "ICP error:\t\t%.6f\n" % np.linalg.norm(I - np.matmul(pose[0], Rt)) Mbox('ICP complete', sprintfStr, 1)
py
1a5b6f4be6e62ff1a9c8a26d0443695a459497f7
# # Copyright (c) [2021] Huawei Technologies Co.,Ltd.All rights reserved. # # OpenArkCompiler is licensed under Mulan PSL v2. # You can use this software according to the terms and conditions of the Mulan PSL v2. # # http://license.coscl.org.cn/MulanPSL2 # # THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR # FIT FOR A PARTICULAR PURPOSE. # See the Mulan PSL v2 for more details. # from api import * SCO2_TEST = { "compile": [ C2ast( clang="${OUT_ROOT}/tools/bin/clang", include_path=[ "${OUT_ROOT}/aarch64-clang-release/lib/include", "${OUT_ROOT}/tools/gcc-linaro-7.5.0/aarch64-linux-gnu/libc/usr/include", "${OUT_ROOT}/tools/gcc-linaro-7.5.0/lib/gcc/aarch64-linux-gnu/7.5.0/include" ], option="--target=aarch64", infile="${APP}.c", outfile="${APP}.ast", extra_opt="${SPEC_PARAM}" ), Mplfe( hir2mpl="${OUT_ROOT}/aarch64-clang-release/bin/hir2mpl", infile="${APP}.ast", outfile="${APP}.mpl" ), Maple( maple="${OUT_ROOT}/aarch64-clang-release/bin/maple", run=["me", "mpl2mpl", "mplcg"], option={ "me": "-O2 --quiet", "mpl2mpl": "-O2 --quiet", "mplcg": "--O2 --fpic --quiet --no-pie --verbose-asm" }, global_option="", infile="${APP}.mpl" ), CLinker( infile="${APP}.s", front_option="-O2 -std=c99", outfile="${APP}.o", back_option="", mid_opt="-c" ) ], "link": [ CLinker( infile="${APP}", front_option="-std=gnu99 -no-pie", outfile="${EXE}", back_option="-lm -L${OUT_ROOT}/tools/gcc-linaro-7.5.0/aarch64-linux-gnu/libc/lib/" ) ], "cp_data":[ Shell( "cp -r data/test/${APP} ${TARGET}" ) ], "run": [ Shell( "${OUT_ROOT}/tools/bin/qemu-aarch64 -L ${OUT_ROOT}/tools/gcc-linaro-7.5.0/aarch64-linux-gnu/libc ${EXE} ${APP} > output.log" ) ], "compare": [ Shell( "${MAPLE_ROOT}/testsuite/c_test/spec_test/specperl ${MAPLE_ROOT}/testsuite/c_test/spec_test/specdiff -m -l 10 ${EXTRA_COMPARE} output.log data/test/${APP}" ) ] }
py
1a5b70ce2dab2cf9f1b964bf8cf17e7655ab1269
import torch import numpy as np from sklearn.metrics.pairwise import cosine_similarity def grad_cosine(grad_1, grad_2): cos = np.zeros(len(grad_1)) for i in range(len(grad_1)): cos_arr = grad_1[i] * grad_2[i] cos_arr /= np.sqrt(np.sum(grad_1[i] ** 2)) cos_arr /= np.sqrt(np.sum(grad_2[i] ** 2)) cos[i] = np.sum(cos_arr) return cos def grad_vs_optimal(grad_list, param_list): final_param = param_list[-1] cos = [] for i in range(len(param_list) - 1): param = param_list[i] grad = grad_list[i] ideal_direction = [param[j] - final_param[j] for j in range(len(param))] cos.append(grad_cosine(grad, ideal_direction)) return np.stack(cos) def plot_grad_flow(named_parameters): '''Plots the gradients flowing through different layers in the net during training. Can be used for checking for possible gradient vanishing / exploding problems. Usage: Plug this function in Trainer class after loss.backwards() as "plot_grad_flow(self.model.named_parameters())" to visualize the gradient flow''' ave_grads = [] max_grads= [] layers = [] for n, p in named_parameters: if(p.requires_grad) and ("bias" not in n): layers.append(n) ave_grads.append(p.grad.abs().mean()) max_grads.append(p.grad.abs().max()) plt.bar(np.arange(len(max_grads)), max_grads, alpha=0.1, lw=1, color="c") plt.bar(np.arange(len(max_grads)), ave_grads, alpha=0.1, lw=1, color="b") plt.hlines(0, 0, len(ave_grads)+1, lw=2, color="k" ) plt.xticks(range(0,len(ave_grads), 1), layers, rotation="vertical") plt.xlim(left=0, right=len(ave_grads)) plt.ylim(bottom = -0.001, top=0.02) # zoom in on the lower gradient regions plt.xlabel("Layers") plt.ylabel("average gradient") plt.title("Gradient flow") plt.grid(True) plt.legend([plt.Line2D([0], [0], color="c", lw=4), plt.Line2D([0], [0], color="b", lw=4), plt.Line2D([0], [0], color="k", lw=4)], ['max-gradient', 'mean-gradient', 'zero-gradient']) class GradAnalysis(object): def __init__(self, model): self.model = model self.names = [] self.params = [] self.grad = [] self.get_param() def get_param(self): self.params = [] for n, p in self.model.named_parameters(): if (p.requires_grad) and ("bias" not in n): self.names.append(n) self.params.append(p.data.clone().cpu().numpy()) return self.params def loss_grad(self, loss): # Backward and optimize loss.backward(retain_graph=True) self.grad = [ p.grad.clone().cpu().numpy() for n, p in self.model.named_parameters() if (p.requires_grad) and ("bias" not in n) ] return self.grad def clear_grad(self): for n, p in self.model.named_parameters(): if (p.requires_grad) and ("bias" not in n): p.grad.data.zero_()
py
1a5b72fdfa51ad5203eb677ff870dc5680d5985b
from properties.property import Property class Sound(Property): sound = 1 volume = 300 def __init__(self, sound, volume=300): self.sound = sound self.volume = volume def __str__(self): return '{0} {1}'.format(self.sound, self.volume)
py
1a5b74bbaf0e89ccd25b3e178ffefd9eb211b37a
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # # http://www.apache.org/licenses/LICENSE-2.0 # # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from aliyunsdkcore.request import RpcRequest from aliyunsdkdrds.endpoint import endpoint_data class DescribeRdsCommodityRequest(RpcRequest): def __init__(self): RpcRequest.__init__(self, 'Drds', '2019-01-23', 'DescribeRdsCommodity','Drds') self.set_method('POST') if hasattr(self, "endpoint_map"): setattr(self, "endpoint_map", endpoint_data.getEndpointMap()) if hasattr(self, "endpoint_regional"): setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional()) def get_CommodityCode(self): return self.get_query_params().get('CommodityCode') def set_CommodityCode(self,CommodityCode): self.add_query_param('CommodityCode',CommodityCode) def get_DrdsInstanceId(self): return self.get_query_params().get('DrdsInstanceId') def set_DrdsInstanceId(self,DrdsInstanceId): self.add_query_param('DrdsInstanceId',DrdsInstanceId) def get_OrderType(self): return self.get_query_params().get('OrderType') def set_OrderType(self,OrderType): self.add_query_param('OrderType',OrderType)
py
1a5b750554a25c3382a7bdf2e04924c4f9e59eaf
#!/usr/bin/env python3 # Copyright (c) 2014-2021 The Garliccoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. ''' Script to generate list of seed nodes for chainparams.cpp. This script expects two text files in the directory that is passed as an argument: nodes_main.txt nodes_test.txt These files must consist of lines in the format <ip>:<port> [<ipv6>]:<port> <onion>.onion:<port> <i2p>.b32.i2p:<port> The output will be two data structures with the peers in binary format: static const uint8_t chainparams_seed_{main,test}[]={ ... } These should be pasted into `src/chainparamsseeds.h`. ''' from base64 import b32decode from enum import Enum import struct import sys import os import re class BIP155Network(Enum): IPV4 = 1 IPV6 = 2 TORV2 = 3 TORV3 = 4 I2P = 5 CJDNS = 6 def name_to_bip155(addr): '''Convert address string to BIP155 (networkID, addr) tuple.''' if addr.endswith('.onion'): vchAddr = b32decode(addr[0:-6], True) if len(vchAddr) == 10: return (BIP155Network.TORV2, vchAddr) elif len(vchAddr) == 35: assert(vchAddr[34] == 3) return (BIP155Network.TORV3, vchAddr[:32]) else: raise ValueError('Invalid onion %s' % vchAddr) elif addr.endswith('.b32.i2p'): vchAddr = b32decode(addr[0:-8] + '====', True) if len(vchAddr) == 32: return (BIP155Network.I2P, vchAddr) else: raise ValueError(f'Invalid I2P {vchAddr}') elif '.' in addr: # IPv4 return (BIP155Network.IPV4, bytes((int(x) for x in addr.split('.')))) elif ':' in addr: # IPv6 sub = [[], []] # prefix, suffix x = 0 addr = addr.split(':') for i,comp in enumerate(addr): if comp == '': if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end continue x += 1 # :: skips to suffix assert(x < 2) else: # two bytes per component val = int(comp, 16) sub[x].append(val >> 8) sub[x].append(val & 0xff) nullbytes = 16 - len(sub[0]) - len(sub[1]) assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0)) return (BIP155Network.IPV6, bytes(sub[0] + ([0] * nullbytes) + sub[1])) else: raise ValueError('Could not parse address %s' % addr) def parse_spec(s): '''Convert endpoint string to BIP155 (networkID, addr, port) tuple.''' match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s) if match: # ipv6 host = match.group(1) port = match.group(2) elif s.count(':') > 1: # ipv6, no port host = s port = '' else: (host,_,port) = s.partition(':') if not port: port = 0 else: port = int(port) host = name_to_bip155(host) return host + (port, ) def ser_compact_size(l): r = b"" if l < 253: r = struct.pack("B", l) elif l < 0x10000: r = struct.pack("<BH", 253, l) elif l < 0x100000000: r = struct.pack("<BI", 254, l) else: r = struct.pack("<BQ", 255, l) return r def bip155_serialize(spec): ''' Serialize (networkID, addr, port) tuple to BIP155 binary format. 
''' r = b"" r += struct.pack('B', spec[0].value) r += ser_compact_size(len(spec[1])) r += spec[1] r += struct.pack('>H', spec[2]) return r def process_nodes(g, f, structname): g.write('static const uint8_t %s[] = {\n' % structname) for line in f: comment = line.find('#') if comment != -1: line = line[0:comment] line = line.strip() if not line: continue spec = parse_spec(line) blob = bip155_serialize(spec) hoststr = ','.join(('0x%02x' % b) for b in blob) g.write(f' {hoststr},\n') g.write('};\n') def main(): if len(sys.argv)<2: print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr) sys.exit(1) g = sys.stdout indir = sys.argv[1] g.write('#ifndef GARLICCOIN_CHAINPARAMSSEEDS_H\n') g.write('#define GARLICCOIN_CHAINPARAMSSEEDS_H\n') g.write('/**\n') g.write(' * List of fixed seed nodes for the garliccoin network\n') g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n') g.write(' *\n') g.write(' * Each line contains a BIP155 serialized (networkID, addr, port) tuple.\n') g.write(' */\n') with open(os.path.join(indir,'nodes_main.txt'), 'r', encoding="utf8") as f: process_nodes(g, f, 'chainparams_seed_main') g.write('\n') with open(os.path.join(indir,'nodes_test.txt'), 'r', encoding="utf8") as f: process_nodes(g, f, 'chainparams_seed_test') g.write('#endif // GARLICCOIN_CHAINPARAMSSEEDS_H\n') if __name__ == '__main__': main()
py
1a5b7553b10f2f168ae12c99b693d2c38cf665ca
import argparse import time import torch import torch.nn as nn from torch.utils import data import numpy as np import pickle import cv2 import torch.optim as optim import scipy.misc import torch.backends.cudnn as cudnn import sys import os from tqdm import tqdm import os.path as osp #from networks.gcnet import Res_Deeplab from dataset.datasets import CSDataSet #import matplotlib.pyplot as plt import random import timeit import logging from tensorboardX import SummaryWriter from utils.utils import decode_labels, inv_preprocess, decode_predictions from utils.criterion import CriterionCrossEntropy, CriterionOhemCrossEntropy, CriterionDSN, CriterionOhemDSN from utils.encoding import DataParallelModel, DataParallelCriterion from utils.utils import fromfile torch_ver = torch.__version__[:3] if torch_ver == '0.3': from torch.autograd import Variable start = timeit.default_timer() IMG_MEAN = np.array((104.00698793,116.66876762,122.67891434), dtype=np.float32) ''' BATCH_SIZE = 8 DATA_DIRECTORY = 'cityscapes' DATA_LIST_PATH = './dataset/list/cityscapes/train.lst' IGNORE_LABEL = 255 INPUT_SIZE = '769,769' LEARNING_RATE = 1e-2 MOMENTUM = 0.9 NUM_CLASSES = 19 NUM_STEPS = 60000 POWER = 0.9 RANDOM_SEED = 1234 RESTORE_FROM = './dataset/resnet101-imagenet.pth' SAVE_NUM_IMAGES = 2 SAVE_PRED_EVERY = 10000 SNAPSHOT_DIR = 'snapshots/' WEIGHT_DECAY = 0.0005 ''' def str2bool(v): if v.lower() in ('yes', 'true', 't', 'y', '1'): return True elif v.lower() in ('no', 'false', 'f', 'n', '0'): return False else: raise argparse.ArgumentTypeError('Boolean value expected.') def get_arguments(): """Parse all the arguments provided from the CLI. Returns: A list of parsed arguments. """ parser = argparse.ArgumentParser(description="DeepLab-ResNet Network") parser.add_argument("--data-dir", type=str, default=None, help="Path to the directory containing the PASCAL VOC dataset.") parser.add_argument("--is-training", action="store_true", help="Whether to updates the running means and variances during the training.") parser.add_argument("--not-restore-last", action="store_true", help="Whether to not restore last (FC) layers.") parser.add_argument("--start-iters", type=int, default=0, help="Number of classes to predict (including background).") parser.add_argument("--random-mirror", action="store_true", help="Whether to randomly mirror the inputs during the training.") parser.add_argument("--random-scale", action="store_true", help="Whether to randomly scale the inputs during the training.") parser.add_argument("--restore-from", type=str, default=None, help="Where restore model parameters from.") parser.add_argument("--gpu", type=str, default='None', help="choose gpu device.") parser.add_argument("--recurrence", type=int, default=1, help="choose the number of recurrence.") parser.add_argument("--ft", type=bool, default=False, help="fine-tune the model with large input size.") parser.add_argument('--config', help='train config file path') parser.add_argument("--ohem", type=str2bool, default='False', help="use hard negative mining") parser.add_argument("--ohem-thres", type=float, default=0.6, help="choose the samples with correct probability underthe threshold.") parser.add_argument("--ohem-keep", type=int, default=200000, help="choose the samples with correct probability underthe threshold.") parser.add_argument("--use-zip", type=str2bool, default='True', help="use zipfile as dataset") return parser.parse_args() args = get_arguments() cfg=fromfile(args.config) if cfg.model.type == 'basenet': from networks.basenet import Res_Deeplab 
def lr_poly(base_lr, iter, max_iter, power): return base_lr*((1-float(iter)/max_iter)**(power)) def adjust_learning_rate(optimizer, i_iter): """Sets the learning rate to the initial LR divided by 5 at 60th, 120th and 160th epochs""" lr = lr_poly(cfg.train_cfg.learning_rate, i_iter, cfg.train_cfg.num_steps, cfg.train_cfg.power) optimizer.param_groups[0]['lr'] = lr return lr def set_bn_eval(m): classname = m.__class__.__name__ if classname.find('BatchNorm') != -1: m.eval() def set_bn_momentum(m): classname = m.__class__.__name__ if classname.find('BatchNorm') != -1 or classname.find('InPlaceABN') != -1: m.momentum = 0.0003 def main(): """Create the model and start the training.""" writer = SummaryWriter(cfg.train_cfg.snapshot_dir) if args.gpu is not None: os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu if args.data_dir is not None: cfg.data_cfg.data_dir = args.data_dir if args.restore_from is not None: cfg.train_cfg.restore_from = args.restore_from if args.start_iters is not None: cfg.train_cfg.start_iters = args.start_iters h, w = map(int, cfg.data_cfg.input_size.split(',')) input_size = (h, w) cudnn.enabled = True # Create network. deeplab = Res_Deeplab(cfg.model,cfg.data_cfg.num_classes) print(deeplab) if cfg.train_cfg.start_iters == 0: saved_state_dict = torch.load(cfg.train_cfg.restore_from) new_params = deeplab.state_dict().copy() for i in saved_state_dict: i_parts = i.split('.') if not i_parts[0] == 'fc': new_params['.'.join(i_parts[0:])] = saved_state_dict[i] deeplab.load_state_dict(new_params) print('new params loaded') else: saved_state_dict = torch.load(cfg.train_cfg.restore_from) deeplab.load_state_dict(saved_state_dict) model = DataParallelModel(deeplab) model.train() model.float() # model.apply(set_bn_momentum) model.cuda() if args.ohem: criterion = CriterionOhemDSN(thresh=args.ohem_thres, min_kept=args.ohem_keep) else: criterion = CriterionDSN() criterion = DataParallelCriterion(criterion) criterion.cuda() cudnn.benchmark = True if not os.path.exists(cfg.train_cfg.snapshot_dir): os.makedirs(cfg.train_cfg.snapshot_dir) trainloader = data.DataLoader(CSDataSet(cfg.data_cfg.data_dir, cfg.data_cfg.data_list, max_iters=cfg.train_cfg.num_steps*cfg.train_cfg.batch_size, crop_size=input_size,scale=args.random_scale, mirror=args.random_mirror, mean=IMG_MEAN, use_zip=args.use_zip), batch_size=cfg.train_cfg.batch_size, shuffle=True, num_workers=4, pin_memory=True) def get_params(tmp_model): lr_wd_group = [] lr_nowd_group = [] for name, p in tmp_model.named_parameters(): if p.requires_grad: if p.__dict__.get('wd', -1) == 0: lr_nowd_group.append(p) print(name) else: lr_wd_group.append(p) return [dict(params=lr_wd_group), dict(params=lr_nowd_group, weight_decay=0.0)] optimizer = optim.SGD(get_params(deeplab), lr=cfg.train_cfg.learning_rate, momentum=cfg.train_cfg.momentum,weight_decay=cfg.train_cfg.weight_decay) optimizer.zero_grad() for i_iter, batch in enumerate(trainloader): i_iter += cfg.train_cfg.start_iters images, labels, _, _ = batch images = images.cuda() labels = labels.long().cuda() if torch_ver == "0.3": images = Variable(images) labels = Variable(labels) optimizer.zero_grad() lr = adjust_learning_rate(optimizer, i_iter) preds = model(images) loss = criterion(preds, labels) loss.backward() optimizer.step() if i_iter % 100 == 0: writer.add_scalar('learning_rate', lr, i_iter) writer.add_scalar('loss', loss.data.cpu().numpy(), i_iter) if 'nowd' in cfg.model.module.type and cfg.model.module.get('with_nl', True): writer.add_scalar('convkey_mean', 
model.module.head.ctb.conv_key.weight.mean(), i_iter) writer.add_scalar('convkey_std', model.module.head.ctb.conv_key.weight.var().sqrt(), i_iter) writer.add_scalar('convkey_max', model.module.head.ctb.conv_key.weight.abs().max(), i_iter) writer.add_scalar('convquery_std', model.module.head.ctb.conv_query.weight.var().sqrt(), i_iter) writer.add_scalar('convquery_mean', model.module.head.ctb.conv_query.weight.mean(), i_iter) writer.add_scalar('convquery_max', model.module.head.ctb.conv_query.weight.abs().max(), i_iter) # if i_iter % 5000 == 0: # images_inv = inv_preprocess(images, args.save_num_images, IMG_MEAN) # labels_colors = decode_labels(labels, args.save_num_images, args.num_classes) # if isinstance(preds, list): # preds = preds[0] # preds_colors = decode_predictions(preds, args.save_num_images, args.num_classes) # for index, (img, lab) in enumerate(zip(images_inv, labels_colors)): # writer.add_image('Images/'+str(index), img, i_iter) # writer.add_image('Labels/'+str(index), lab, i_iter) # writer.add_image('preds/'+str(index), preds_colors[index], i_iter) print('Time {}, iter = {} of {} completed, loss = {}'.format(time.strftime("%Y-%m-%d %H:%M:%S"), i_iter, cfg.train_cfg.num_steps, loss.data.cpu().numpy())) if 'nowd' in cfg.model.module.type and cfg.model.module.get('with_nl', True): print('convkey: mean {}, std {}, absmax {}'.format( model.module.head.ctb.conv_key.weight.mean(), model.module.head.ctb.conv_key.weight.var().sqrt(), model.module.head.ctb.conv_key.weight.abs().max())) print('convquery: mean {}, std {}, absmax {}'.format( model.module.head.ctb.conv_query.weight.mean(), model.module.head.ctb.conv_query.weight.var().sqrt(), model.module.head.ctb.conv_query.weight.abs().max())) if i_iter >= cfg.train_cfg.num_steps-1: print('save model ...') torch.save(deeplab.state_dict(),osp.join(cfg.train_cfg.snapshot_dir, 'CS_scenes_'+str(cfg.train_cfg.num_steps)+'.pth')) break if i_iter % cfg.train_cfg.save_pred_every == 0 and i_iter >= cfg.train_cfg.save_from-1: print('taking snapshot ...') torch.save(deeplab.state_dict(),osp.join(cfg.train_cfg.snapshot_dir, 'CS_scenes_'+str(i_iter)+'.pth')) end = timeit.default_timer() print(end-start,'seconds') if __name__ == '__main__': main()
py
1a5b755b45141c492058ef913c853da198949514
""" Try all efforts to minimize the distribution size of Depsland, then extract archived files on client side. WIP: This module is not stable to use. """ import os import shutil import subprocess import sys sys.path.append(os.path.abspath(f'{__file__}/../..')) # noinspection PyUnresolvedReferences from minimal_setup.index import ResourcesIndex # noqa python_exe = sys.executable res_idx = ... def main(): global res_idx res_idx = _indexing_resources() _extract() _setup_venv_packages() _clean() def _indexing_resources(): res_idx = ResourcesIndex() return res_idx def _extract(): def _extract(file_i: str, dir_o): if file_i.endswith(('.tar.gz', '.tar')): import tarfile file_handle = tarfile.open(file_i) else: from zipfile import ZipFile file_handle = ZipFile(file_i) file_handle.extractall(dir_o) return dir_o _extract(res_idx.assets_zip, res_idx.assets) _extract(res_idx.venv_packages_zip, res_idx.venv_packages_unzip) def _setup_venv_packages(): # note: assert pip and setuptools already exist send_cmd(f'{python_exe} -m pip install -r {res_idx.requirements} ' f'--no-index -f {res_idx.venv_packages_unzip}') def _clean(): for i in ( res_idx.assets_zip, res_idx.temp, res_idx.venv_packages_zip, ): if os.path.exists(i): if os.path.isfile(i): os.remove(i) else: shutil.rmtree(i) # ----------------------------------------------------------------------------- def copy_dirs(dir_i, dir_o): for n in os.listdir(dir_i): i = f'{dir_i}/{n}' o = f'{dir_o}/{n}' shutil.copytree(i, o) def send_cmd(cmd: str) -> str: try: ret = subprocess.run( cmd, shell=True, check=True, capture_output=True ) out = ret.stdout.decode(encoding='utf-8').replace('\r\n', '\n') except subprocess.CalledProcessError as e: out = e.stderr.decode(encoding='utf-8') raise Exception(out) return out
py
1a5b756de8b326a15f8971f97cfac76e92ad59c6
# coding: utf-8 from typing import List from domain.entities import Link class ImportLinksRequest: def __init__(self, send_results: bool = False) -> None: self.send_results = send_results class AddLinksRequest: def __init__(self, links: List[Link]) -> None: self.links = links class FindByTagRequest: def __init__(self, tag: str) -> None: self.tag = tag
py
1a5b760d6125f4c394138f6962edf21d665a2519
# -*- coding: utf-8 -*- ''' © 2012-2013 eBay Software Foundation Authored by: Tim Keefer Licensed under CDDL 1.0 ''' import os import sys import gevent from optparse import OptionParser sys.path.insert(0, '%s/../' % os.path.dirname(__file__)) from common import dump from ebaysdk.finding import Connection as finding from ebaysdk.http import Connection as html from ebaysdk.exception import ConnectionError def init_options(): usage = "usage: %prog [options]" parser = OptionParser(usage=usage) parser.add_option("-d", "--debug", action="store_true", dest="debug", default=False, help="Enabled debugging [default: %default]") parser.add_option("-y", "--yaml", dest="yaml", default='ebay.yaml', help="Specifies the name of the YAML defaults file. [default: %default]") parser.add_option("-a", "--appid", dest="appid", default=None, help="Specifies the eBay application id to use.") (opts, args) = parser.parse_args() return opts, args def run(opts): timeout = gevent.Timeout(4) timeout.start() try: calls = [] for page in range(1, 10): api = finding(debug=opts.debug, appid=opts.appid, config_file=opts.yaml) call = gevent.spawn(api.execute, 'findItemsAdvanced', {'keywords': 'python', 'paginationInput': {'pageNumber': page}}) calls.append(call) gevent.joinall(calls) try: call_results = [c.get() for c in calls] toprated = 0 for resp in call_results: for item in resp.reply.searchResult.item: if item.topRatedListing == 'true': toprated += 1 print("Top Rated Listings: %s" % toprated) except ConnectionError as e: print("%s" % e) except gevent.timeout.Timeout as e: print("Calls reached timeout threshold: %s" % e) finally: timeout.cancel() if __name__ == "__main__": (opts, args) = init_options() run(opts)
py
1a5b76bf2ebd069bc36ec107c61adaa99c4c180c
# coding: utf-8 from __future__ import unicode_literals from .common import InfoExtractor from ..utils import ( clean_html, get_element_by_class, js_to_json, ) class TVNoeIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?tvnoe\.cz/video/(?P<id>[0-9]+)' _TEST = { 'url': 'http://www.tvnoe.cz/video/10362', 'md5': 'aee983f279aab96ec45ab6e2abb3c2ca', 'info_dict': { 'id': '10362', 'ext': 'mp4', 'series': 'Noční univerzita', 'title': 'prof. Tomáš Halík, Th.D. - Návrat náboženství a střet civilizací', 'description': 'md5:f337bae384e1a531a52c55ebc50fff41', } } def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) iframe_url = self._search_regex( r'<iframe[^>]+src="([^"]+)"', webpage, 'iframe URL') ifs_page = self._download_webpage(iframe_url, video_id) jwplayer_data = self._parse_json( self._find_jwplayer_data(ifs_page), video_id, transform_source=js_to_json) info_dict = self._parse_jwplayer_data( jwplayer_data, video_id, require_title=False, base_url=iframe_url) info_dict.update({ 'id': video_id, 'title': clean_html(get_element_by_class( 'field-name-field-podnazev', webpage)), 'description': clean_html(get_element_by_class( 'field-name-body', webpage)), 'series': clean_html(get_element_by_class('title', webpage)) }) return info_dict
py
1a5b76fa78466e7198302817e25f9e4a9d167b80
from collections import OrderedDict import logging LOG = logging.getLogger(__name__) class Dispatcher(object): def __init__(self, mount=None): self._endpoints = OrderedDict() self.mount = mount def add_endpoint(self, nickname, endpoint): if self.mount: endpoint = self.mount + endpoint self._endpoints[nickname] = (endpoint, None) def get_endpoint_path(self, req, nickname, **kwargs): path = '' if nickname in self._endpoints: path = self._endpoints[nickname][0] if '{tenant_id}' in path: tenant_id = req.env['tenant_id'] path = path.replace('{tenant_id}', tenant_id) for var, value in kwargs.items(): if '{%s}' % var in path: path = path.replace('{%s}' % var, str(value)) return path def get_endpoint_url(self, req, nickname, **kwargs): return (req.protocol + '://' + req.get_header('host') + req.app + self.get_endpoint_path(req, nickname, **kwargs)) def get_unused_endpoints(self): results = [] for nickname, endpoint in self._endpoints.items(): if not endpoint[1]: results.append(nickname) return results def set_handler(self, nickname, handler): if nickname not in self._endpoints: raise ValueError("Unsupported endpoint '%s' specified." % nickname) endpoint, _ = self._endpoints[nickname] self._endpoints[nickname] = (endpoint, handler) def get_routes(self): endpoints = [] for endpoint, h in self._endpoints.values(): if h: endpoints.append((endpoint, h)) return endpoints
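# Illustrative usage sketch (not part of the original module); the nickname,
# path template, and handler below are assumptions made up for demonstration.
if __name__ == '__main__':
    def handle_queue(req, resp, queue_id):
        """Placeholder handler; a real one would populate the response."""

    dispatcher = Dispatcher(mount='/v1')
    dispatcher.add_endpoint('queue_item', '/{tenant_id}/queues/{queue_id}')
    dispatcher.set_handler('queue_item', handle_queue)

    print(dispatcher.get_unused_endpoints())  # -> [] once every handler is set
    print(dispatcher.get_routes())            # -> [('/v1/{tenant_id}/queues/{queue_id}', handle_queue)]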
py
1a5b776af9e9dac37f5c30aa409b8ec2cc90f7ae
from eth.beacon.types.blocks import BaseBeaconBlock class SerenityBeaconBlock(BaseBeaconBlock): pass
py
1a5b7897e3f7a34670f6f3dc7ed3a409c2cb7f14
import json

from . import indexDbAPIs
from . import redisAPIs


class IndexDataRequestHandlers:
    def __init__(self):
        self.indexDbAPIs = indexDbAPIs.IndexDbAPIs()

    async def handler_indexSymbolList(self, request):
        '''
        Returns the list of index symbols
        /api/{marketType}
        '''
        # FROM MongoDB SERVER
        #return await self.indexDbAPIs.getIndexSymbolList()

        # FROM REDIS SERVER
        data = redisAPIs.readDataFromRedis('INDEX_SYMBOLS')
        if data:
            return json.loads(data)
        else:
            #return json.loads({'ERROR' : 'Redis data needs to be built'})
            #return ('ERROR: FNOIDX_SYMBOLS')
            return ([])

    async def handler_indexMarketData(self, request):
        '''
        Returns market data for an index symbol over a date range
        /api/cash/data?symbol=Nifty 50&startdate=5-jul-2019&enddate=15-jul-2019
        '''
        symbol = request.rel_url.query.get('symbol')  # Symbol is case sensitive in this case
        startDate = request.rel_url.query.get('startdate')
        endDate = request.rel_url.query.get('enddate')

        result = await self.indexDbAPIs.getIndexMarketData(symbol, startDate, endDate)

        return result
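# Illustrative wiring sketch (an assumption that these handlers sit behind an
# aiohttp app; the route path and json wrapper below are not part of the
# original code):
#
#   from aiohttp import web
#
#   handlers = IndexDataRequestHandlers()
#
#   async def index_symbols(request):
#       return web.json_response(await handlers.handler_indexSymbolList(request))
#
#   app = web.Application()
#   app.router.add_get('/api/index/symbols', index_symbols)
#   web.run_app(app)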
py
1a5b795e7c78f01a1b4fa6cb8419650a7b221e4d
import logging from django.core.cache import cache logger = logging.getLogger(__name__) class BaseCacheHandler: timeout = None key = None def get(self, *args, **kwargs): key = self.get_key(*args, **kwargs) if key is None: raise ValueError('Redis key cannot be None') data = cache.get(key) return data def set(self, data, *args, **kwargs): key = self.get_key(*args, **kwargs) timeout = self.get_timeout(*args, **kwargs) cache.set(key, data, timeout=timeout) def get_key(self, *args, **kwargs): return self.key def get_timeout(self, *args, **kwargs): return self.timeout
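# Illustrative subclass sketch (not part of the original module); the key
# pattern, timeout, and payload below are assumptions for demonstration only.
class UserProfileCacheHandler(BaseCacheHandler):
    timeout = 300  # seconds

    def get_key(self, user_id, *args, **kwargs):
        # Positional args passed to get()/set() are forwarded here.
        return 'user-profile:{}'.format(user_id)


# Usage (requires configured Django cache settings):
#   handler = UserProfileCacheHandler()
#   handler.set({'name': 'alice'}, 42)   # cached under 'user-profile:42' for 300s
#   profile = handler.get(42)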
py
1a5b7987b6f0fa137979a250078be8295298a38b
# Generated by Django 2.2.2 on 2019-07-04 08:51 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('account', '0004_auto_20190703_1625'), ] operations = [ migrations.RemoveField( model_name='userprofile', name='usertype', ), migrations.AddField( model_name='userprofile', name='role', field=models.CharField(choices=[('SUPERADMIN', '超级管理员'), ('STATIONADMIN', '检查点管理员'), ('CLIENT', '普通用户')], default='CLIENT', max_length=16, verbose_name='用户角色'), ), migrations.AddField( model_name='userprofile', name='user_id', field=models.CharField(blank=True, max_length=20, verbose_name='身份证号码'), ), migrations.AlterField( model_name='userprofile', name='avatar', field=models.ImageField(default='/media/avatar/head.gif', upload_to='avatar', verbose_name='头像'), ), ]
py
1a5b79b0804d08ba33f6385b7d82bf9e5b692321
#!/usr/bin/python from __future__ import print_function from bcc import BPF import re, signal, sys from time import sleep # for influxdb from influxdb import InfluxDBClient import lmp_influxdb as db from db_modules import write2db from datetime import datetime DBNAME = 'lmp' client = db.connect(DBNAME,user='root',passwd=123456) # load BPF program b = BPF(text=""" #include <uapi/linux/ptrace.h> #include <linux/blkdev.h> struct val_t { u32 pid; char name[TASK_COMM_LEN]; u64 ts; }; struct data_t { u32 pid; u64 rwflag; u64 delta; u64 sector; u64 len; u64 ts; char disk_name[DISK_NAME_LEN]; char name[TASK_COMM_LEN]; }; BPF_HASH(infobyreq, struct request *, struct val_t); BPF_PERF_OUTPUT(events); // cache PID and comm by-req int trace_pid_start(struct pt_regs *ctx, struct request *req) { struct val_t val = {}; if (bpf_get_current_comm(&val.name, sizeof(val.name)) == 0) { val.pid = bpf_get_current_pid_tgid(); val.ts = bpf_ktime_get_ns(); infobyreq.update(&req, &val); } return 0; } // output int trace_req_completion(struct pt_regs *ctx, struct request *req) { u64 delta; u32 *pidp = 0; struct val_t *valp; struct data_t data = {}; u64 ts; // fetch timestamp and calculate delta ts = bpf_ktime_get_ns(); //if(data.delta < 1000000){ // return 0; //} valp = infobyreq.lookup(&req); //data.delta = ts - valp->ts; data.ts = ts/1000; if (valp == 0) { data.len = req->__data_len; strcpy(data.name, "?"); } else { data.delta = ts - valp->ts; data.pid = valp->pid; data.len = req->__data_len; data.sector = req->__sector; bpf_probe_read(&data.name, sizeof(data.name), valp->name); struct gendisk *rq_disk = req->rq_disk; bpf_probe_read(&data.disk_name, sizeof(data.disk_name), rq_disk->disk_name); } #ifdef REQ_WRITE data.rwflag = !!(req->cmd_flags & REQ_WRITE); #elif defined(REQ_OP_SHIFT) data.rwflag = !!((req->cmd_flags >> REQ_OP_SHIFT) == REQ_OP_WRITE); #else data.rwflag = !!((req->cmd_flags & REQ_OP_MASK) == REQ_OP_WRITE); #endif events.perf_submit(ctx, &data, sizeof(data)); infobyreq.delete(&req); return 0; } """, debug=0) # data structure from template class lmp_data(object): def __init__(self,a,b,c,d,e,f,g,h): self.time = a self.glob = b self.comm = c self.pid = d self.disk = e self.t = f self.bytes = g self.lat = h data_struct = {"measurement":'HardDiskReadWriteTime', "time":[], "tags":['glob','comm','pid',], "fields":['disk','t','bytes','lat']} if BPF.get_kprobe_functions(b'blk_start_request'): b.attach_kprobe(event="blk_start_request", fn_name="trace_pid_start") b.attach_kprobe(event="blk_mq_start_request", fn_name="trace_pid_start") b.attach_kprobe(event="blk_account_io_completion", fn_name="trace_req_completion") TASK_COMM_LEN = 16 # linux/sched.h DISK_NAME_LEN = 32 # linux/genhd.h # header # print("%-14s %-14s %-6s %-7s %-2s %-22s %-10s %7s " % ("TIME(s)", "COMM", "PID", # "DISK", "T", "SECTOR", "BYTES", "LAT(ms)")) rwflg = "" start_ts = 0 prev_ts = 0 delta = 0 # process event def print_event(cpu, data, size): event = b["events"].event(data) val = -1 global start_ts global prev_ts global delta if event.rwflag == 1: rwflg = "W" if event.rwflag == 0: rwflg = "R" if not re.match(b'\?', event.name): val = event.sector if start_ts == 0: prev_ts = start_ts if start_ts == 1: delta = float(delta) + (event.ts - prev_ts) # print("%-14.9f %-14.14s %-6s %-7s %-2s %-22s %-7s %7.2f " % ( # delta / 1000000, event.name.decode('utf-8', 'replace'), event.pid, # event.disk_name.decode('utf-8', 'replace'), rwflg, val, # event.len, float(event.delta) / 1000000)) test_data = lmp_data(datetime.now().isoformat(),'glob', 
event.name.decode('utf-8', 'replace'), event.pid, event.disk_name.decode('utf-8', 'replace'), rwflg, event.len, float(event.delta) / 1000000) # print(event.pid, time) write2db(data_struct, test_data, client) prev_ts = event.ts start_ts = 1 def quit(signum, frame): sys.exit() # loop with callback to print_event b["events"].open_perf_buffer(print_event, page_cnt=64) while 1: try: sleep(1) signal.signal(signal.SIGINT, quit) signal.signal(signal.SIGTERM, quit) b.perf_buffer_poll() print() except Exception as exc: print(exc) # except KeyboardInterrupt: # db.close() # exit()
py
1a5b79be37eecf0b2851fb644a452234df8e4bde
# -*- coding: utf-8 -*- ''' Functions for querying and modifying a user account and the groups to which it belongs. ''' from __future__ import absolute_import # Import Python libs import ctypes import getpass import logging import os import sys # Import Salt libs import salt.utils.path import salt.utils.platform from salt.exceptions import CommandExecutionError from salt.utils.decorators.jinja import jinja_filter # Import 3rd-party libs from salt.ext import six # Conditional imports try: import pwd HAS_PWD = True except ImportError: HAS_PWD = False try: import grp HAS_GRP = True except ImportError: HAS_GRP = False try: import pysss HAS_PYSSS = True except ImportError: HAS_PYSSS = False try: import salt.utils.win_functions HAS_WIN_FUNCTIONS = True except ImportError: HAS_WIN_FUNCTIONS = False log = logging.getLogger(__name__) def get_user(): ''' Get the current user ''' if HAS_PWD: return pwd.getpwuid(os.geteuid()).pw_name elif HAS_WIN_FUNCTIONS and salt.utils.win_functions.HAS_WIN32: return salt.utils.win_functions.get_current_user() else: raise CommandExecutionError( 'Required external library (pwd or win32api) not installed') @jinja_filter('get_uid') def get_uid(user=None): ''' Get the uid for a given user name. If no user given, the current euid will be returned. If the user does not exist, None will be returned. On systems which do not support pwd or os.geteuid, None will be returned. ''' if not HAS_PWD: return None elif user is None: try: return os.geteuid() except AttributeError: return None else: try: return pwd.getpwnam(user).pw_uid except KeyError: return None def _win_user_token_is_admin(user_token): ''' Using the win32 api, determine if the user with token 'user_token' has administrator rights. See MSDN entry here: http://msdn.microsoft.com/en-us/library/aa376389(VS.85).aspx ''' class SID_IDENTIFIER_AUTHORITY(ctypes.Structure): _fields_ = [ ("byte0", ctypes.c_byte), ("byte1", ctypes.c_byte), ("byte2", ctypes.c_byte), ("byte3", ctypes.c_byte), ("byte4", ctypes.c_byte), ("byte5", ctypes.c_byte), ] nt_authority = SID_IDENTIFIER_AUTHORITY() nt_authority.byte5 = 5 SECURITY_BUILTIN_DOMAIN_RID = 0x20 DOMAIN_ALIAS_RID_ADMINS = 0x220 administrators_group = ctypes.c_void_p() if ctypes.windll.advapi32.AllocateAndInitializeSid( ctypes.byref(nt_authority), 2, SECURITY_BUILTIN_DOMAIN_RID, DOMAIN_ALIAS_RID_ADMINS, 0, 0, 0, 0, 0, 0, ctypes.byref(administrators_group)) == 0: raise Exception("AllocateAndInitializeSid failed") try: is_admin = ctypes.wintypes.BOOL() if ctypes.windll.advapi32.CheckTokenMembership( user_token, administrators_group, ctypes.byref(is_admin)) == 0: raise Exception("CheckTokenMembership failed") return is_admin.value != 0 finally: ctypes.windll.advapi32.FreeSid(administrators_group) def _win_current_user_is_admin(): ''' ctypes.windll.shell32.IsUserAnAdmin() is intentionally avoided due to this function being deprecated. ''' return _win_user_token_is_admin(0) def get_specific_user(): ''' Get a user name for publishing. 
If you find the user is "root" attempt to be more specific ''' user = get_user() if salt.utils.platform.is_windows(): if _win_current_user_is_admin(): return 'sudo_{0}'.format(user) else: env_vars = ('SUDO_USER',) if user == 'root': for evar in env_vars: if evar in os.environ: return 'sudo_{0}'.format(os.environ[evar]) return user def chugid(runas, group=None): ''' Change the current process to belong to the specified user (and the groups to which it belongs) ''' uinfo = pwd.getpwnam(runas) supgroups = [] supgroups_seen = set() if group: try: target_pw_gid = grp.getgrnam(group).gr_gid except KeyError as err: raise CommandExecutionError( 'Failed to fetch the GID for {0}. Error: {1}'.format( group, err ) ) else: target_pw_gid = uinfo.pw_gid # The line below used to exclude the current user's primary gid. # However, when root belongs to more than one group # this causes root's primary group of '0' to be dropped from # his grouplist. On FreeBSD, at least, this makes some # command executions fail with 'access denied'. # # The Python documentation says that os.setgroups sets only # the supplemental groups for a running process. On FreeBSD # this does not appear to be strictly true. group_list = get_group_dict(runas, include_default=True) if sys.platform == 'darwin': group_list = dict((k, v) for k, v in six.iteritems(group_list) if not k.startswith('_')) for group_name in group_list: gid = group_list[group_name] if (gid not in supgroups_seen and not supgroups_seen.add(gid)): supgroups.append(gid) if os.getgid() != target_pw_gid: try: os.setgid(target_pw_gid) except OSError as err: raise CommandExecutionError( 'Failed to change from gid {0} to {1}. Error: {2}'.format( os.getgid(), target_pw_gid, err ) ) # Set supplemental groups if sorted(os.getgroups()) != sorted(supgroups): try: os.setgroups(supgroups) except OSError as err: raise CommandExecutionError( 'Failed to set supplemental groups to {0}. Error: {1}'.format( supgroups, err ) ) if os.getuid() != uinfo.pw_uid: try: os.setuid(uinfo.pw_uid) except OSError as err: raise CommandExecutionError( 'Failed to change from uid {0} to {1}. Error: {2}'.format( os.getuid(), uinfo.pw_uid, err ) ) def chugid_and_umask(runas, umask, group=None): ''' Helper method for for subprocess.Popen to initialise uid/gid and umask for the new process. ''' set_runas = False set_grp = False current_user = getpass.getuser() if runas and runas != current_user: set_runas = True runas_user = runas else: runas_user = current_user current_grp = grp.getgrgid(pwd.getpwnam(getpass.getuser()).pw_gid).gr_name if group and group != current_grp: set_grp = True runas_grp = group else: runas_grp = current_grp if set_runas or set_grp: chugid(runas_user, runas_grp) if umask is not None: os.umask(umask) def get_default_group(user): ''' Returns the specified user's default group. If the user doesn't exist, a KeyError will be raised. ''' return grp.getgrgid(pwd.getpwnam(user).pw_gid).gr_name \ if HAS_GRP and HAS_PWD \ else None def get_group_list(user, include_default=True): ''' Returns a list of all of the system group names of which the user is a member. 
''' if HAS_GRP is False or HAS_PWD is False: return [] group_names = None ugroups = set() if hasattr(os, 'getgrouplist'): # Try os.getgrouplist, available in python >= 3.3 log.trace('Trying os.getgrouplist for \'%s\'', user) try: group_names = [ grp.getgrgid(grpid).gr_name for grpid in os.getgrouplist(user, pwd.getpwnam(user).pw_gid) ] except Exception: pass elif HAS_PYSSS: # Try pysss.getgrouplist log.trace('Trying pysss.getgrouplist for \'%s\'', user) try: group_names = list(pysss.getgrouplist(user)) except Exception: pass if group_names is None: # Fall back to generic code # Include the user's default group to match behavior of # os.getgrouplist() and pysss.getgrouplist() log.trace('Trying generic group list for \'%s\'', user) group_names = [g.gr_name for g in grp.getgrall() if user in g.gr_mem] try: default_group = get_default_group(user) if default_group not in group_names: group_names.append(default_group) except KeyError: # If for some reason the user does not have a default group pass if group_names is not None: ugroups.update(group_names) if include_default is False: # Historically, saltstack code for getting group lists did not # include the default group. Some things may only want # supplemental groups, so include_default=False omits the users # default group. try: default_group = grp.getgrgid(pwd.getpwnam(user).pw_gid).gr_name ugroups.remove(default_group) except KeyError: # If for some reason the user does not have a default group pass log.trace('Group list for user \'%s\': %s', user, sorted(ugroups)) return sorted(ugroups) def get_group_dict(user=None, include_default=True): ''' Returns a dict of all of the system groups as keys, and group ids as values, of which the user is a member. E.g.: {'staff': 501, 'sudo': 27} ''' if HAS_GRP is False or HAS_PWD is False: return {} group_dict = {} group_names = get_group_list(user, include_default=include_default) for group in group_names: group_dict.update({group: grp.getgrnam(group).gr_gid}) return group_dict def get_gid_list(user, include_default=True): ''' Returns a list of all of the system group IDs of which the user is a member. ''' if HAS_GRP is False or HAS_PWD is False: return [] gid_list = list( six.itervalues( get_group_dict(user, include_default=include_default) ) ) return sorted(set(gid_list)) def get_gid(group=None): ''' Get the gid for a given group name. If no group given, the current egid will be returned. If the group does not exist, None will be returned. On systems which do not support grp or os.getegid it will return None. ''' if not HAS_GRP: return None if group is None: try: return os.getegid() except AttributeError: return None else: try: return grp.getgrnam(group).gr_gid except KeyError: return None
py
1a5b7a56e8e80185f8a1df3bc62e89a9bbea16ac
"""Sensor for Last.fm account status.""" import hashlib import logging import re import pylast as lastfm from pylast import WSError import voluptuous as vol from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ATTR_ATTRIBUTION, CONF_API_KEY import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity _LOGGER = logging.getLogger(__name__) ATTR_LAST_PLAYED = "last_played" ATTR_PLAY_COUNT = "play_count" ATTR_TOP_PLAYED = "top_played" ATTRIBUTION = "Data provided by Last.fm" CONF_USERS = "users" ICON = "mdi:lastfm" PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_API_KEY): cv.string, vol.Required(CONF_USERS, default=[]): vol.All(cv.ensure_list, [cv.string]), } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Last.fm sensor platform.""" api_key = config[CONF_API_KEY] users = config.get(CONF_USERS) lastfm_api = lastfm.LastFMNetwork(api_key=api_key) entities = [] for username in users: try: lastfm_api.get_user(username).get_image() entities.append(LastfmSensor(username, lastfm_api)) except WSError as error: _LOGGER.error(error) return add_entities(entities, True) class LastfmSensor(Entity): """A class for the Last.fm account.""" def __init__(self, user, lastfm_api): """Initialize the sensor.""" self._unique_id = hashlib.sha256(user.encode("utf-8")).hexdigest() self._user = lastfm_api.get_user(user) self._name = user self._lastfm = lastfm_api self._state = "Not Scrobbling" self._playcount = None self._lastplayed = None self._topplayed = None self._cover = None @property def unique_id(self): """Return the unique ID of the sensor.""" return self._unique_id @property def name(self): """Return the name of the sensor.""" return self._name @property def state(self): """Return the state of the sensor.""" return self._state def update(self): """Update device state.""" self._cover = self._user.get_image() self._playcount = self._user.get_playcount() last = self._user.get_recent_tracks(limit=2)[0] self._lastplayed = f"{last.track.artist} - {last.track.title}" top = self._user.get_top_tracks(limit=1)[0] toptitle = re.search("', '(.+?)',", str(top)) topartist = re.search("'(.+?)',", str(top)) self._topplayed = "{} - {}".format(topartist.group(1), toptitle.group(1)) if self._user.get_now_playing() is None: self._state = "Not Scrobbling" return now = self._user.get_now_playing() self._state = f"{now.artist} - {now.title}" @property def device_state_attributes(self): """Return the state attributes.""" return { ATTR_ATTRIBUTION: ATTRIBUTION, ATTR_LAST_PLAYED: self._lastplayed, ATTR_PLAY_COUNT: self._playcount, ATTR_TOP_PLAYED: self._topplayed, } @property def entity_picture(self): """Avatar of the user.""" return self._cover @property def icon(self): """Return the icon to use in the frontend.""" return ICON
py
1a5b7b8c5c86a41b42a911e7f1f46c7293a9a078
import json import io def create_snippet(file_path, first_n=5): with open(file_path, 'r') as f: return [next(f) for _ in range(first_n)] def create_jtr_snippet(file_path): return convert_simplequestions(file_path, first_n=5) def convert_simplequestions(file_path, first_n=None): instances = [] f = io.open(file_path, "r") i = 0 for l in f: i += 1 if first_n and i > first_n: break subj, rel, obj, qu = l.strip().split("\t") support = [" ".join([subj, rel])] qdict = { 'question': qu, 'answers': [obj] } qset_dict = { 'support': [{'text': supp} for supp in support], 'questions': [qdict] } instances.append(qset_dict) corpus_dict = { 'meta': "simpleQuestions.json", 'instances': instances } f.close() return corpus_dict def main(): # some tests: # raw_data = load_cbt_file(path=None, part='valid', mode='NE') # instances = split_cbt(raw_data) # = parse_cbt_example(instances[0]) import sys if len(sys.argv) == 3: # corpus = create_jtr_snippet(sys.argv[1]) # out = create_snippet(sys.argv[1]) # with open(sys.argv[2], 'w') as outfile: # outfile.writelines(out) corpus = convert_simplequestions(sys.argv[1]) with open(sys.argv[2], 'w') as outfile: json.dump(corpus, outfile, indent=2) else: print("Usage: python3 simpleQuestions2jtr.py path/to/simpleQuestions save/to/simpleQuestions.jtr.json") if __name__ == "__main__": main()
py
1a5b7bb2d65d85b562a232ac139f1e62a5e04a2a
# Copyright 2019 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """Add Secret Consumers table Revision ID: 0f8c192a061f Revises: 39cf2e645cba Create Date: 2019-08-19 12:03:08.567230 """ # revision identifiers, used by Alembic. revision = "0f8c192a061f" down_revision = "39cf2e645cba" from alembic import op import sqlalchemy as sa def upgrade(): ctx = op.get_context() con = op.get_bind() table_exists = ctx.dialect.has_table(con.engine, "secret_consumer_metadata") if not table_exists: op.create_table( "secret_consumer_metadata", # ModelBase sa.Column("id", sa.String(length=36), nullable=False), sa.Column("created_at", sa.DateTime(), nullable=False), sa.Column("updated_at", sa.DateTime(), nullable=False), sa.Column("deleted_at", sa.DateTime(), nullable=True), sa.Column("deleted", sa.Boolean(), nullable=False), sa.Column("status", sa.String(length=20), nullable=False), # SecretConsumerMetadatum sa.Column("secret_id", sa.String(36), nullable=False), sa.Column("project_id", sa.String(36), nullable=False), sa.Column("service", sa.String(255), nullable=False), sa.Column("resource_type", sa.String(255), nullable=False), sa.Column("resource_id", sa.String(36), nullable=False), # Constraints and Indexes sa.PrimaryKeyConstraint("id"), sa.ForeignKeyConstraint(["secret_id"], ["secrets.id"]), sa.UniqueConstraint( "secret_id", "resource_id", name="_secret_consumer_resource_uc" ), sa.Index("ix_secret_consumer_metadata_secret_id", "secret_id"), sa.Index("ix_secret_consumer_metadata_resource_id", "resource_id"), )
py
1a5b7c2a890fbc5d8146807812d8b27f8d78a729
#add parent dir to find package. Only needed for source code build, pip install doesn't need it. import os, inspect currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) parentdir = os.path.dirname(os.path.dirname(currentdir)) os.sys.path.insert(0,parentdir) import gym from pybullet_envs.bullet.kukaGymEnv import KukaGymEnv from baselines import deepq def main(): env = KukaGymEnv(renders=True, isDiscrete=True) act = deepq.load("kuka_model.pkl") print(act) while True: obs, done = env.reset(), False print("===================================") print("obs") print(obs) episode_rew = 0 while not done: env.render() obs, rew, done, _ = env.step(act(obs[None])[0]) episode_rew += rew print("Episode reward", episode_rew) if __name__ == '__main__': main()
py
1a5b7deec71b725331b784f0d89f81feb0237be3
from dataclasses import dataclass from typing import FrozenSet, Callable, List import heapq from contextlib import contextmanager from time import time @contextmanager def timing(description: str) -> None: start = time() yield elapsed_time = (time() - start) * 1000 print(f"{description}: {elapsed_time}ms") @dataclass class Action: name: str precondition: FrozenSet[str] positive_effect: FrozenSet[str] negative_effect: FrozenSet[str] @dataclass class Problem: actions: List[Action] init: FrozenSet[str] goal: FrozenSet[str] class Solver(): def __init__(self, heuristic: Callable[[FrozenSet[str]], float] = None): self.heuristic = heuristic if heuristic is None: self.heuristic = lambda x: 0 def solve(self, problem: Problem) -> List[str]: open_list = [(self.heuristic(set(problem.init)), problem.init, [])] closed_list = set() while open_list: _, current, path = heapq.heappop(open_list) if current not in closed_list: closed_list.add(current) if problem.goal.issubset(current): return path for act in problem.actions: if act.precondition.issubset(current): child = current.difference(act.negative_effect).union(act.positive_effect) if child not in closed_list: child_f = len(path) + 1 + self.heuristic(set(child)) heapq.heappush(open_list, (child_f, child, path+[act.name])) def generate_problem(size: int) -> Problem: actions = [Action('mk_y', frozenset(['x']), frozenset(['y']), frozenset(['x'])), Action('reset_x', frozenset([]), frozenset(['x']), frozenset([]))] goal = [] for i in range(size): name = f"v{i}" goal.append(name) actions.append(Action(f'mk_{name}', frozenset(['y']), frozenset([name]), frozenset(['y'])),) init = frozenset(['x']) return Problem(actions, init, frozenset(goal)) def main(): size = 15 problem = generate_problem(size) def heuristic(state: FrozenSet[str]) -> float: return size - len(state & problem.goal) with timing("Without Heuristic"): solver = Solver(heuristic=None) plan = solver.solve(problem) print(plan) with timing("With Heuristic"): solver = Solver(heuristic=heuristic) plan = solver.solve(problem) print(plan) if __name__ == '__main__': main()
py
1a5b7ef70d1f0f1437617ca4ab9dc8daa6a7b79a
# Generated by Django 3.1.6 on 2021-04-29 02:35 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('crud', '0002_auto_20210428_2309'), ] operations = [ migrations.RenameModel( old_name='Pet', new_name='Animals', ), ]
py
1a5b7f186751c78a37c62b3aa778827083eadc95
# -*- coding: utf-8 -*- import base64 import datetime import hashlib import io import uuid from lxml import etree, builder DS = builder.ElementMaker( namespace="http://www.w3.org/2000/09/xmldsig#", nsmap={ "ds": "http://www.w3.org/2000/09/xmldsig#", }, ) CanonicalizationMethod = DS.CanonicalizationMethod DigestMethod = DS.DigestMethod DigestValue = DS.DigestValue KeyInfo = DS.KeyInfo Object = DS.Object Reference = DS.Reference Signature = DS.Signature SignatureMethod = DS.SignatureMethod SignatureValue = DS.SignatureValue SignedInfo = DS.SignedInfo Transform = DS.Transform Transforms = DS.Transforms X509Certificate = DS.X509Certificate X509Data = DS.X509Data X509IssuerName = DS.X509IssuerName X509SerialNumber = DS.X509SerialNumber XADES = builder.ElementMaker( namespace="http://uri.etsi.org/01903/v1.3.2#", nsmap={ "xades": "http://uri.etsi.org/01903/v1.3.2#", "ds": "http://www.w3.org/2000/09/xmldsig#", }, ) Cert = XADES.Cert CertDigest = XADES.CertDigest DataObjectFormat = XADES.DataObjectFormat Description = XADES.Description DocumentationReference = XADES.DocumentationReference DocumentationReferences = XADES.DocumentationReferences Identifier = XADES.Identifier IssuerSerial = XADES.IssuerSerial MimeType = XADES.MimeType ObjectIdentifier = XADES.ObjectIdentifier QualifyingProperties = XADES.QualifyingProperties SignedDataObjectProperties = XADES.SignedDataObjectProperties SignedProperties = XADES.SignedProperties SignedSignatureProperties = XADES.SignedSignatureProperties SigningCertificate = XADES.SigningCertificate SigningTime = XADES.SigningTime UnsignedProperties = XADES.UnsignedProperties def ensure_str(x, encoding="utf-8", none_ok=False): if none_ok is True and x is None: return x if not isinstance(x, str): x = x.decode(encoding) return x class BES: def __init__(self): self.guid = str(uuid.uuid1()) self.time = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") def sha256(self, data): h = hashlib.sha256(data).digest() return ensure_str(base64.b64encode(h)) def _c14n(self, nodes, algorithm, inclusive_ns_prefixes=None): exclusive, with_comments = False, False if algorithm.startswith("http://www.w3.org/2001/10/xml-exc-c14n#"): exclusive = True if algorithm.endswith("#WithComments"): with_comments = True if not isinstance(nodes, list): nodes = [nodes] c14n = b"" for node in nodes: c14n += etree.tostring(node, method="c14n", exclusive=exclusive, with_comments=with_comments, inclusive_ns_prefixes=inclusive_ns_prefixes) # TODO: optimize if needed if exclusive is False: # TODO: there must be a nicer way to do this. 
See also: # http://www.w3.org/TR/xml-c14n, "namespace axis" # http://www.w3.org/TR/xml-c14n2/#sec-Namespace-Processing c14n = c14n.replace(b' xmlns=""', b'') return c14n def build(self, fname, data, smime, cert, certcontent, signproc, base64encode=True, withcomments=False): swithcomments = "" if withcomments: swithcomments = "#WithComments" if base64encode: data = ensure_str(base64.b64encode(data)) signedobj = Object( data, Encoding="http://www.w3.org/2000/09/xmldsig#base64", MimeType=smime, Id="Object1_" + self.guid, ) elif 0: signedobj = Object( data, MimeType='text/xml', Id="Object1_" + self.guid, ) else: signedobj = Object( MimeType='text/xml', Id="Object1_" + self.guid, ) tree = etree.parse(io.BytesIO(data)) signedobj.append(tree.getroot()) certdigest = self.sha256(certcontent) b64 = b''.join(base64.encodebytes(certcontent).split()) certcontent = [] for i in range(0, len(b64), 64): certcontent.append(b64[i:i + 64]) certcontent = b'\n'.join(certcontent) certserialnumber = '%d' % cert.serial_number certissuer = [] for k, v in ( ('CN', 'common_name'), ('O', 'organization_name'), ('C', 'country_name'), ('serialNumber', 'serial_number'), ): try: v = cert.issuer.native[v] certissuer.append('%s=%s' % (k, v)) except: pass certissuer = ','.join(certissuer) signedprop = SignedProperties( SignedSignatureProperties( SigningTime( self.time ), SigningCertificate( Cert( CertDigest( DigestMethod( Algorithm="http://www.w3.org/2001/04/xmlenc#sha256", ), DigestValue( certdigest, ), ), IssuerSerial( X509IssuerName( certissuer, ), X509SerialNumber( certserialnumber, ), ), ), ), Id="SignedSignatureProperties_" + self.guid + "_04", ), SignedDataObjectProperties( DataObjectFormat( Description("""\ MIME-Version: 1.0 Content-Type: %s Content-Transfer-Encoding: binary Content-Disposition: filename="%s"\ """ % (smime, fname), ), ObjectIdentifier( Identifier( "http://www.certum.pl/OIDAsURI/signedFile/1.2.616.1.113527.3.1.1.3.1", Qualifier="OIDAsURI", ), Description( u"Opis formatu dokumentu oraz jego pełna nazwa", ), DocumentationReferences( DocumentationReference( "http://www.certum.pl/OIDAsURI/signedFile.pdf", ), ), ), MimeType( smime, ), ObjectReference="#Reference1_" + self.guid + "_29", ), Id="SignedDataObjectProperties_" + self.guid + "_45", ), Id="SignedProperties_" + self.guid + "_40", ) canonicalizedxml = self._c14n(signedobj, '') digestvalue1 = self.sha256(canonicalizedxml) canonicalizedxml = self._c14n(signedprop, '') digestvalue2 = self.sha256(canonicalizedxml) signedinfo = SignedInfo( CanonicalizationMethod( Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315", ), SignatureMethod( Algorithm="http://www.w3.org/2001/04/xmldsig-more#rsa-sha256", ), Reference( Transforms( Transform( Algorithm="http://www.w3.org/TR/2001/REC-xml-c14n-20010315" + swithcomments, ) ), DigestMethod( Algorithm="http://www.w3.org/2001/04/xmlenc#sha256", ), DigestValue( digestvalue1, ), URI="#Object1_" + self.guid, Id="Reference1_" + self.guid + "_29", ), Reference( DigestMethod( Algorithm="http://www.w3.org/2001/04/xmlenc#sha256", ), DigestValue( digestvalue2, ), Id="SignedProperties-Reference_" + self.guid + "_26", Type="http://uri.etsi.org/01903#SignedProperties", URI="#SignedProperties_" + self.guid + "_40", ), Id="SignedInfo_" + self.guid + "_4f", ) canonicalizedxml = self._c14n(signedinfo, '') signature = signproc(canonicalizedxml, 'sha256') actualdigestencoded = ensure_str(base64.b64encode(signature)) digestvalue3 = [] for i in range(0, len(actualdigestencoded), 64): digestvalue3.append(actualdigestencoded[i:i + 
64]) digestvalue3 = '\n'.join(digestvalue3) DOC = Signature( signedinfo, SignatureValue( digestvalue3, Id="SignatureValue_" + self.guid + "_5c", ), KeyInfo( X509Data( X509Certificate( certcontent.decode() ), ), Id="KeyInfo_" + self.guid + "_2a", ), Object( QualifyingProperties( signedprop, UnsignedProperties( Id="UnsignedProperties_" + self.guid + "_5b", ), Id="QualifyingProperties_" + self.guid + "_4d", Target="#Signature_" + self.guid + "_17", ), ), signedobj, Id="Signature_" + self.guid + "_17", ) return DOC
py
1a5b7fc1e6f8c56ef5381abc67408cc28f7d155b
from redis import StrictRedis import logging class Config(object): DEBUG = True # 配置MySQL:指定数据库位置 SQLALCHEMY_DATABASE_URI = 'mysql://root:[email protected]:3306/information' # 禁用追踪mysql:因为mysql的性能差,如果再去追踪mysql的所有的修改,会再次浪费性能 SQLALCHEMY_TRACK_MODIFICATIONS = False REDIS_HOST = '192.168.73.128' REDIS_PORT = 6379 SECRET_KEY = 'SECRETKEY' SESSION_TYPE = 'redis' SESSION_USE_SIGNER = True SESSION_REDIS = StrictRedis(host=REDIS_HOST, port=REDIS_PORT) PERMANENT_SESSION_LIFETIME = 60 * 60 * 24 class DevelopmentConfig(Config): DEBUG = True LOGGING_LEVEL = logging.DEBUG class ProductionConfig(Config): DEBUG = False LOGGING_LEVEL = logging.WARNING configs = { 'dev': DevelopmentConfig, 'prod': ProductionConfig }
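# Illustrative app-factory sketch (an assumption about how this config module
# is consumed; Flask and Flask-Session are not imported by the original file).
def create_app(config_name='dev'):
    from flask import Flask
    from flask_session import Session

    app = Flask(__name__)
    app.config.from_object(configs[config_name])
    Session(app)  # server-side sessions stored in the Redis instance configured above
    return app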