code
stringlengths 114
1.05M
| path
stringlengths 3
312
| quality_prob
float64 0.5
0.99
| learning_prob
float64 0.2
1
| filename
stringlengths 3
168
| kind
stringclasses 1
value |
---|---|---|---|---|---|
class TeamSearch:
    """Team summary as returned by the team lookup (search) endpoint."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.team_id: str = data.get('id')
        self.name: str = data.get('name')
        self.is_default: bool = data.get('is_default')
        self.org_uuid: str = data.get('org_uuid')
        self.user_count: int = data.get('user_count')
        self.settings: Settings = Settings(data.get('settings'))
        self.group: AccountGroup = AccountGroup(data.get('group'))

    def __str__(self):
        return self.name
class Team:
    """Full team profile returned by the team-management endpoints."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.team_id: str = data.get('id')
        self.name: str = data.get('name')
        self.description: str = data.get('description')
        self.is_default: bool = data.get('is_default')
        self.org_uuid: str = data.get('org_uuid')
        self.created_at: str = data.get('created_at')
        self.updated_at: str = data.get('updated_at')
        self.group: AccountGroup = AccountGroup(data.get('group'))
        self.settings: Settings = Settings(data.get('settings'))

    def __str__(self):
        return self.name
class Settings:
    """Concurrency allocation settings attached to a team."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.virtual_machines: int = data.get('virtual_machines')
        self.real_devices: int = data.get('real_devices')
        self.live_only: bool = data.get('live_only')
class AccountGroup:
    """Account group (id, name, concurrency quotas) a team belongs to."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.group_id: str = data.get('id')
        self.name: str = data.get('name')
        self.virtual_machines: int = data.get('virtual_machines')
        self.real_devices: int = data.get('real_devices')

    def __str__(self):
        return self.name
class User:
    """Full user profile returned by the user-management endpoints.

    Attribute values mirror the raw API payload; missing keys become None.
    """

    def __init__(self, data: dict):
        if data is not None:
            self.user_id: str = data.get('id')
            self.username: str = data.get('username')
            self.email: str = data.get('email')
            self.first_name: str = data.get('first_name')
            self.last_name: str = data.get('last_name')
            self.is_active: bool = data.get('is_active')
            self.created_at: str = data.get('created_at')
            self.updated_at: str = data.get('updated_at')
            # `or []` guards against a missing/None key, which previously
            # raised TypeError while iterating None (same guard idea as
            # RealDeviceInsight.test_cases).
            self.teams: list[Team] = [Team(team) for team in data.get('teams') or []]
            self.roles: list[Role] = [Role(role) for role in data.get('roles') or []]
            self.is_staff: bool = data.get('is_staff')
            self.is_superuser: bool = data.get('is_superuser')
            self.user_type: str = data.get('user_type')
            self.groups: list = data.get('groups')
            self.organization: Organization = Organization(data.get('organization'))
            self.phone: str = data.get('phone')
            self.is_organization_admin: bool = data.get('is_organization_admin')
            self.is_team_admin: bool = data.get('is_team_admin')

    def __str__(self):
        return self.username
class UserSearch:
    """User summary as returned by the user lookup (search) endpoint."""

    def __init__(self, data: dict):
        if data is not None:
            self.user_id: str = data.get('id')
            self.username: str = data.get('username')
            self.first_name: str = data.get('first_name')
            self.last_name: str = data.get('last_name')
            self.is_active: bool = data.get('is_active')
            self.email: str = data.get('email')
            # `or []` guards against a missing/None key, which previously
            # raised TypeError while iterating None.
            self.teams: list[Team] = [Team(team) for team in data.get('teams') or []]
            self.roles: list[Role] = [Role(role) for role in data.get('roles') or []]
            self.organization: Organization = Organization(data.get('organization'))

    def __str__(self):
        return self.username
class Role:
    """A permission role assigned to a user (name plus numeric role id)."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.name: str = data.get('name')
        self.role: int = data.get('role')

    def __str__(self):
        return self.name
class Organization:
    """Minimal organization record (id and display name)."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.organization_id: str = data.get('id')
        self.name: str = data.get('name')

    def __str__(self):
        return self.name
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/models/accounts.py
| 0.595493 | 0.337722 |
accounts.py
|
pypi
|
from saucelab_api_client.models.device import Device
from saucelab_api_client.models.file import FileSummary
class RealDeviceJob:
    """A single real-device test job as returned by the RDC jobs API.

    Attribute values mirror the raw API payload; missing keys become None.
    """

    def __init__(self, data: dict):
        # None-payload guard added for consistency with every other model
        # class in this package; previously a None payload raised
        # AttributeError on the first `data.get(...)` call.
        if data is not None:
            self.application_summary: FileSummary = FileSummary(data.get('application_summary'))
            self.assigned_tunnel_id = data.get('assigned_tunnel_id')
            self.device_type: str = data.get('device_type')
            self.owner_sauce: str = data.get('owner_sauce')
            self.automation_backend: str = data.get('automation_backend')
            self.base_config: dict = data.get('base_config')
            self.build = data.get('build')
            self.collects_automator_log: bool = data.get('collects_automator_log')
            self.consolidated_status: str = data.get('consolidated_status')
            self.creation_time: int = data.get('creation_time')
            self.device_descriptor: Device = Device(data.get('device_descriptor'))
            self.end_time: int = data.get('end_time')
            self.error = data.get('error')
            self.job_id: str = data.get('id')
            self.framework_log_url: str = data.get('framework_log_url')
            self.device_log_url: str = data.get('device_log_url')
            self.requests_url: str = data.get('requests_url')
            self.test_cases_url = data.get('test_cases_url')
            self.manual: bool = data.get('manual')
            self.modification_time: int = data.get('modification_time')
            self.name: str = data.get('name')
            self.os: str = data.get('os')
            self.os_version: str = data.get('os_version')
            self.device_name: str = data.get('device_name')
            self.passed = data.get('passed')
            self.proxied: bool = data.get('proxied')
            self.record_screenshots: bool = data.get('record_screenshots')
            self.screenshots: list = data.get('screenshots')
            self.record_video: bool = data.get('record_video')
            self.start_time: int = data.get('start_time')
            self.status: str = data.get('status')
            self.tags: list = data.get('tags')
            self.video_url: str = data.get('video_url')
            self.remote_app_file_url: str = data.get('remote_app_file_url')
            self.appium_session_id: str = data.get('appium_session_id')
            self.device_session_id = data.get('device_session_id')
            self.client: str = data.get('client')

    def __str__(self):
        return self.name
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/models/real_device_job.py
| 0.513668 | 0.198278 |
real_device_job.py
|
pypi
|
class File:
    """An uploaded app-storage file record."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.file_id: str = data.get('id')
        self.owner: dict = data.get('owner')
        self.name: str = data.get('name')
        self.upload_timestamp: int = data.get('upload_timestamp')
        self.etag: str = data.get('etag')
        self.kind: str = data.get('kind')
        self.group_id: int = data.get('group_id')
        self.description = data.get('description')
        self.metadata: Metadata = Metadata(data.get('metadata'))
        self.access: dict = data.get('access')
        self.sha256: str = data.get('sha256')

    def __str__(self):
        return self.name
class Metadata:
    """App-package metadata attached to an uploaded file."""

    def __init__(self, data: dict):
        if data is not None:
            self.identifier: str = data.get('identifier')
            self.name: str = data.get('name')
            self.version: str = data.get('version')
            self.is_test_runner: bool = data.get('is_test_runner')
            self.icon = data.get('icon')
            self.short_version: str = data.get('short_version')
            self.is_simulator: bool = data.get('is_simulator')
            self.min_os: str = data.get('min_os')
            self.target_os: str = data.get('target_os')
            self.test_runner_plugin_path = data.get('test_runner_plugin_path')
            # Payload key is spelled 'vesrion_code'; the misspelled attribute
            # is kept for backward compatibility and a correctly spelled
            # alias is provided alongside it.
            self.vesrion_code: int = data.get('vesrion_code')
            self.version_code: int = self.vesrion_code
            self.min_sdk: int = data.get('min_sdk')
            self.target_sdk: int = data.get('target_sdk')
            self.test_runner_class = data.get('test_runner_class')

    def __str__(self):
        return self.identifier
class FileSummary:
    """Condensed app-file description embedded in job payloads."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.app_storage_id: str = data.get('appStorageId')
        self.group_id: int = data.get('groupId')
        self.filename: str = data.get('filename')
        self.name: str = data.get('name')
        self.version: str = data.get('version')
        self.short_version: str = data.get('shortVersion')
        self.min_os_version: str = data.get('minOsVersion')
        self.target_os_version: str = data.get('targetOsVersion')

    def __str__(self):
        return self.name
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/models/file.py
| 0.650245 | 0.320236 |
file.py
|
pypi
|
class Performance:
    """Raw performance record; metric_data is kept as the raw dict."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.job_id: str = data.get('job_id')
        self.job_owner: str = data.get('job_owner')
        self.job_name_hash: str = data.get('job_name_hash')
        self.metric_data: dict = data.get('metric_data')
        self.page_url: str = data.get('page_url')
        self.order_index: int = data.get('order_index')
        self.job_creation_time: str = data.get('job_creation_time')
        self.load_id: str = data.get('load_id')
        self.loader_id: str = data.get('loader_id')
        self.error: str = data.get('error')
class PerformanceJob:
    """Performance record with metric_data parsed into PerformanceMetrics."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.job_id: str = data.get('job_id')
        self.job_owner: str = data.get('job_owner')
        self.job_name_hash: str = data.get('job_name_hash')
        self.metric_data: PerformanceMetrics = PerformanceMetrics(data.get('metric_data'))
        self.page_url: str = data.get('page_url')
        self.order_index: int = data.get('order_index')
        self.job_creation_time: str = data.get('job_creation_time')
        self.load_id: str = data.get('load_id')
        self.loader_id: str = data.get('loader_id')
        self.error: str = data.get('error')
        self.links = data.get('links')
class PerformanceMetrics:
    """Per-page performance metric values, unpacked from the raw payload.

    Each attribute maps to one payload key (the API mixes snake_case and
    camelCase key names); missing keys become None.
    """

    # attribute name -> key in the raw metric_data payload
    _FIELDS = {
        'rtt': 'rtt',
        'load': 'load',
        'score': 'score',
        'max_rtt': 'maxRtt',
        'num_fonts': 'numFonts',
        'num_tasks': 'numTasks',
        'xhr_size': 'xhr_size',
        'font_size': 'font_size',
        'xhr_count': 'xhr_count',
        'first_paint': 'firstPaint',
        'font_count': 'font_count',
        'image_size': 'image_size',
        'num_scripts': 'numScripts',
        'other_size': 'other_size',
        'speed_index': 'speedIndex',
        'throughput': 'throughput',
        'image_count': 'image_count',
        'num_requests': 'numRequests',
        'other_count': 'other_count',
        'script_size': 'script_size',
        'first_c_p_u_idle': 'firstCPUIdle',
        'requests_size': 'requestsSize',
        'script_count': 'script_count',
        'document_size': 'document_size',
        'requests_count': 'requestsCount',
        'total_task_time': 'totalTaskTime',
        'document_count': 'document_count',
        'num_stylesheets': 'numStylesheets',
        'stylesheet_size': 'stylesheet_size',
        'time_to_first_byte': 'timeToFirstByte',
        'total_byte_weight': 'totalByteWeight',
        'dom_content_loaded': 'domContentLoaded',
        'first_interactive': 'firstInteractive',
        'last_visual_change': 'lastVisualChange',
        'max_server_latency': 'maxServerLatency',
        'num_tasks_over10ms': 'numTasksOver10ms',
        'num_tasks_over25ms': 'numTasksOver25ms',
        'num_tasks_over50ms': 'numTasksOver50ms',
        'stylesheet_count': 'stylesheet_count',
        'first_visual_change': 'firstVisualChange',
        'num_tasks_over100ms': 'numTasksOver100ms',
        'num_tasks_over500ms': 'numTasksOver500ms',
        'total_blocking_time': 'totalBlockingTime',
        'server_response_time': 'serverResponseTime',
        'first_contentful_paint': 'firstContentfulPaint',
        'first_meaningful_paint': 'firstMeaningfulPaint',
        'cumulative_layout_shift': 'cumulativeLayoutShift',
        'estimated_input_latency': 'estimatedInputLatency',
        'largest_contentful_paint': 'largestContentfulPaint',
        'main_document_transfer_size': 'mainDocumentTransferSize',
    }

    def __init__(self, data: dict):
        if data is None:
            return
        for attr, key in self._FIELDS.items():
            setattr(self, attr, data.get(key))
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/models/performance.py
| 0.611034 | 0.335432 |
performance.py
|
pypi
|
class Job:
    """A virtual-device (Selenium/Appium) job record.

    Attribute values mirror the raw API payload; missing keys become None.
    """

    def __init__(self, data: dict):
        if data is None:
            return
        self.browser_short_version: str = data.get('browser_short_version')
        self.video_url: str = data.get('video_url')
        self.creation_time: int = data.get('creation_time')
        self.custom_data = data.get('custom-data')
        self.browser_version: str = data.get('browser_version')
        self.owner: str = data.get('owner')
        self.automation_backend: str = data.get('automation_backend')
        self.job_id: str = data.get('id')
        self.collects_automator_log: bool = data.get('collects_automator_log')
        self.record_screenshots: bool = data.get('record_screenshots')
        self.record_video: bool = data.get('record_video')
        self.build = data.get('build')
        self.passed = data.get('passed')
        self.public: str = data.get('public')
        self.assigned_tunnel_id = data.get('assigned_tunnel_id')
        self.status: str = data.get('status')
        self.log_url: str = data.get('log_url')
        self.start_time: int = data.get('start_time')
        self.proxied: bool = data.get('proxied')
        self.modification_time: int = data.get('modification_time')
        self.tags: list = data.get('tags')
        self.name: str = data.get('name')
        self.commands_not_successful: int = data.get('commands_not_successful')
        self.consolidated_status: str = data.get('consolidated_status')
        self.selenium_version = data.get('selenium_version')
        self.manual: bool = data.get('manual')
        self.end_time: int = data.get('end_time')
        self.error: str = data.get('error')
        self.os: str = data.get('os')
        self.breakpointed = data.get('breakpointed')
        self.browser: str = data.get('browser')

    def __str__(self):
        return self.name
class JobSearch:
    """Job summary returned by the job search endpoint."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.status: str = data.get('status')
        self.base_config: JobBaseConfig = JobBaseConfig(data.get('base_config'))
        self.command_counts: dict = data.get('command_counts')
        self.deletion_time = data.get('deletion_time')
        self.url = data.get('url')
        self.org_id: str = data.get('org_id')
        self.creation_time: int = data.get('creation_time')
        self.job_id: str = data.get('id')
        self.team_id: str = data.get('team_id')
        self.performance_enabled = data.get('performance_enabled')
        self.assigned_tunnel_id = data.get('assigned_tunnel_id')
        self.container: bool = data.get('container')
        self.group_id: str = data.get('group_id')
        self.public: str = data.get('public')
        self.breakpointed = data.get('breakpointed')
class JobBaseConfig:
    """Requested W3C/Appium capabilities a job was started with."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.sauce_options: dict = data.get('sauce:options')
        self.appium_new_command_timeout: int = data.get('appium:newCommandTimeout')
        self.browser_name: str = data.get('browserName')
        self.appium_device_name: str = data.get('appium:deviceName')
        self.appium_platform_version: str = data.get('appium:platformVersion')
        self.platform_name: str = data.get('platformName')
class JobAssets:
    """Download names/URLs of the artifacts produced by a job."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.video_mp4: str = data.get('video.mp4')
        self.selenium_log: str = data.get('selenium-log')
        self.sauce_log: str = data.get('sauce-log')
        self.video: str = data.get('video')
        self.logcat_log: str = data.get('logcat.log')
        self.screenshots: list = data.get('screenshots')
        self.automator_log: str = data.get('automator.log')
        self.network_har: str = data.get('network.har')
        self.performance_json: str = data.get('performance.json')
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/models/job.py
| 0.586049 | 0.346237 |
job.py
|
pypi
|
import json
class Device:
    """A real-device descriptor from the RDC device catalogue.

    :param data: raw API payload (or a previously exported __dict__)
    :param restrore: when True, `data` is treated as an attribute dump and
        restored verbatim (e.g. from `to_json`). Parameter name keeps the
        original misspelling for interface compatibility.
    """

    def __init__(self, data: dict, restrore: bool = False):
        if data is not None:
            if restrore:
                for key, value in data.items():
                    self.__setattr__(key, value)
            else:
                self.abi_type: str = data.get('abiType')
                self.api_level: int = data.get('apiLevel')
                self.cpu_cores: int = data.get('cpuCores')
                self.cpu_frequency: int = data.get('cpuFrequency')
                self.default_orientation: str = data.get('defaultOrientation')
                self.dpi: int = data.get('dpi')
                self.has_on_screen_buttons: bool = data.get('hasOnScreenButtons')
                self.device_id: str = data.get('id')
                self.internal_orientation: str = data.get('internalOrientation')
                self.internal_storage_size: int = data.get('internalStorageSize')
                self.is_arm: bool = data.get('isArm')
                self.is_key_guard_disabled: bool = data.get('isKeyGuardDisabled')
                self.is_private: bool = data.get('isPrivate')
                self.is_rooted: bool = data.get('isRooted')
                self.is_tablet: bool = data.get('isTablet')
                # The payload carries a list of manufacturers; only the first
                # entry is kept. Guard against a missing/empty value, which
                # previously raised TypeError on None[0]. Annotation fixed to
                # str: the stored value is the element, not the list.
                manufacturers = data.get('manufacturer')
                self.manufacturer: str = manufacturers[0] if manufacturers else None
                self.model_number: str = data.get('modelNumber')
                self.name: str = data.get('name')
                self.os_type: str = data.get('os')
                self.os_version: str = data.get('osVersion')
                self.pixels_per_point: int = data.get('pixelsPerPoint')
                self.ram_size: int = data.get('ramSize')
                self.resolution_height: int = data.get('resolutionHeight')
                self.resolution_width: int = data.get('resolutionWidth')
                self.screen_size: float = data.get('screenSize')
                self.sd_card_size: int = data.get('sdCardSize')
                self.supports_appium_web_app_testing: bool = data.get('supportsAppiumWebAppTesting')
                self.supports_global_proxy: bool = data.get('supportsGlobalProxy')
                self.supports_minicap_socket_connection: bool = data.get('supportsMinicapSocketConnection')
                self.supports_mock_locations: bool = data.get('supportsMockLocations')
                self.cpu_type: str = data.get('cpuType')
                self.device_family: str = data.get('deviceFamily')
                self.dpi_name: str = data.get('dpiName')
                self.is_alternative_io_enabled: bool = data.get('isAlternativeIoEnabled')
                self.supports_manual_web_testing: bool = data.get('supportsManualWebTesting')
                self.supports_multi_touch: bool = data.get('supportsMultiTouch')
                self.supports_xcui_test: bool = data.get('supportsXcuiTest')

    def __str__(self):
        return self.name

    def to_json(self):
        """Serialize all set attributes to a JSON string."""
        return json.dumps(self.__dict__)
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/models/device.py
| 0.535584 | 0.338924 |
device.py
|
pypi
|
class Insight:
    """A single test record from the Insights test-results endpoint."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.insight_id: str = data.get('id')
        self.owner: str = data.get('owner')
        self.ancestor: str = data.get('ancestor')
        self.name: str = data.get('name')
        self.build: str = data.get('build')
        self.creation_time: str = data.get('creation_time')
        self.start_time: str = data.get('start_time')
        self.end_time: str = data.get('end_time')
        self.duration: int = data.get('duration')
        self.status: str = data.get('status')
        self.error: str = data.get('error')
        self.os: str = data.get('os')
        self.os_normalized: str = data.get('os_normalized')
        self.browser: str = data.get('browser')
        self.browser_normalized: str = data.get('browser_normalized')
        self.details_url: str = data.get('details_url')

    def __str__(self):
        return self.name
class RealDeviceInsight:
    """Aggregated real-device test-case statistics plus sort helpers."""

    def __init__(self, data: dict):
        if data is None:
            return
        raw_cases = data.get('test_cases')
        if isinstance(raw_cases, (list, tuple)):
            self.test_cases: list[TestCase] = [TestCase(case) for case in raw_cases]
        else:
            self.test_cases = []
        self.total: int = data.get('total')
        self.statuses: TestCasesStatuses = TestCasesStatuses(data.get('statuses'))
        self.avg_runtime: int = data.get('avg_runtime')

    def filter_by_complete_rate(self, reverse: bool = False):
        """Test cases ordered by completion rate."""
        return sorted(self.test_cases, key=lambda case: case.complete_rate, reverse=reverse)

    def filter_by_pass_rate(self, reverse: bool = False):
        """Test cases ordered by pass rate."""
        return sorted(self.test_cases, key=lambda case: case.pass_rate, reverse=reverse)
class TestCase:
    """Per-test-case aggregate statistics from Insights."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.name: str = data.get('name')
        self.statuses: TestCasesStatuses = TestCasesStatuses(data.get('statuses'))
        self.total_runs: int = data.get('total_runs')
        self.complete_rate: float = data.get('complete_rate')
        self.error_rate: float = data.get('error_rate')
        self.fail_rate: float = data.get('fail_rate')
        self.pass_rate: float = data.get('pass_rate')
        self.avg_duration: float = data.get('avg_duration')
        self.median_duration: float = data.get('median_duration')
        self.total_duration: int = data.get('total_duration')

    def __str__(self):
        return self.name
class TestCasesStatuses:
    """Run-status counters; absent counters default to zero."""

    def __init__(self, data: dict):
        if data is None:
            return
        self.complete: int = data.get('complete', 0)
        self.passed: int = data.get('passed', 0)
        self.failed: int = data.get('failed', 0)
class CsvClass:
    """Parsed Insights CSV export: one CsvResult per non-empty data row."""

    def __init__(self, csv_content: bytes):
        rows = csv_content.decode('utf-8').split('\r\n')
        # Skip the header row; drop any trailing empty line.
        self.results = [CsvResult(row) for row in rows[1:] if row != '']
        self.tests_number = len(self.results)

    def filter_by_complete_rate(self, reverse: bool = False):
        """Results ordered by completion rate."""
        return sorted(self.results, key=lambda result: result.complete_rate, reverse=reverse)

    def filter_by_pass_rate(self, reverse: bool = False):
        """Results ordered by pass rate."""
        return sorted(self.results, key=lambda result: result.pass_rate, reverse=reverse)
class CsvResult:
    """One data row of the Insights CSV export, unpacked into named fields."""

    def __init__(self, csv_row: str):
        if '"' in csv_row:
            # The test name is the quoted second field; pull it out first so
            # commas embedded in it do not break the split.
            pieces = csv_row.split('"')
            quoted_name = pieces[1]
            pieces.remove(quoted_name)
            cells = ''.join(pieces).split(',')
            cells[1] = quoted_name
        else:
            cells = csv_row.split(',')
        # Integer-looking cells become ints; everything else stays a string.
        cells = [int(cell) if cell.isnumeric() else cell for cell in cells]
        (self.row_id, self.name, self.total_runs, self.total_duration,
         self.avg_duration, self.median_duration, self.pass_rate,
         self.fail_rate, self.error_rate, self.complete_rate,
         self.status_passed, self.status_failed, self.status_error,
         self.status_complete) = cells

    def __str__(self):
        return self.name
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/models/insights.py
| 0.676727 | 0.418519 |
insights.py
|
pypi
|
from datetime import datetime
from saucelab_api_client.category import Base
from saucelab_api_client.models.platform_ import Status, WebDriverPlatform, AppiumPlatform
class Platform(Base):
    """Sauce Labs platform-information endpoints (/rest/v1/info)."""

    __sub_host = '/rest/v1/info'

    def get_status(self):
        """
        https://docs.saucelabs.com/dev/api/platform/#get-sauce-labs-teststatus
        Returns the current (30 second cache) availability of the Sauce Labs platform
        :return: Status or str
        """
        response = self._session.request('get', f'{self.__sub_host}/status')
        return self._valid(response, Status)

    def get_supported_platforms(self, automation_api: str):
        """
        https://docs.saucelabs.com/dev/api/platform/#get-supported-platforms
        Returns the set of supported operating system and browser combinations for the specified automation framework
        :param automation_api: The framework for which you are requesting supported platforms. Valid values are:
        - all
        - appium
        - webdriver
        :return:
        """
        response = self._session.request('get', f'{self.__sub_host}/platforms/{automation_api}')
        if isinstance(response, str):
            return response
        return tuple(
            WebDriverPlatform(item) if item['automation_backend'] == 'webdriver' else AppiumPlatform(item)
            for item in response
        )

    def get_end_of_life_date_appium_versions(self):
        """
        https://docs.saucelabs.com/dev/api/platform/#get-end-of-life-dates-for-appium-versions
        Returns the expected date (in Unix time) on which Sauce Labs support for each
        Appium version is to be discontinued
        :return:
        """
        response = self._session.request('get', f'{self.__sub_host}/platforms/appium/eol')
        if isinstance(response, str):
            return response
        return {
            version: None if timestamp is None else datetime.utcfromtimestamp(timestamp)
            for version, timestamp in response.items()
        }
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/base_classes/platform_api.py
| 0.769427 | 0.158956 |
platform_api.py
|
pypi
|
from saucelab_api_client.base_classes.exceptions import MissingArguments
from saucelab_api_client.category import Base
from saucelab_api_client.models.accounts import TeamSearch, Team, UserSearch, User
from saucelab_api_client.models.service import validate_dict, get_dict_from_locals
class Accounts(Base):
    """Entry point for the account-management API namespaces."""

    @property
    def account_team(self):
        """Team-management endpoints bound to the current session."""
        return AccountTeam(self._session)

    @property
    def account_user(self):
        """User-management endpoints bound to the current session."""
        return AccountUser(self._session)
class AccountTeam(Base):
    """Team-management endpoints (/team-management/v1)."""

    __sub_host = '/team-management/v1'

    def teams(self, team_name: str = None):
        """
        https://docs.saucelabs.com/dev/api/accounts/#lookup-teams
        Queries the organization of the requesting account and returns the number of teams matching the query and
        a summary of each team, including the ID value, which may be a required parameter of other API calls
        related to a specific team.
        You can filter the results of your query using the name parameter below
        :param team_name: Returns the set of teams that begin with the specified name value
        :return:
        """
        params = {'name': team_name} if team_name else {}
        return self._valid(self._session.request('get', f'{self.__sub_host}/teams/', params=params), TeamSearch,
                           'results')

    def get_team(self, team_id: str):
        """
        https://docs.saucelabs.com/dev/api/accounts/#get-a-specific-team
        Returns the full profile of the specified team. The ID of the team is the only valid unique identifier
        :param team_id: The unique identifier of the team
        :return:
        """
        return self._valid(self._session.request('get', f'{self.__sub_host}/teams/{team_id}'), Team)

    def create_team(self, name: str, organization: str, settings: dict, description: str = None):
        """
        https://docs.saucelabs.com/dev/api/accounts/#create-a-team
        Creates a new team under the organization of the requesting account
        :param name: A name for the new team
        :param organization: The unique ID of the organization under which the team is created
        :param settings: The settings object specifies the concurrency allocations for the team within the organization.
                The available attributes are:
                - virtual_machines - integer
                - real devices - integer
                - live only - boolean Defaults to false.
        :param description: A description to distinguish the team within the organization
        :return: Team object
        """
        validate_dict(settings, ('virtual_machines', 'real_devices', 'live_only'), soft_check=True)
        data = {
            'name': name,
            'organization': organization,
            'settings': settings,
        }
        if description is not None:
            data['description'] = description
        return self._valid(self._session.request('post', f'{self.__sub_host}/teams', data=data), Team)

    def delete_team(self, team_id: str):
        """
        https://docs.saucelabs.com/dev/api/accounts/#delete-a-team
        Deletes the specified team from the organization of the requesting account
        :param team_id: The unique identifier of the team
        :return: None
        """
        self._session.request('delete', f'{self.__sub_host}/teams/{team_id}')

    def update_team(self, team_id: str, name: str, settings: dict, description: str = None):
        """
        https://docs.saucelabs.com/dev/api/accounts/#update-a-team
        Replaces all values of the specified team with the new set of parameters passed in the request.
        To update only certain parameters, see Partially Update Team.
        :param team_id: The unique identifier of the team
        :param name: The name of the team as it will be after the update.
                Pass the current value to keep the name unchanged.
        :param settings: The settings object specifies the concurrency allocations for the team within the organization.
                The available attributes are:
                - virtual_machines - integer
                - real devices - integer
                - live only - boolean Defaults to false.
        :param description: A description to distinguish the team within the organization.
                If the previous team definition included a description, omitting the parameter in the
                update will delete it from the team record.
        :return:
        """
        validate_dict(settings, ('virtual_machines', 'real_devices', 'live_only'), soft_check=True)
        data = {
            'name': name,
            'settings': settings,
            'description': description
        }
        return self._valid(self._session.request('put', f'{self.__sub_host}/teams/{team_id}', data=data), Team)

    def partially_update_team(self, team_id: str = None, name: str = None, settings: dict = None,
                              description: str = None):
        """
        https://docs.saucelabs.com/dev/api/accounts/#partially-update-a-team
        Updates one or more individual editable parameters (such as the concurrency allocation) of the
        specified team without requiring a full profile update.
        :param team_id: The unique identifier of the team
        :param name: An updated name for the team
        :param settings: The settings object specifies the concurrency allocations for the team within the organization.
                The available attributes are:
                - virtual_machines - integer
                - real devices - integer
                - live only - boolean Defaults to false.
        :param description: An updated description
        :return:
        """
        if settings is not None:
            validate_dict(settings, ('virtual_machines', 'real_devices', 'live_only'), soft_check=True)
        # Only send fields the caller actually supplied. The previous
        # comprehension also kept None values and team_id itself, so the
        # payload always had 4 keys: nulls were PATCHed to the API and the
        # MissingArguments guard below could never fire.
        data = {key: value for key, value in locals().items()
                if key in ('name', 'settings', 'description') and value is not None}
        if len(data.keys()) == 0:
            raise MissingArguments('Missing any arguments')
        return self._valid(self._session.request('patch', f'{self.__sub_host}/teams/{team_id}/', data=data), Team)

    def list_team_members(self, team_id: str):
        """
        https://docs.saucelabs.com/dev/api/accounts/#list-team-members
        Returns the number of members in the specified team and lists each member
        :param team_id: Identifies the team for which you are requesting the list of members
        :return:
        """
        return self._valid(self._session.request('get', f'{self.__sub_host}/teams/{team_id}/members'), UserSearch,
                           'results')

    def reset_access_key_for_team(self, team_id: str):
        """
        https://docs.saucelabs.com/dev/api/accounts/#reset-access-keys-for-entire-team
        Globally regenerates new access key values for every member of the specified team
        Regenerating an access key invalidates the previous value and any tests containing the prior value will fail,
        so make sure you edit any tests and credential environment variables with the new value.
        :param team_id: Identifies the team for which you are resetting member access keys
        :return: None
        """
        self._session.request('post', f'{self.__sub_host}/teams/{team_id}/reset-access-key/')
class AccountUser(Base):
__sub_host = '/team-management/v1'
def all_users(self, username: str = None, teams: str = None, team_name: str = None, roles: int = None,
phrase: str = None, status: str = None, limit: int = None, offset: int = None):
"""
https://docs.saucelabs.com/dev/api/accounts/#lookup-users
Queries the organization of the requesting account and returns the number of users matching the query and
a basic profile of each user, including the ID value, which may be a required parameter of other API calls
related to a specific user.
You can narrow the results of your query using any of the following filtering parameters.
:param username: Limits the results to usernames that begin with the specified value
:param teams: Limit results to users who belong to the specified team_ids.
Specify multiple teams as comma-separated values
:param team_name: Limit results to users who belong to the specified team names.
Specify multiple teams as comma-separated values
:param roles: Limit results to users who are assigned certain roles. Valid values are:
1 - Organaization Admin. 4 - Team Admin. 3 - Member.
Specify multiple roles as comma-separated values
:param phrase: Limit results to users whose first name, last name, or email address begins
with the specified value
:param status: Limit results to users of the specifid status. Valid values are: active, pending, inactive
:param limit: Limit results to a maximum number per page. Default value is 20
:param offset: The starting record number from which to return results
:return:
"""
params = get_dict_from_locals(locals())
return self._valid(self._session.request('get', f'{self.__sub_host}/users/', params=params), UserSearch,
'results')
def get_user(self, user_id: str):
"""
https://docs.saucelabs.com/dev/api/accounts/#get-a-specific-user
Returns the full profile of the specified user. The ID of the user is the only valid unique identifier
:param user_id: The user's unique identifier
:return: UserSearch or str
"""
return self._valid(self._session.request('get', f'{self.__sub_host}/users/{user_id}'), User)
def create(self, email: str, username: str, password: str, first_name: str = None, last_name: str = None,
organization: str = None, role: str = None, team: str = None):
"""
https://docs.saucelabs.com/dev/api/accounts/#create-a-new-user
Creates a new user in the Sauce Labs platform
:param first_name: The new user's first name
:param last_name: The new user's last name
:param email: The user's contact email address
:param username: A login username for the new user
:param password: A login password for the new user
:param organization: The identifier of the organization to create the user's account
:param role: The new user's permission role.
Valid values are 1 - Organaization Admin. 4 - Team Admin. 3 - Member
:param team: The identifier of the team of which the new user is a member
:return:
"""
data = get_dict_from_locals(locals())
return self._valid(self._session.request('post', f'{self.__sub_host}/users/', data=data), User)
    def update_user(self, user_id: str, first_name: str, last_name: str, email: str, password: str):
        """
        https://docs.saucelabs.com/dev/api/accounts/#update-a-user
        Replaces all values of the specified user profile with the new set of parameters passed in the request.
        To update only certain parameters, see Partially Update a User.
        :param user_id: The unique identifier of the user
        :param first_name: The user's first name
        :param last_name: The user's last name
        :param email: The user's contact email address
        :param password: A login password for the new user
        :return: User or str
        """
        # locals() is harvested into the request body, so no helper locals may
        # be introduced above this call.
        data = get_dict_from_locals(locals())
        # The endpoint expects the password twice; mirror it as verify_password.
        data['verify_password'] = password
        return self._valid(self._session.request('put', f'{self.__sub_host}/users/{user_id}', data=data), User)
    def partially_update_user(self, user_id: str, first_name: str = None, last_name: str = None, email: str = None,
                              password: str = None):
        """
        https://docs.saucelabs.com/dev/api/accounts/#partially-update-a-user
        Allows you to update individual user values without replacing the entire profile
        :param user_id: The unique identifier of the user to update
        :param first_name: The user's first name
        :param last_name: The user's last name
        :param email: The user's contact email address
        :param password: A login password for the new user
        :return: User or str
        :raises MissingArguments: when no updatable field was supplied
        """
        # locals() is harvested into the request body, so no helper locals may
        # be introduced above this call. (The 'password' membership test below
        # suggests the helper drops unset values — TODO confirm.)
        data = get_dict_from_locals(locals())
        if 'password' in data:
            # The endpoint expects the password twice; mirror it as verify_password.
            data['verify_password'] = password
        if len(data.keys()) == 0:
            raise MissingArguments('Missing any arguments')
        return self._valid(self._session.request('patch', f'{self.__sub_host}/users/{user_id}', data=data), User)
def get_user_concurrency(self, username: str):
"""
https://docs.saucelabs.com/dev/api/accounts/#get-user-concurrency
Allows you to update individual user values without replacing the entire profile
:param username: The username of the user whose concurrency you are looking up
:return:
"""
return self._session.request('get', f'/rest/v1.2/users/{username}/concurrency')
def user_teams(self, user_id: str):
"""
https://docs.saucelabs.com/dev/api/accounts/#get-a-users-team
Returns the number of teams a user belongs to and provides information about each team,
including whether it is the default and its concurrency settings.
:param user_id: The unique identifier of the user
:return:
"""
return self._session.request('get', f'{self.__sub_host}/users/{user_id}/teams')['results']
def get_active_user(self):
return self._valid(self._session.request('get', f'{self.__sub_host}/users/me'), User)
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/base_classes/accounts_api.py
| 0.819207 | 0.271455 |
accounts_api.py
|
pypi
|
from saucelab_api_client.category import Base
from saucelab_api_client.models.sauce_connect import Tunnel, TunnelJobs, StoppedTunnel
class SauceConnectApi(Base):
    """
    https://app.eu-central-1.saucelabs.com/tunnels
    Client for the Sauce Connect (tunnel) REST endpoints.
    """
    __sub_host = '/rest/v1'

    def get_tunnel_for_user(self, username: str):
        """
        https://docs.saucelabs.com/dev/api/connect/#get-tunnels-for-a-user
        Return the IDs of all currently running tunnels launched by the user.
        :param username: The authentication username of the user whose tunnels you are requesting
        :return: str or dict
        """
        endpoint = f'{self.__sub_host}/{username}/tunnels'
        return self._session.request('get', endpoint)

    def get_tunnel_information(self, username: str, tunnel_id: str):
        """
        https://docs.saucelabs.com/dev/api/connect/#get-tunnel-information
        Return information about one tunnel.
        :param username: The authentication username of the owner of the requested tunnel
        :param tunnel_id: The unique identifier of the requested tunnel
        :return: Tunnel object on success, error string otherwise
        """
        endpoint = f'{self.__sub_host}/{username}/tunnels/{tunnel_id}'
        return self._valid(self._session.request('get', endpoint), Tunnel)

    def get_current_jobs_for_tunnel(self, username: str, tunnel_id: str):
        """
        https://docs.saucelabs.com/dev/api/connect/#get-current-jobs-for-a-tunnel
        Return the number of currently running jobs for the tunnel.
        :param username: The authentication username of the user whose tunnels you are requesting
        :param tunnel_id: The unique identifier of the requested tunnel
        :return: TunnelJobs object on success, error string otherwise
        """
        endpoint = f'{self.__sub_host}/{username}/tunnels/{tunnel_id}/num_jobs'
        return self._valid(self._session.request('get', endpoint), TunnelJobs)

    def stop_tunnel(self, username: str, tunnel_id: str):
        """
        https://docs.saucelabs.com/dev/api/connect/#stop-a-tunnel
        Shut down the specified tunnel.
        :param username: The authentication username of the user whose tunnels you are requesting
        :param tunnel_id: The unique identifier of the tunnel to stop
        :return: StoppedTunnel object on success, error string otherwise
        """
        endpoint = f'{self.__sub_host}/{username}/tunnels/{tunnel_id}'
        return self._valid(self._session.request('delete', endpoint), StoppedTunnel)
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/base_classes/sauce_connect_api.py
| 0.747616 | 0.19063 |
sauce_connect_api.py
|
pypi
|
import os
from threading import Thread, Event
from saucelab_api_client.category import Base
from saucelab_api_client.models.build import Build
from saucelab_api_client.models.job import JobSearch, Job, JobAssets
from saucelab_api_client.models.service import print_progress, get_dict_from_locals
class JobsApi(Base):
    """Client for the Sauce Labs jobs and builds REST endpoints."""
    __sub_host = '/rest/v1'

    def get_user_jobs(self, username: str, limit: int = None, skip: int = None, from_: str = None, to: str = None):
        """
        https://docs.saucelabs.com/dev/api/jobs/#jobs-methods
        Get a list of recent jobs run by the specified user
        :param username: The username of the Sauce Labs user whose jobs you are looking up
        :param limit: The maximum number of jobs to return
        :param skip: Returns only the jobs beginning after this index number
        :param from_: Return only jobs that ran on or after this Unix timestamp
        :param to: Return only jobs that ran on or before this Unix timestamp
        :return: JobSearch object or error string
        """
        # Build the query explicitly. The previous locals()-based comprehension
        # also leaked 'self' and 'username' into the query string.
        candidates = {'limit': limit, 'skip': skip, 'from': from_, 'to': to}
        params = {key: value for key, value in candidates.items() if value}
        return self._valid(self._session.request('get', f'{self.__sub_host}/{username}/jobs', params=params),
                           JobSearch)

    def get_job_details(self, username: str, job_id: str):
        """
        https://docs.saucelabs.com/dev/api/jobs/#get-job-details
        Get detailed information about a specific job
        :param username: The username of the Sauce Labs user whose jobs you are looking up
        :param job_id: The Sauce Labs identifier of the job you are looking up
        :return: Job object or error string
        """
        return self._valid(self._session.request('get', f'{self.__sub_host}/{username}/jobs/{job_id}'), Job)

    def update_job(self, username: str, job_id: str, name: str = None, tags: [tuple, list] = None, public: str = None,
                   passed: str = None, build: str = None, custom_data: dict = None):
        """
        https://docs.saucelabs.com/dev/api/jobs/#update-a-job
        Edit job attributes based on parameters passed in the request, including setting the status of the job.
        :param username: The username of the owner of the job you are updating
        :param job_id: The Sauce Labs identifier of the job to be updated
        :param name: A new name for the job
        :param tags: The set of distinguishing tags to apply to the job
        :param public: Specifies the level of visibility permitted for the job
        :param passed: Asserts whether the job passed (true) or not (false)
        :param build: Assign the job to a build
        :param custom_data: Any relevant attributes you wish to add to the job details
        :return: Job object or error string
        """
        # get_dict_from_locals harvests the current locals as the request body,
        # so it must remain the first statement of this method.
        data = get_dict_from_locals(locals(), replace_underscore=True)
        return self._valid(self._session.request('put', f'{self.__sub_host}/{username}/jobs/{job_id}', data=data), Job)

    def stop_job(self, username: str, job_id: str):
        """
        https://docs.saucelabs.com/dev/api/jobs/#stop-a-job
        Stop the indicated job
        :param username: The username of the owner of the job to stop
        :param job_id: The Sauce Labs identifier of the job to stop
        :return: Job object or error string
        """
        return self._valid(self._session.request('put', f'{self.__sub_host}/{username}/jobs/{job_id}/stop'), Job)

    def delete_job(self, job_id: str):
        """
        https://docs.saucelabs.com/dev/api/jobs/#delete-a-job
        Delete a job and all of its assets from the Sauce Labs test history
        :param job_id: The Sauce Labs identifier of the job to delete
        :return: None
        """
        self._session.request('delete', f'/rest/v1.1/jobs/{job_id}')

    def delete_all_users_jobs(self, username: str):
        """
        https://docs.saucelabs.com/dev/api/jobs/#delete-all-of-a-users-jobs
        Delete the entire test history and all assets for the specified user
        :param username: The username of the Sauce Labs user whose jobs you are deleting
        :return: None
        """
        # Fixed: this endpoint must be called with DELETE; 'get' merely listed the jobs.
        self._session.request('delete', f'/rest/v1.1/{username}/jobs')

    def get_job_assets(self, username: str, job_id: str):
        """
        https://docs.saucelabs.com/dev/api/jobs/#list-job-assets
        Get a list of files associated with a specific test, such as the logs, video, and screenshots
        :param username: The username of the owner of the job
        :param job_id: The Sauce Labs identifier of the job for which you are retrieving the asset list
        :return: JobAssets object or error string
        """
        return self._valid(self._session.request('get', f'{self.__sub_host}/{username}/jobs/{job_id}/assets'),
                           JobAssets)

    def get_job_asset_file(self, username: str, job_id: str, file_name: str, media_path: str = None,
                           media_file_name: str = None):
        """
        https://docs.saucelabs.com/dev/api/jobs/#get-a-job-asset-file
        Retrieve one of the asset files associated with a job, such as a log file, video, or screenshot.
        Text assets ('log', 'har') are returned; media assets ('mp4', 'png') are written to disk.
        :param username: The username of the owner of the job
        :param job_id: The Sauce Labs identifier of the job for which you are retrieving the asset
        :param file_name: The name of the asset file you wish to download
        :param media_path: Download folder for media files
        :param media_file_name: File name for the downloaded file (defaults to the Sauce Labs name)
        :return: asset content for text assets, None for media assets
        :raises FileNotFoundError: if file_name has no extension, or media_path is missing for a media file
        """
        name_parts = file_name.split('.')
        if len(name_parts) < 2:
            # Fixed: the previous `> 0` check was always true, so extension-less
            # names crashed with IndexError instead of reaching this error.
            raise FileNotFoundError('Wrong file name')
        # Use the last dotted component so names like 'archive.1.log' work.
        file_type = name_parts[-1]
        content_type = None
        if file_type in ('log', 'har'):
            content_type = 'text'
        elif file_type in ('mp4', 'png'):
            if media_path is None:
                raise FileNotFoundError('Media path is not indicated for media file')
            content_type = 'content'
        response = self._session.request('get', f'{self.__sub_host}/{username}/jobs/{job_id}/assets/{file_name}',
                                         return_type=content_type)
        if content_type == 'content':
            download_path = media_path if media_file_name is None else os.path.join(media_path, media_file_name)
            exit_event = Event()
            thread = Thread(target=print_progress, args=(exit_event, 'download'))
            thread.start()
            try:
                # Fixed: close the file handle; previously it leaked.
                with open(download_path, 'wb') as media_file:
                    media_file.write(response)
            finally:
                # Fixed: stop the progress thread after the write, not before.
                exit_event.set()
                thread.join()
        else:
            return response

    def delete_all_job_assets(self, username: str, job_id: str):
        """
        https://docs.saucelabs.com/dev/api/jobs/#delete-job-assets
        This request deletes all of the asset files associated with a job.
        Deleting a single asset file is not supported at this time
        :param username: The username of the owner of the job
        :param job_id: The Sauce Labs identifier of the job whose assets are deleted
        :return: raw API response
        """
        # Fixed: the path was missing the '/rest/v1' prefix used by every other endpoint.
        return self._session.request('delete', f'{self.__sub_host}/{username}/jobs/{job_id}/assets')

    def get_builds(self, username: str):
        """
        https://docs.saucelabs.com/dev/api/jobs/#get-builds
        Get a list of recent builds run by the specified user
        :param username: The username of the Sauce Labs users whose builds you are looking up
        :return: Build object or error string
        """
        return self._valid(self._session.request('get', f'{self.__sub_host}/{username}/builds'), Build)
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/base_classes/job_api.py
| 0.61659 | 0.175679 |
job_api.py
|
pypi
|
from datetime import datetime
from saucelab_api_client.category import Base
from saucelab_api_client.models.insights import Insight
from saucelab_api_client.models.service import get_dict_from_locals, get_datetime_for_insights
class Insights(Base):
    """
    Client for the Sauce Labs analytics (insights) REST endpoints.

    Every method normalizes ``start``/``end`` via get_datetime_for_insights and
    then harvests its local variables into query parameters with
    get_dict_from_locals(locals()) — so no helper locals may be introduced
    before that call in any method.
    """
    __sub_host = '/v1/analytics'
    def test_results(self, start: datetime, end: datetime, scope=None, owner=None, status=None, build=None, from_=None,
                     max_results=None, missing_build=None, query=None, desc=None, error=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-test-results
        Returns run data for all tests that match the request criteria
        :param start: The starting date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param build: Limit results to those grouped by this build name
        :param from_: Begin results list from this record number
        :param max_results: The maximum number of results to return
        :param missing_build: Requires no value. If this parameter is included in the query string,
        results will only include tests with no assigned build
        :param query: Limit results to only those with this test name
        :param desc: Set to true to sort results in descending order by creation time. Default value is false
        :param error: Limit results to only those that threw this error message
        :return:
        """
        start, end = get_datetime_for_insights(start, end)
        # locals() is harvested as query parameters; keep this call right here.
        params = get_dict_from_locals(locals())
        return self._valid(self._session.request('get', f'{self.__sub_host}/tests', params=params), Insight, 'items')
    def get_summary_of_test_metric(self, start: datetime, end: datetime, scope=None, owner=None, status=None,
                                   query=None, os=None, browser=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-summary-of-test-metrics
        Returns an aggregate of metric values for runs of a specified test during the specified time period
        :param start: The starting date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param query: The name of the test for which results are requested
        :param os: Limit results to only those run on the specified operating systems
        :param browser: Limit results to only those run on the specified browsers
        :return:
        """
        start, end = get_datetime_for_insights(start, end)
        # locals() is harvested as query parameters; keep this call right here.
        params = get_dict_from_locals(locals())
        # NOTE(review): this path ('insights/test-metrics') does not follow the
        # 'tests'/'trends/...' pattern of the other methods — confirm against the docs.
        return self._session.request('get', f'{self.__sub_host}/insights/test-metrics', params=params)
    def get_test_trends(self, start: datetime, end: datetime, interval: str, scope=None, owner=None, status=None,
                        os=None, browser=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-test-trends
        Returns a set of data "buckets" representing tests that were run in each time interval defined
        by the request parameters
        :param start: The starting date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param interval: The amount of time representing the boundary of each data bucket
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param os: Limit results to only those run on the specified operating systems
        :param browser: Limit results to only those run on the specified browsers
        :return:
        """
        start, end = get_datetime_for_insights(start, end)
        # locals() is harvested as query parameters; keep this call right here.
        params = get_dict_from_locals(locals())
        return self._session.request('get', f'{self.__sub_host}/trends/tests', params=params)
    def get_builds_and_tests(self, start: datetime, end: datetime, scope=None, owner=None, status=None, os=None,
                             browser=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-builds-and-tests
        Returns the set of all tests run within the specified time period, grouped by whether
        each test was part of a build or not
        :param start: The starting date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param os: Limit results to only those run on the specified operating systems
        :param browser: Limit results to only those run on the specified browsers
        :return:
        """
        start, end = get_datetime_for_insights(start, end)
        # locals() is harvested as query parameters; keep this call right here.
        params = get_dict_from_locals(locals())
        return self._session.request('get', f'{self.__sub_host}/trends/builds_tests', params=params)
    def get_error_trends(self, start: datetime, end: datetime, scope=None, owner=None, status=None, os=None,
                         browser=None):
        """
        https://docs.saucelabs.com/dev/api/insights/#get-error-trends
        Returns an array of errors that occurred on all tests run within the specified time period.
        :param start: The starting date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param end: The ending date of the period during which the test runs executed, in YYY-MM-DDTHH:MM:SSZ
        or Unix time format.
        :param scope: Specifies the scope of the owner parameter
        :param owner: The name of one or more users in the requestor's organization who executed the requested tests.
        This parameter is required if the scope parameter is set to single.
        :param status: Limit results to only those with a specified status
        :param os: Limit results to only those run on the specified operating systems
        :param browser: Limit results to only those run on the specified browsers
        :return:
        """
        start, end = get_datetime_for_insights(start, end)
        # locals() is harvested as query parameters; keep this call right here.
        params = get_dict_from_locals(locals())
        return self._session.request('get', f'{self.__sub_host}/trends/errors', params=params)
|
/saucelab-api-client-0.6.2.tar.gz/saucelab-api-client-0.6.2/saucelab_api_client/base_classes/insights_api.py
| 0.828454 | 0.475971 |
insights_api.py
|
pypi
|
import json
class Jobs(object):
    """Thin client for the Sauce Labs ``/rest/v1`` job endpoints."""

    def __init__(self, sauce):
        # `sauce` supplies the account name (`.user`) and the HTTP transport
        # helpers (`.request`, `.download`).
        self.sauce = sauce

    def list_jobs(self, full=False, limit=False, skip=False):
        """
        List jobs for the account held by ``self.sauce``.

        Args:
            full: when truthy, return full job details instead of only ids.
            limit: maximum number of jobs to return.
            skip: number of jobs to skip.

        Returns:
            List of JSON objects containing job ids (or all details if
            ``full`` is set).
        """
        query = {}
        if limit:
            query['limit'] = limit
        if full:
            query['full'] = full
        if skip:
            query['skip'] = skip
        return self.sauce.request('GET', '/rest/v1/{0}/jobs'.format(self.sauce.user), params=query)

    def get_job_details(self, session):
        """Return the JSON details for the job identified by ``session``."""
        return self.sauce.request('GET', '/rest/v1/{0}/jobs/{1}'.format(self.sauce.user, session))

    def update_job(self, session, build=False, custom_data=False, name=False,
                   passed=None, public=None, tags=False):
        """Update job metadata; only explicitly supplied fields are sent."""
        payload = {}
        if name:
            payload['name'] = name
        if tags is not False:
            payload['tags'] = tags
        if public is not None:
            payload['public'] = public
        if passed is not None:
            payload['passed'] = passed
        if build:
            payload['build'] = build
        if custom_data:
            payload['custom-data'] = custom_data
        url = '/rest/v1/{0}/jobs/{1}'.format(self.sauce.user, session)
        return self.sauce.request('PUT', url, json.dumps(payload))

    def stop_job(self, session):
        """Ask Sauce Labs to stop the running job ``session``."""
        return self.sauce.request('PUT', '/rest/v1/{0}/jobs/{1}/stop'.format(self.sauce.user, session))

    def delete_job(self, session):
        """Remove the job ``session`` from the test history."""
        return self.sauce.request('DELETE', '/rest/v1/{0}/jobs/{1}'.format(self.sauce.user, session))

    def list_job_assets(self, session):
        """List the assets (logs, video, screenshots) attached to ``session``."""
        return self.sauce.request('GET', '/rest/v1/{0}/jobs/{1}/assets'.format(self.sauce.user, session))

    def download_job_asset(self, session, asset):
        """
        Download a job asset from Sauce Labs.

        Args:
            session: session id of the sauce job.
            asset: name of the asset to download.

        Returns:
            The contents of the asset from the server, as a bytestring.
        """
        return self.sauce.download('/rest/v1/{0}/jobs/{1}/assets/{2}'.format(
            self.sauce.user, session, asset))

    def delete_job_assets(self, session):
        """Delete every asset attached to job ``session``."""
        return self.sauce.request('DELETE', '/rest/v1/{0}/jobs/{1}/assets'.format(self.sauce.user, session))
|
/saucelabs-python-0.3.tar.gz/saucelabs-python-0.3/sauce/jobs.py
| 0.751375 | 0.150309 |
jobs.py
|
pypi
|
from typing import Optional, List
from .params import DB
class BasicSauce:
    """One SauceNAO search result, built from the raw ``header``/``data`` dicts."""

    def __init__(self, raw):
        header = raw['header']
        data = raw['data']
        self.raw: dict = raw
        self.similarity: float = float(header['similarity'])
        self.thumbnail: str = header['thumbnail']
        self.index_id: int = header['index_id']
        self.index_name: str = header['index_name']
        self.title: Optional[str] = self._get_title(data)
        self.urls: List[str] = self._get_urls(data)
        self.author: Optional[str] = self._get_author(data)

    @staticmethod
    def _get_title(data):
        # Priority order matters: the first key present wins.
        for key in ('title', 'eng_name', 'material', 'source', 'created_at'):
            if key in data:
                return data[key]

    @staticmethod
    def _get_urls(data):
        if 'ext_urls' in data:
            return data['ext_urls']
        if 'getchu_id' in data:
            return [f'http://www.getchu.com/soft.phtml?id={data["getchu_id"]}']
        return []

    @staticmethod
    def _get_author(data):
        # Priority order matters: the first key present wins.
        for key in ('author', 'author_name', 'member_name', 'pawoo_user_username',
                    'twitter_user_handle', 'company'):
            if key in data:
                return data[key]
        if 'creator' in data:
            creator = data['creator']
            # 'creator' may be a list of names; report the first one.
            return creator[0] if isinstance(creator, list) else creator

    def __repr__(self):
        return f'<BasicSauce(title={repr(self.title)}, similarity={self.similarity:.2f})>'
class BookSauce(BasicSauce):
    """Result from a book-type index; adds the ``part`` field."""

    def __init__(self, raw):
        super().__init__(raw)
        self.part: str = raw['data']['part']

    def __repr__(self):
        return f'<BookSauce(title={repr(self.title)}, part={repr(self.part)}, similarity={self.similarity:.2f})>'
class VideoSauce(BasicSauce):
    """Result from a video-type index; adds part, year and timestamp fields."""

    def __init__(self, raw):
        super().__init__(raw)
        video_data = raw['data']
        self.part: str = video_data['part']
        self.year: str = video_data['year']
        self.est_time: str = video_data['est_time']

    def __repr__(self):
        return f'<VideoSauce(title={repr(self.title)}, part={repr(self.part)}, similarity={self.similarity:.2f})>'
class SauceResponse:
    """Parsed SauceNAO API response: account/limit metadata plus sorted results."""

    _BOOK_INDEXES = [DB.HMagazines, DB.Madokami, DB.MangaDex]
    _VIDEO_INDEXES = [DB.Anime, DB.HAnime, DB.Movies, DB.Shows]

    def __init__(self, resp):
        header = resp['header']
        self.raw: dict = resp
        self.user_id: int = header['user_id']
        self.account_type: int = header['account_type']
        self.short_limit: str = header['short_limit']
        self.long_limit: str = header['long_limit']
        self.long_remaining: int = header['long_remaining']
        self.short_remaining: int = header['short_remaining']
        self.status: int = header['status']
        self.results_requested: int = header['results_requested']
        self.search_depth: str = header['search_depth']
        self.minimum_similarity: float = header['minimum_similarity']
        self.results_returned: int = header['results_returned']
        self.results: List[BasicSauce] = self._parse_results(resp['results'])

    def _parse_results(self, results):
        # Results are ordered best-match first; the index id picks the wrapper type.
        if results is None:
            return []
        by_similarity = sorted(results, key=lambda r: float(r['header']['similarity']), reverse=True)
        parsed = []
        for result in by_similarity:
            index_id = result['header']['index_id']
            if index_id in self._BOOK_INDEXES:
                wrapper = BookSauce
            elif index_id in self._VIDEO_INDEXES:
                wrapper = VideoSauce
            else:
                wrapper = BasicSauce
            parsed.append(wrapper(result))
        return parsed

    def __len__(self):
        return len(self.results)

    def __bool__(self):
        return bool(self.results)

    def __getitem__(self, item):
        return self.results[item]

    def __repr__(self):
        return (f'<SauceResponse(count={repr(len(self.results))}, long_remaining={repr(self.long_remaining)}, '
                f'short_remaining={repr(self.short_remaining)})>')
|
/saucenao_api-2.4.0.tar.gz/saucenao_api-2.4.0/saucenao_api/containers.py
| 0.838977 | 0.332229 |
containers.py
|
pypi
|
class AstNode(object):
    """Base class for AST nodes with structural (attribute-wise) equality."""

    def __eq__(self, other):
        # Fixed: return NotImplemented instead of raising NotImplementedError,
        # so comparisons against foreign types fall back to Python's default
        # handling (reflected operation, then identity) instead of crashing.
        if not isinstance(other, AstNode):
            return NotImplemented
        self_props = set(p for p in dir(self) if not p.startswith('__'))
        other_props = set(p for p in dir(other) if not p.startswith('__'))
        if self_props != other_props:
            return False
        # Equal only when every non-dunder attribute compares equal.
        return all(getattr(self, prop) == getattr(other, prop) for prop in self_props)
class AStNodeList(list):
    """List-based base class for AST node collections."""
    # NOTE: the 'AStNodeList' capitalization is historical; subclasses
    # throughout this module reference it, so it must not be renamed here.
    pass
class AttributeDefault(AstNode):
    """AST node: default value for a named attribute."""
    def __init__(self, attribute_name, attribute_value):
        self.attribute_name = attribute_name
        self.attribute_value = attribute_value
class AttributeDefaults(AStNodeList):
    """Collection of AttributeDefault nodes."""
    def __init__(self, attribute_defaults):
        # NOTE(review): items are stored on the attribute; the underlying list
        # itself stays empty — confirm this is intended.
        self.attribute_defaults = attribute_defaults
class AttributeDefinition(AstNode):
    """AST node: declaration of an attribute (object type, name, value type)."""
    def __init__(self, object_type, attribute_name, attribute_value_type):
        self.object_type = object_type
        self.attribute_name = attribute_name
        self.attribute_value_type = attribute_value_type
class AttributeDefinitions(AStNodeList):
    """Collection of AttributeDefinition nodes (stored on the attribute)."""
    def __init__(self, attribute_definitions):
        self.attribute_definitions = attribute_definitions
class AttributeValue(AstNode):
    """AST node: a single attribute value."""
    def __init__(self, value):
        self.value = value
class AttributeValues(AStNodeList):
    """Collection of attribute-value nodes (stored on the attribute)."""
    def __init__(self, attribute_values):
        self.attribute_values = attribute_values
class AttributeValueEnvironmentVariable(AstNode):
    """AST node: attribute value attached to an environment variable."""
    def __init__(self, name, value):
        self.name = name
        self.value = value
class AttributeValueForObject(AstNode):
    """AST node: attribute value attached to a named object."""
    def __init__(self, name, value):
        self.name = name
        self.value = value
class AttributeValueMessage(AstNode):
    """AST node: attribute value attached to a message (by message id)."""
    def __init__(self, message_id, value):
        self.message_id = message_id
        self.value = value
class AttributeValueNetworkNode(AstNode):
    """AST node: attribute value attached to a network node."""
    def __init__(self, name, value):
        self.name = name
        self.value = value
class AttributeValueSignal(AstNode):
    """AST node: attribute value attached to a signal within a message."""
    def __init__(self, message_id, signal_name, value):
        self.message_id = message_id
        self.signal_name = signal_name
        self.value = value
class AttributeValueTypeEnum(AstNode):
    """AST node: enum attribute value type; `e` holds the enumerators."""
    def __init__(self, e):
        self.e = e
class AttributeValueTypeFloat(AstNode):
    """AST node: float attribute value type; `a`/`b` are its two bounds."""
    def __init__(self, a, b):
        self.a = a
        self.b = b
class AttributeValueTypeHex(AstNode):
    """AST node: hex attribute value type; `a`/`b` are its two bounds."""
    def __init__(self, a, b):
        self.a = a
        self.b = b
class AttributeValueTypeInt(AstNode):
    """AST node: integer attribute value type; `a`/`b` are its two bounds."""
    def __init__(self, a, b):
        self.a = a
        self.b = b
class AttributeValueTypeString(AstNode):
    """AST node: string attribute value type (carries no extra data)."""
    def __init__(self):
        pass
class Comment(AstNode):
    """AST node: a free-standing comment string."""
    def __init__(self, value):
        self.value = value
class Comments(AStNodeList):
    """Collection of comment nodes."""
    def __init__(self, comments):
        # NOTE(review): items are stored on the attribute; the underlying list
        # itself stays empty — confirm this is intended.
        self.comments = comments
class CommentEnvironmentVariable(AstNode):
    """AST node: comment attached to an environment variable."""
    def __init__(self, name, value):
        self.name = name
        self.value = value
class CommentMessage(AstNode):
    """AST node: comment attached to a message (by message id)."""
    def __init__(self, message_id, value):
        self.message_id = message_id
        self.value = value
class CommentNetworkNode(AstNode):
    """AST node: comment attached to a network node."""
    def __init__(self, name, value):
        self.name = name
        self.value = value
class CommentSignal(AstNode):
    """AST node: comment attached to a signal within a message."""
    def __init__(self, message_id, signal_name, value):
        self.message_id = message_id
        self.signal_name = signal_name
        self.value = value
class BitTiming(AstNode):
    """AST node: bus bit-timing section (baud rate plus BTR0/BTR1 registers)."""
    def __init__(self, baud_rate, btr0, btr1):
        super(BitTiming, self).__init__()
        self.baud_rate = baud_rate
        self.btr0 = btr0
        self.btr1 = btr1
class DbcFile(AstNode):
    """Root AST node aggregating every top-level section of a parsed DBC file."""
    def __init__(self,
                 version=None,
                 new_symbols=tuple(),
                 bit_timing=None,
                 nodes=None,
                 value_tables=None,
                 messages=None,
                 message_transmitters=None,
                 environment_variables=None,
                 environment_variables_data=None,
                 signal_types=None,
                 comments=None,
                 attribute_definitions=None,
                 sigtype_attr_list=None,
                 attribute_defaults=None,
                 attribute_values=None,
                 value_descriptions=None,
                 category_definitions=None,
                 categories=None,
                 filter=None,
                 signal_type_refs=None,
                 signal_groups=None,
                 signal_extended_value_type_list=None,
                 multiplexed_signals=None):
        # Each keyword mirrors one optional top-level section; sections absent
        # from the input stay None (see the parser's p_dbc rule).
        self.version = version
        self.new_symbols = new_symbols
        self.bit_timing = bit_timing
        self.nodes = nodes
        self.value_tables = value_tables
        self.messages = messages
        self.message_transmitters = message_transmitters
        self.environment_variables = environment_variables
        self.environment_variables_data = environment_variables_data
        self.signal_types = signal_types
        self.comments = comments
        self.attribute_definitions = attribute_definitions
        self.sigtype_attr_list = sigtype_attr_list
        self.attribute_defaults = attribute_defaults
        self.attribute_values = attribute_values
        self.value_descriptions = value_descriptions
        self.category_definitions = category_definitions
        self.categories = categories
        # `filter` intentionally shadows the builtin; it is part of the public
        # keyword interface and must not be renamed.
        self.filter = filter
        self.signal_type_refs = signal_type_refs
        self.signal_groups = signal_groups
        self.signal_extended_value_type_list = signal_extended_value_type_list
        self.multiplexed_signals = multiplexed_signals
class Message(AstNode):
    """AST node: a message definition with its contained signals."""
    def __init__(self, identifier, name, size, transmitter, signals):
        # Normalize None to an empty list so each instance gets its own list.
        if signals is None:
            signals = list()
        self.identifier = identifier
        self.name = name
        self.size = size
        self.transmitter = transmitter
        self.signals = signals
class Messages(AStNodeList):
    """List of Message nodes, addressable by message name."""

    def get_by_name(self, name):
        """Return the first contained message whose ``name`` matches.

        :raises KeyError: if no message has that name.
        """
        for message in self:
            if message.name == name:
                return message
        # Fixed: include the missing name in the exception for diagnosability
        # (a bare `raise KeyError` carried no context).
        raise KeyError(name)
class MultiplexedSignal(AstNode):
    """AST node: multiplexed-signal mapping (signal, switch and value ranges)."""
    def __init__(self, message_id, signal_name, multiplexer_switch_name, ranges):
        self.message_id = message_id
        self.signal_name = signal_name
        self.multiplexer_switch_name = multiplexer_switch_name
        self.ranges = ranges
class MultiplexedSignals(AStNodeList):
    """Collection of MultiplexedSignal nodes."""
    pass
class Nodes(AStNodeList):
    """Collection of network-node names (stored on the attribute)."""
    def __init__(self, nodes):
        self.nodes = nodes
class Signal(AstNode):
    """AST node: a single signal definition within a message (layout, scaling,
    range, unit and receiver)."""
    def __init__(self, name, multiplexer_indicator, start_bit, signal_size, byte_order, value_type, factor, offset,
                 minimum, maximum, unit, receiver):
        self.name = name
        self.multiplexer_indicator = multiplexer_indicator
        self.start_bit = start_bit
        self.signal_size = signal_size
        self.byte_order = byte_order
        self.value_type = value_type
        # physical = raw * factor + offset (standard DBC scaling — confirm
        # against the consuming code).
        self.factor = factor
        self.offset = offset
        self.minimum = minimum
        self.maximum = maximum
        self.unit = unit
        self.receiver = receiver
class ValTable(AstNode):
    """AST node: a named value table."""
    def __init__(self, name, value_descriptions):
        self.name = name
        # NOTE(review): attribute is singular ('value_description') while the
        # parameter is plural — possibly a typo, but callers may rely on it.
        self.value_description = value_descriptions
class ValueDescription(AstNode):
    """AST node: one value/description pair."""
    def __init__(self, a, b):
        self.a = a
        self.b = b
class ValueDescriptionForEnvVar(AstNode):
    """AST node: value descriptions attached to an environment variable."""
    def __init__(self, a_name, value_description):
        self.a_name = a_name
        self.value_description = value_description
class ValueDescriptionForSignal(AstNode):
    """AST node: value descriptions attached to a signal within a message."""
    def __init__(self, message_id, signal_name, value_description):
        self.message_id = message_id
        self.signal_name = signal_name
        self.value_description = value_description
class ValueDescriptions(AStNodeList):
    """Collection of value-description nodes (stored on the attribute)."""
    def __init__(self, value_descriptions):
        self.value_descriptions = value_descriptions
class ValueTables(AStNodeList):
    """Collection of value-table nodes (stored on the attribute)."""
    def __init__(self, value_tables):
        self.value_tables = value_tables
class ValueTableDescription(AstNode):
    """AST node: one (value, name) entry of a value table."""
    def __init__(self, value, name):
        self.value = value
        self.name = name
class Version(str):
    """The file's version string, typed for identification in the AST."""
    pass
|
/sauci_pydbc-0.1.2-py3-none-any.whl/pydbc/parser/node.py
| 0.755366 | 0.161254 |
node.py
|
pypi
|
import os
import ply.yacc as yacc
from .exception import FormatException
from .lexer import tokens as lex_tokens
from .node import *
class DbcParser(object):
    """ply.yacc-based parser for Vector DBC files.

    IMPORTANT: the docstrings of the ``p_*`` methods below are grammar
    productions consumed by ``ply.yacc`` when the parser tables are built.
    They are part of the program's behaviour, not documentation, and must
    not be edited as prose.

    List productions throughout this class follow one recurring pattern:
    ``p[0] = [p[1]] + p[2]`` inside ``try``, with ``IndexError`` meaning the
    single-element alternative of the production matched.
    """
    # Token names are shared with the companion lexer module.
    tokens = lex_tokens
    def __init__(self, string):
        """Build (or load the cached) parser tables and parse *string*.

        The resulting tree is exposed through the :attr:`ast` property.
        """
        self._ast = list()
        self._yacc = yacc.yacc(debug=True, module=self, optimize=True,
                               outputdir=os.path.dirname(os.path.realpath(__file__)))
        self._yacc.parse(string)
    # Called by ply on a syntax error; p is None when the error occurs at
    # the very start / end of the input.
    @staticmethod
    def p_error(p):
        if p:
            raise FormatException('invalid sequence at position ', p.lexpos, string=p.lexer.lexdata)
        else:
            # NOTE(review): 'unvalid' looks like a typo for 'invalid', but the
            # message is runtime behaviour and is deliberately left untouched.
            raise FormatException('unvalid sequence in root node ', 0, string='')
    @property
    def ast(self):
        """Root of the parse result (a DbcFile after a successful parse)."""
        return self._ast
    # Root rule: each top-level section arrives as a (section-name, node)
    # pair; the pairs are turned into keyword arguments for DbcFile.
    def p_dbc(self, p):
        """dbc : empty
               | dbc_optionals_list"""
        kwargs = dict()
        if p[1] is not None:
            kwargs = dict(p[1])
        self._ast = DbcFile(**kwargs)
    @staticmethod
    def p_dbc_optionals_list(p):
        """dbc_optionals_list : dbc_optionals
                              | dbc_optionals dbc_optionals_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    # Tags each parsed section with the name of the production that
    # recognised it (p.slice[1].type), for use as a DbcFile kwarg.
    @staticmethod
    def p_dbc_optionals(p):
        """dbc_optionals : version
                         | new_symbols
                         | bit_timing
                         | nodes
                         | value_tables
                         | messages
                         | comments
                         | attribute_definitions
                         | attribute_defaults
                         | attribute_values
                         | value_descriptions
                         | multiplexed_signals"""
        p[0] = p.slice[1].type, p[1]
    # --- SG_MUL_VAL_ section (multiplexed signals) -------------------------
    @staticmethod
    def p_multiplexed_signals(p):
        """multiplexed_signals : multiplexed_signals_list_optional"""
        p[0] = MultiplexedSignals(p[1])
    @staticmethod
    def p_multiplexed_signals_list(p):
        """multiplexed_signals_list : multiplexed_signal
                                    | multiplexed_signal multiplexed_signals_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_multiplexed_signals_list_optional(p):
        """multiplexed_signals_list_optional : empty
                                             | multiplexed_signals_list"""
        p[0] = p[1]
    @staticmethod
    def p_multiplexed_signal(p):
        """multiplexed_signal : SG_MUL_VAL_ message_id multiplexed_signal_name multiplexor_switch_name multiplexor_value_ranges SEMICOLON"""
        p[0] = MultiplexedSignal(p[2], p[3], p[4], p[5])
    @staticmethod
    def p_multiplexed_signal_name(p):
        """multiplexed_signal_name : IDENT"""
        p[0] = p[1]
    @staticmethod
    def p_multiplexor_switch_name(p):
        """multiplexor_switch_name : IDENT"""
        p[0] = p[1]
    # Comma-separated list of RANGE tokens.
    @staticmethod
    def p_multiplexor_value_ranges(p):
        """multiplexor_value_ranges : value_range
                                    | value_range COMMA multiplexor_value_ranges"""
        try:
            p[0] = [p[1]] + p[3]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_value_range(p):
        """value_range : RANGE"""
        p[0] = p[1]
    # --- VAL_ section (value descriptions) ---------------------------------
    @staticmethod
    def p_value_descriptions(p):
        """value_descriptions : value_descriptions_list_optional"""
        p[0] = ValueDescriptions(p[1])
    @staticmethod
    def p_value_descriptions_list(p):
        """value_descriptions_list : value_descriptions_for
                                   | value_descriptions_for value_descriptions_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_value_descriptions_list_optional(p):
        """value_descriptions_list_optional : empty
                                            | value_descriptions_list"""
        p[0] = p[1]
    # A VAL_ entry targets either a signal or an environment variable.
    @staticmethod
    def p_value_descriptions_for(p):
        """value_descriptions_for : value_descriptions_for_signal
                                  | value_descriptions_for_env_var"""
        p[0] = p[1]
    @staticmethod
    def p_value_descriptions_for_signal(p):
        """value_descriptions_for_signal : VAL_ message_id signal_name value_description_list SEMICOLON"""
        p[0] = ValueDescriptionForSignal(p[2], p[3], p[4])
    @staticmethod
    def p_value_descriptions_for_env_var(p):
        """value_descriptions_for_env_var : VAL_ env_var_aname value_description_list SEMICOLON"""
        p[0] = ValueDescriptionForEnvVar(p[2], p[3])
    @staticmethod
    def p_value_description(p):
        """value_description : NUMERIC STRING"""
        p[0] = ValueDescription(p[1], p[2])
    @staticmethod
    def p_value_description_list(p):
        """value_description_list : value_description
                                  | value_description value_description_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    # --- BA_ section (attribute values) ------------------------------------
    @staticmethod
    def p_attribute_values(p):
        """attribute_values : attribute_values_list_optional"""
        p[0] = AttributeValues(p[1])
    @staticmethod
    def p_attribute_values_list(p):
        """attribute_values_list : attribute_value_for_object
                                 | attribute_value_for_object attribute_values_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_attribute_value_list_optional(p):
        """attribute_values_list_optional : empty
                                          | attribute_values_list"""
        p[0] = p[1]
    @staticmethod
    def p_env_var_aname(p):
        """env_var_aname : IDENT"""
        p[0] = p[1]
    @staticmethod
    def p_attribute_value_for_object(p):
        """attribute_value_for_object : BA_ attribute_name attribute_value_ SEMICOLON"""
        p[0] = AttributeValueForObject(p[2], p[3])
    # The value may be plain or scoped to a node / message / signal /
    # environment variable.
    @staticmethod
    def p_attribute_value_(p):
        """attribute_value_ : attribute_value
                            | attribute_value_network_node
                            | attribute_value_message
                            | attribute_value_signal
                            | attribute_value_environment_variable"""
        p[0] = p[1]
    @staticmethod
    def p_attribute_value_network_node(p):
        """attribute_value_network_node : BU_ node_name attribute_value"""
        p[0] = AttributeValueNetworkNode(p[2], p[3])
    @staticmethod
    def p_attribute_value_message(p):
        """attribute_value_message : BO_ message_id attribute_value"""
        p[0] = AttributeValueMessage(p[2], p[3])
    @staticmethod
    def p_attribute_value_signal(p):
        """attribute_value_signal : SG_ message_id signal_name attribute_value"""
        p[0] = AttributeValueSignal(p[2], p[3], p[4])
    @staticmethod
    def p_attribute_value_environment_variable(p):
        """attribute_value_environment_variable : EV_ env_var_name attribute_value"""
        p[0] = AttributeValueEnvironmentVariable(p[2], p[3])
    # --- simple terminal wrappers ------------------------------------------
    @staticmethod
    def p_node_name(p):
        """node_name : IDENT"""
        p[0] = p[1]
    @staticmethod
    def p_message_id(p):
        """message_id : NUMERIC"""
        p[0] = p[1]
    @staticmethod
    def p_signal_name(p):
        """signal_name : IDENT"""
        p[0] = p[1]
    @staticmethod
    def p_env_var_name(p):
        """env_var_name : IDENT"""
        p[0] = p[1]
    # --- BA_DEF_DEF_ section (attribute defaults) --------------------------
    @staticmethod
    def p_attribute_defaults(p):
        """attribute_defaults : attribute_defaults_list"""
        p[0] = AttributeDefaults(p[1])
    @staticmethod
    def p_attribute_defaults_list(p):
        """attribute_defaults_list : attribute_default
                                   | attribute_default attribute_defaults_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_attribute_default(p):
        """attribute_default : BA_DEF_DEF_ attribute_name attribute_value SEMICOLON"""
        p[0] = AttributeDefault(p[2], p[3])
    @staticmethod
    def p_attribute_value(p):
        """attribute_value : NUMERIC
                           | STRING"""
        p[0] = AttributeValue(p[1])
    # --- BA_DEF_ section (attribute definitions) ---------------------------
    @staticmethod
    def p_attribute_definitions(p):
        """attribute_definitions : attribute_definitions_list"""
        p[0] = AttributeDefinitions(p[1])
    @staticmethod
    def p_attribute_definitions_list(p):
        """attribute_definitions_list : attribute_definition
                                      | attribute_definition attribute_definitions_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_attribute_definition(p):
        """attribute_definition : BA_DEF_ object_type attribute_name attribute_value_type SEMICOLON"""
        p[0] = AttributeDefinition(p[2], p[3], p[4])
    # NOTE(review): method name is misspelled ('opbject'); only the 'p_'
    # prefix matters to ply, so renaming is cosmetic and left alone here.
    @staticmethod
    def p_opbject_type(p):
        """object_type : empty
                       | BU_
                       | BO_
                       | SG_
                       | EV_"""
        p[0] = p[1]
    @staticmethod
    def p_attribute_value_type(p):
        """attribute_value_type : attribute_value_type_int
                                | attribute_value_type_hex
                                | attribute_value_type_float
                                | attribute_value_type_string
                                | attribute_value_type_enum"""
        p[0] = p[1]
    # INT/HEX/FLOAT carry a (minimum, maximum) pair of NUMERIC operands.
    @staticmethod
    def p_attribute_value_type_int(p):
        """attribute_value_type_int : INT NUMERIC NUMERIC"""
        p[0] = AttributeValueTypeInt(p[2], p[3])
    @staticmethod
    def p_attribute_value_type_hex(p):
        """attribute_value_type_hex : HEX NUMERIC NUMERIC"""
        p[0] = AttributeValueTypeHex(p[2], p[3])
    @staticmethod
    def p_attribute_value_type_float(p):
        """attribute_value_type_float : FLOAT NUMERIC NUMERIC"""
        p[0] = AttributeValueTypeFloat(p[2], p[3])
    @staticmethod
    def p_attribute_value_type_string(p):
        """attribute_value_type_string : STRING"""
        p[0] = AttributeValueTypeString()
    @staticmethod
    def p_attribute_value_type_enum(p):
        """attribute_value_type_enum : ENUM comma_separated_char_string_list"""
        p[0] = AttributeValueTypeEnum(p[2])
    @staticmethod
    def p_comma_separated_char_string_list(p):
        """comma_separated_char_string_list : STRING
                                            | STRING COMMA comma_separated_char_string_list"""
        try:
            p[0] = [p[1]] + p[3]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_attribute_name(p):
        """attribute_name : STRING"""
        # TODO : check the format here according to section 12.1...
        p[0] = p[1]
    # --- CM_ section (comments) --------------------------------------------
    @staticmethod
    def p_comments(p):
        """comments : comment_list_optional"""
        p[0] = Comments(p[1])
    @staticmethod
    def p_comment(p):
        """comment : CM_ comment_definition SEMICOLON"""
        p[0] = p[2]
    @staticmethod
    def p_comment_definition(p):
        """comment_definition : char_string
                              | comment_network_node
                              | comment_message
                              | comment_signal
                              | comment_environment_variable"""
        p[0] = p[1]
    @staticmethod
    def p_char_string(p):
        """char_string : STRING"""
        p[0] = p[1]
    @staticmethod
    def p_comment_network_node(p):
        """comment_network_node : BU_ node_name char_string"""
        p[0] = CommentNetworkNode(p[2], p[3])
    @staticmethod
    def p_comment_message(p):
        """comment_message : BO_ message_id char_string"""
        p[0] = CommentMessage(p[2], p[3])
    @staticmethod
    def p_comment_signal(p):
        """comment_signal : SG_ message_id signal_name char_string"""
        p[0] = CommentSignal(p[2], p[3], p[4])
    # NOTE(review): unlike the sibling rules above, this one discards the
    # parsed env_var_name (p[2]) and comment text (p[3]) -- possibly a bug;
    # confirm against the CommentEnvironmentVariable constructor.
    @staticmethod
    def p_comment_environment_variable(p):
        """comment_environment_variable : EV_ env_var_name char_string"""
        p[0] = CommentEnvironmentVariable()
    @staticmethod
    def p_comment_list(p):
        """comment_list : comment
                        | comment comment_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_comment_list_optional(p):
        """comment_list_optional : empty
                                 | comment_list"""
        p[0] = p[1]
    # --- BO_ section (messages and their signals) --------------------------
    @staticmethod
    def p_messages(p):
        """messages : empty
                    | message_list"""
        p[0] = Messages(p[1])
    @staticmethod
    def p_message(p):
        """message : BO_ message_id message_name COLON message_size transmitter signal_list_optional"""
        p[0] = Message(p[2], p[3], p[5], p[6], p[7])
    @staticmethod
    def p_message_size(p):
        """message_size : NUMERIC"""
        p[0] = p[1]
    @staticmethod
    def p_transmitter(p):
        """transmitter : IDENT"""
        p[0] = p[1]
    @staticmethod
    def p_message_list(p):
        """message_list : message
                        | message message_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_message_name(p):
        """message_name : IDENT"""
        p[0] = p[1]
    # SG_ line layout: name mux : start|size@byte_order sign (factor,offset)
    # [min|max] "unit" receivers -- the p[...] indices below pick out the
    # meaningful operands between the punctuation tokens.
    @staticmethod
    def p_signal(p):
        """signal : SG_ IDENT multiplexer_indicator COLON NUMERIC VERTICAL_BAR NUMERIC AROBASE NUMERIC value_type PARENTHESE_OPEN NUMERIC COMMA NUMERIC PARENTHESE_CLOSE BRACE_OPEN NUMERIC VERTICAL_BAR NUMERIC BRACE_CLOSE STRING nodes_list"""
        p[0] = Signal(p[2], p[3], p[5], p[7], p[9], p[10], p[12], p[14], p[17], p[19], p[21], p[22])
    @staticmethod
    def p_signal_list(p):
        """signal_list : signal
                       | signal signal_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_signal_list_optional(p):
        """signal_list_optional : empty
                                | signal_list"""
        p[0] = p[1]
    # '+' for unsigned, '-' for signed.
    @staticmethod
    def p_value_type(p):
        """value_type : PLUS
                      | MINUS"""
        p[0] = p[1]
    @staticmethod
    def p_multiplexer_indicator(p):
        """multiplexer_indicator : empty
                                 | IDENT"""
        # TODO: handle format specified at page 5...
        p[0] = p[1]
    # --- VAL_TABLE_ section ------------------------------------------------
    @staticmethod
    def p_value_tables(p):
        """value_tables : value_table_list_optional"""
        p[0] = ValueTables(p[1])
    @staticmethod
    def p_value_table_list(p):
        """value_table_list : value_table
                            | value_table value_table_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_value_table_list_optional(p):
        """value_table_list_optional : empty
                                     | value_table_list"""
        p[0] = p[1]
    @staticmethod
    def p_value_table(p):
        """value_table : VAL_TABLE_ IDENT value_table_description_list_optional SEMICOLON"""
        p[0] = ValTable(p[2], p[3])
    @staticmethod
    def p_value_table_description(p):
        """value_table_description : NUMERIC STRING"""
        p[0] = ValueTableDescription(p[1], p[2])
    @staticmethod
    def p_value_table_description_list(p):
        """value_table_description_list : value_table_description
                                        | value_table_description value_table_description_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_value_table_description_list_optional(p):
        """value_table_description_list_optional : empty
                                                 | value_table_description_list"""
        p[0] = p[1]
    # --- VERSION section ---------------------------------------------------
    @staticmethod
    def p_version(p):
        """version : VERSION STRING"""
        p[0] = Version(p[2])
    # --- NS_ section (new symbols) -----------------------------------------
    @staticmethod
    def p_new_symbols(p):
        """new_symbols : NS_ COLON new_symbols_list_optional"""
        try:
            p[0] = p[3]
        except IndexError:
            p[0] = None
    @staticmethod
    def p_new_symbols_value(p):
        """
        new_symbols_value : CM_
                          | BA_DEF_
                          | BA_
                          | VAL_
                          | CAT_DEF_
                          | CAT_
                          | FILTER_
                          | BA_DEF_DEF_
                          | EV_DATA_
                          | ENVVAR_DATA_
                          | SGTYPE_
                          | SGTYPE_VAL_
                          | BA_DEF_SGTYPE_
                          | BA_SGTYPE_
                          | SIG_TYPE_REF_
                          | VAL_TABLE_
                          | SIG_GROUP_
                          | SIG_VALTYPE_
                          | SIGTYPE_VALTYPE_
                          | BO_TX_BU_
                          | BA_DEF_REL_
                          | BA_REL_
                          | BA_DEF_DEF_REL_
                          | BU_SG_REL_
                          | BU_EV_REL_
                          | BU_BO_REL_
                          | NS_DESC_
                          | FILTER
                          | SG_MUL_VAL_
        """
        p[0] = p[1]
    @staticmethod
    def p_new_symbols_list(p):
        """new_symbols_list : new_symbols_value
                            | new_symbols_value new_symbols_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_new_symbols_list_optional(p):
        """
        new_symbols_list_optional : empty
                                  | new_symbols_list
        """
        p[0] = p[1]
    # --- BS_ section (bit timing) ------------------------------------------
    # NOTE(review): method name is misspelled ('timimg'); only the 'p_'
    # prefix matters to ply, so it is left unchanged here.
    @staticmethod
    def p_bit_timimg(p):
        """bit_timing : BS_ COLON bit_timing_optional"""
        p[0] = p[3]
    # The empty alternative raises IndexError on p[3] -> None result.
    @staticmethod
    def p_bit_timing_optional(p):
        """bit_timing_optional : empty
                               | NUMERIC COLON NUMERIC COMMA NUMERIC"""
        try:
            p[0] = BitTiming(p[1], p[3], p[5])
        except IndexError:
            p[0] = None
    # --- BU_ section (network nodes) ---------------------------------------
    @staticmethod
    def p_nodes(p):
        """nodes : BU_ COLON nodes_list_optional"""
        p[0] = Nodes(p[3])
    @staticmethod
    def p_nodes_list(p):
        """nodes_list : IDENT
                      | IDENT nodes_list"""
        try:
            p[0] = [p[1]] + p[2]
        except IndexError:
            p[0] = [p[1]]
    @staticmethod
    def p_nodes_list_optional(p):
        """nodes_list_optional : empty
                               | nodes_list"""
        p[0] = p[1]
    # Standard "empty" production used by every optional rule above.
    @staticmethod
    def p_empty(p):
        """empty :"""
        p[0] = None
|
/sauci_pydbc-0.1.2-py3-none-any.whl/pydbc/parser/scanner.py
| 0.489992 | 0.311231 |
scanner.py
|
pypi
|
from elftools.common.exceptions import DWARFError
from elftools.dwarf.descriptions import describe_form_class
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import Symbol as ElfSymbol
from elftools.elf.sections import SymbolTableSection, SUNWSyminfoTableSection
class Address(int):
    """An integer address rendered as a sign-prefixed 8-digit hex string."""

    def __str__(self):
        sign = '-' if self < 0 else ''
        return '%s0x%08X' % (sign, abs(self))
class Symbol(ElfSymbol):
    """A symbol-table entry exposing size and address as plain properties."""

    @property
    def size(self):
        """Size of the symbol in bytes (the st_size field).

        :rtype: int
        """
        entry = self.entry
        return entry.st_size

    @property
    def address(self):
        """Address of the symbol (the st_value field).

        :rtype: int
        """
        entry = self.entry
        return entry.st_value
class AbiInfo(object):
    """ABI information from the ELF header (machine and version fields)."""

    def __init__(self, e_machine=None, e_version=None, *args, **kwargs):
        """Capture the e_machine / e_version header values."""
        self.machine = e_machine
        self.version = e_version

    def get_machine(self):
        """Return the machine identifier."""
        return self._machine

    def set_machine(self, value):
        """Set the machine identifier."""
        self._machine = value

    def get_version(self):
        """Return the ABI version."""
        return self._version

    def set_version(self, value):
        """Set the ABI version."""
        self._version = value

    machine = property(get_machine, set_machine)
    version = property(get_version, set_version)
class ElfException(Exception):
    """Raised when an ELF query (e.g. a symbol lookup) fails."""
class ElfFile(ELFFile):
    """Convenience wrapper around pyelftools' ELFFile.

    Caches the symbol table at construction time and adds helpers for
    querying symbols, the load image, and DWARF source information.
    """
    def __init__(self, path):
        # NOTE(review): the stream stays open for the object's lifetime
        # (pyelftools reads lazily from it) and is never explicitly closed.
        fp = open(path, 'rb')
        super(ElfFile, self).__init__(stream=fp)
        self.path = self.stream.name
        self.endianness = self.little_endian
        # Cache: symbol name -> raw symbol-table entry, built once here.
        self._symbols = dict()
        for section in self.iter_sections():
            if isinstance(section, SymbolTableSection) or isinstance(section, SUNWSyminfoTableSection):
                for sym in section.iter_symbols():
                    self._symbols[sym.name] = sym.entry
    @property
    def abi_info(self):
        """
        returns the ABI information (e_machine / e_version header fields).
        :return: ABI information
        :rtype: AbiInfo
        """
        return AbiInfo(**dict((k, self.header[k]) for k in ('e_machine', 'e_version')))
    @property
    def base_address(self):
        """
        returns the address of the first instruction in the ELF file
        (the e_entry header field).
        :return: first instruction's address
        :rtype: Address
        """
        return Address(self.header.e_entry)
    @property
    def binary_address(self):
        """
        returns the lowest physical load address among PT_LOAD segments.
        :return: lowest binary address
        :rtype: Address
        """
        # Sentinel -1 means "no PT_LOAD segment seen yet".
        address = Address(-1)
        for segment in self.iter_segments():
            if segment['p_type'] == 'PT_LOAD':
                if (address == -1) or (segment.header['p_paddr'] < address):
                    # NOTE(review): this rebinds to the raw header value, so
                    # the returned object is a plain int rather than an
                    # Address once any segment matches -- confirm callers
                    # don't rely on Address.__str__.
                    address = segment.header['p_paddr']
        return address
    @property
    def binary(self):
        """
        returns the load image: the data of every PT_LOAD segment,
        concatenated in iteration order.
        :return: binary data
        :rtype: bytes
        """
        data = b''
        for segment in self.iter_segments():
            if segment['p_type'] == 'PT_LOAD':
                data += segment.data()
        return data
    @property
    def endianness(self):
        """
        endianness of the binary ('little' or 'big').
        :getter: returns the endianness of the binary
        :setter: sets the endianness from a truthy little-endian flag
        :type: str
        """
        return self._endianness
    @endianness.setter
    def endianness(self, value):
        # value is the boolean little_endian flag from the ELF header.
        if value:
            self._endianness = 'little'
        else:
            self._endianness = 'big'
    def files(self):
        """
        returns an iterator over all source file names (STT_FILE symbols)
        recorded in the ELF file.
        :return: generator of file names
        :rtype: generator
        """
        return (k for k, v in self._symbols.items() if hasattr(v, 'st_info') and v.st_info.type == 'STT_FILE')
    @property
    def path(self):
        """
        returns the full path of the elf file.
        :return: path
        :rtype: str
        """
        return self._path
    @path.setter
    def path(self, value):
        self._path = value
    def symbols(self):
        """
        returns a list of all data-object symbol names (STT_OBJECT)
        available in the ELF file.
        :return: list of symbol names
        :rtype: list
        """
        return [k for k, v in self._symbols.items() if
                hasattr(v, 'st_info') and v.st_info.type == 'STT_OBJECT']
    def get_symbol(self, name):
        """
        returns a Symbol object containing the properties of the symbol named 'name' in the ELF file.
        :param name: symbol name
        :type name: str
        :return: symbol
        :rtype: Symbol
        :raises ElfException: if no symbol of that name exists
        """
        if name in self._symbols.keys():
            return Symbol(self._symbols[name], name)
        raise ElfException('symbol ' + str(name) + ' not found')
    def get_source_info(self, address):
        """
        returns the full path to the source file containing the code for the
        specified address, the line number, and the function name (if the
        address falls inside a function).
        NOTE: although DWARF line numbers are 1-based, the returned line is
        decremented by one (0-based); -1 means the line was not found.
        :param address: requested address
        :type address: int
        :return: a tuple (source file path or None, 0-based line or -1,
            function name or None)
        :rtype: tuple
        """
        file_path, line, func_name = None, -1, None
        # NOTE(review): the two assignments below are redundant with the
        # tuple initialisation above; kept as-is.
        line = -1
        func_name = None
        dwarf_info = self.get_dwarf_info()
        for CU in dwarf_info.iter_CUs():
            try:
                line_program = dwarf_info.line_program_for_CU(CU)
            except DWARFError:
                continue
            if line_program is None:
                continue
            prev_state = None
            # Walk the line program until the address falls between two
            # consecutive row addresses; stop searching once found.
            if line == -1:
                for entry in line_program.get_entries():
                    if entry.state is None:
                        continue
                    if prev_state and prev_state.address <= address < entry.state.address:
                        file_path = CU.get_top_DIE().get_full_path()
                        line = prev_state.line
                    if entry.state.end_sequence:
                        # End of a sequence: reset so ranges never span it.
                        prev_state = None
                    else:
                        prev_state = entry.state
            # Independently scan subprogram DIEs for the enclosing function.
            if func_name is None:
                for DIE in CU.iter_DIEs():
                    try:
                        if DIE.tag == 'DW_TAG_subprogram':
                            low_pc = DIE.attributes['DW_AT_low_pc'].value
                            high_pc_attr = DIE.attributes['DW_AT_high_pc']
                            high_pc_attr_class = describe_form_class(high_pc_attr.form)
                            # DW_AT_high_pc is either an absolute address or
                            # an offset from low_pc, depending on form class.
                            if high_pc_attr_class == 'address':
                                high_pc = high_pc_attr.value
                            elif high_pc_attr_class == 'constant':
                                high_pc = low_pc + high_pc_attr.value
                            else:
                                continue
                            if low_pc <= address < high_pc:
                                func_name = DIE.attributes['DW_AT_name'].value.decode()
                                break
                    except KeyError:
                        # DIE lacks one of the required attributes; skip it.
                        continue
            if func_name is not None:
                break
        return file_path, line - 1 if line != -1 else -1, func_name
|
/sauci-pyelf-0.1.2.tar.gz/sauci-pyelf-0.1.2/src/pyelf/pyelf.py
| 0.828835 | 0.356895 |
pyelf.py
|
pypi
|
from io import BytesIO
from zipfile import ZipFile
import numpy as np
import requests
ZENODO_PREFIX = 'https://zenodo.org/record'
def is_downloadable(url):
    """Return True if *url* points at a downloadable (non-text/html) resource.

    Issues a HEAD request and inspects the content-type header.
    Solution adapted from:
    https://www.codementor.io/@aviaryan/downloading-files-from-urls-in-python-77q3bs0un

    :param url: the URL to probe
    :return: True when the content type is neither text nor html
    """
    h = requests.head(url, allow_redirects=True)
    content_type = h.headers.get('content-type')
    # Fix: a response without a content-type header used to crash on
    # None.lower(); with no evidence of a text/html page, treat the
    # resource as downloadable.
    if content_type is None:
        return True
    content_type = content_type.lower()
    if 'text' in content_type:
        return False
    if 'html' in content_type:
        return False
    return True
def zenodo_urls(doi, file_names):
    """Return the urls of file names according to https://zenodo.org/

    Parameters
    ----------
    doi: str
        The DOI of the data set.
    file_names: str or list of str
        The file names for which the urls should be obtained.
    """
    names = np.atleast_1d(file_names)
    # The Zenodo record id is the last dot-separated component of the DOI.
    record = doi.split('.')[-1]
    urls = []
    for name in names:
        url = '%s/%s/files/%s' % (ZENODO_PREFIX, record, name)
        assert is_downloadable(url), 'URL %s is not downloadable' % (url)
        urls.append(url)
    return urls
def extract_files_from_urlzip(zip_url, file_names):
    """Extract a list of files from a zip file url

    Parameters
    ----------
    zip_url: str
        The zip file url.
    file_names: str or list of str
        The file names to be extracted.

    Raises
    ------
    RuntimeError
        If any requested file name matches no member of the archive.
    """
    file_names = np.atleast_1d(file_names)
    assert is_downloadable(zip_url), 'URL %s is not downloadable' % (zip_url)
    r = requests.get(zip_url)
    zipfile = ZipFile(BytesIO(r.content))
    # Fix: track success per requested file name. The original indexed the
    # success list with a running match counter, which overran the list
    # when one name matched several archive members and could blame the
    # wrong file in the failure message.
    success = [False] * len(file_names)
    for fname in zipfile.namelist():
        for i, file_name in enumerate(file_names):
            if file_name in fname:
                zipfile.extract(fname, '.')
                success[i] = True
    for i, extracted in enumerate(success):
        if not extracted:
            raise RuntimeError('Could not extract file: %s' % file_names[i])
def files_from_urls(urls, file_names=None):
    """Download files from given urls

    Parameters
    ----------
    urls: list of str
        The urls of the files to be downloaded.
    file_names: str or list of str, optional
        The file names corresponding to the urls, by default it will be
        everything after the last "/" of the urls.
    """
    if file_names is None:
        file_names = [url.split('/')[-1] for url in urls]
    else:
        file_names = np.atleast_1d(file_names)
    for url, file_name in zip(urls, file_names):
        assert is_downloadable(url), 'URL %s is not downloadable' % (url)
        r = requests.get(url, allow_redirects=True)
        # Fix: use a context manager so the file handle is closed (the
        # original open(...).write(...) leaked the handle).
        with open(file_name, 'wb') as f:
            f.write(r.content)
|
/saullo_castro_tenure_review-1.0.0-py3-none-any.whl/saullo_castro_tenure_review/dataset.py
| 0.704364 | 0.350922 |
dataset.py
|
pypi
|
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution


class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """
    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None
        Returns:
            float: mean of the data set
        """
        avg = 1.0 * sum(self.data) / len(self.data)
        self.mean = avg
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population
        Returns:
            float: standard deviation of the data set
        """
        # Bessel's correction (n - 1) for a sample, plain n for a population.
        if sample:
            n = len(self.data) - 1
        else:
            n = len(self.data)
        mean = self.calculate_mean()
        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2
        sigma = math.sqrt(sigma / n)
        self.stdev = sigma
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None
        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian distribution.

        Args:
            x (float): point for calculating the probability density function
        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces=50):
        """Function to plot the normalized histogram of the data and a plot of the
        probability density function along the same range

        Args:
            n_spaces (int): number of data points
        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        mu = self.mean
        sigma = self.stdev
        min_range = min(self.data)
        max_range = max(self.data)
        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces
        x = []
        y = []
        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval*i
            x.append(tmp)
            y.append(self.pdf(tmp))
        # make the plots
        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')
        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # Fix: label the second subplot; the original duplicated the
        # axes[0].set_ylabel call and left axes[1] unlabeled.
        axes[1].set_ylabel('Density')
        plt.show()
        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions

        Args:
            other (Gaussian): Gaussian instance
        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # Variances of independent Gaussians add; stdevs combine in quadrature.
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance

        Args:
            None
        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
|
/saurabhrajratan_distributions-0.1.tar.gz/saurabhrajratan_distributions-0.1/saurabhrajratan_distributions/Gaussiandistribution.py
| 0.670177 | 0.850096 |
Gaussiandistribution.py
|
pypi
|
<h1 align="center">Sauron Rule engine - One engine to rule them all </h1>
<p>
<img src="https://img.shields.io/badge/version-0.1-blue.svg?cacheSeconds=2592000" />
<img src="https://circleci.com/gh/jlugao/sauron-rule-engine/tree/master.svg?style=svg" />
<a href='https://coveralls.io/github/jlugao/sauron-rule-engine?branch=master'><img src='https://coveralls.io/repos/github/jlugao/sauron-rule-engine/badge.svg?branch=master&service=github' alt='Coverage Status' /></a>
<img alt="GitHub" src="https://img.shields.io/github/license/jlugao/sauron-rule-engine.svg?style=plastic">
<a href="https://twitter.com/joaovoce">
<img alt="Twitter: joaovoce" src="https://img.shields.io/twitter/follow/joaovoce.svg?style=social" target="_blank" />
</a>
</p>
[](https://dev.azure.com/jlugao/Sauron%20Engine/_build/latest?definitionId=1&branchName=master)
> A simple rule engine to be used in python, it is based on simple rules and actions that can be chained with each other. The idea is to run the rule processor on events and have it mutate data or trigger actions
Heavily inspired on FastAPI. We use type annotations in our engine so that we can export data to other systems or frontends to convey what conditions and actions are possible using that engine
## Install
```sh
pip install sauron-rule-engine
```
## Concepts
Sauron rule engine is based on custom functions that can be called by a rule.
### Condition
Condition to be satisfied in order for the actions to run, they can take some or no parameters at all
Multiple conditions can be chained in order to create more complex ones, currently all chained conditions must be satisfied
### Action
An Action is the intended result. Usually they are there to mutate state or trigger/schedule other kinds of actions in your system. Actions can also be chained and will run in order.
### Rule
A Rule is a dict or json string containing the conditions and actions and the arguments they should be run with. Usually those rules will be built by a frontend to match complex and adaptable business rules from your customer
## Use it
A simple example of the usage
```python
from sauron.rule_engine import RuleEngine
engine = RuleEngine()
@engine.condition("First Condition")
def first_condition(session,lower_number: int = 10, greater_number: int = 20) -> bool:
"""
    Checks if the first number is lower than the second
- lower_number: Number expected to be low
- higher_number: Number expected to be high
"""
return lower_number < greater_number
@engine.condition()
def second_condition(session):
"""
Takes no argument and always returns True
"""
return True
@engine.action("The Action")
def print_the_equation(
session, lower_number: int = 10, greater_number: int = 20
) -> None:
"""
Prints a statement Asserting that the first number is lower than the second number
- lower_number: Number expected to be low
- higher_number: Number expected to be high
"""
print(f"{lower_number} < {greater_number}")
rule = {
"conditions": [
{
"name": "first_condition",
"args": {"lower_number": 3, "greater_number": 10},
}
],
"actions": [
{
"name": "print_the_equation",
"args": {"lower_number": 3, "greater_number": 10},
}
],
}
engine.run(rule)
```
## Choices Fields
Choices fields are supported through python's built-in Enum type. Example:
```python
from sauron.rule_engine import RuleEngine
from enum import Enum
class Color(str, Enum):
red = "R"
green = "G"
blue = "B"
@engine.condition("is it red?")
def is_red(session, color: Color) -> bool:
"""
Checks if the color is red
"""
return color == color.red
```
## Export Conditions and Actions
You can use the function export_metadata to export your data in a dict or as a json string (just pass `json=True`). Here is an Example and the output:
```python
from sauron_rule_engine.rule_engine import RuleEngine
from enum import Enum
engine = RuleEngine()
@engine.condition("First Condition")
def first_condition(lower_number: int = 10, greater_number: int = 20) -> bool:
"""
Checks if first number is lower than the first
- lower_number: Number expected to be low
- higher_number: Number expected to be high
"""
return lower_number < greater_number
@engine.condition()
def second_condition():
"""
Takes no argument and always returns True
"""
return True
@engine.action("The Action")
def print_the_equation(
lower_number: int = 10, greater_number: int = 20
) -> None:
"""
Prints a statement Asserting that the first number is lower than the second number
- lower_number: Number expected to be low
- higher_number: Number expected to be high
"""
print(f"{lower_number} < {greater_number}")
class Color(str, Enum):
red = "R"
green = "G"
blue = "B"
@engine.condition("is it red?")
def is_red(color: Color) -> bool:
"""
Checks if the color is red
"""
return color == color.red
metadata = engine.export_metadata(json=True)
print(metadata)
```
Results in the following json to be served to your frontend:
```json
{
"actions": {
"print_the_equation": {
"args": {
"lower_number": { "default": 10, "type": "int", "choices": null },
"greater_number": { "default": 20, "type": "int", "choices": null }
},
"doc": "Prints a statement Asserting that the first number is lower than the second number\n- lower_number: Number expected to be low\n- higher_number: Number expected to be high",
"name": "The Action"
}
},
"conditions": {
"first_condition": {
"args": {
"lower_number": { "default": 10, "type": "int", "choices": null },
"greater_number": { "default": 20, "type": "int", "choices": null }
},
"doc": "Checks if first number is lower than the first\n- lower_number: Number expected to be low\n- higher_number: Number expected to be high",
"name": "First Condition"
},
"second_condition": {
"args": {},
"doc": "Takes no argument and always returns True",
"name": "second_condition"
},
"is_red": {
"args": {
"color": {
"default": null,
"type": "Color",
"choices": ["red", "green", "blue"]
}
},
"doc": "Checks if the color is red",
"name": "is it red?"
}
}
}
```
## Sessions
Results are stored in a result stack inside the session, so that jobs can share data with each other.
## More Features coming to town
- Support pydantic types
- Support for complex types with hints to the frontend (like a range for an int type)
## Contribute
- We need all the help we can get. Please read [CONTRIBUTING.md](CONTRIBUTING.md) for instructions
## Author
👤 **João Ricardo Lhullier Lugão**
- Twitter: [@joaovoce](https://twitter.com/joaovoce)
- Github: [@jlugao](https://github.com/jlugao)
## Show your support
Give a ⭐️ if this project helped you!
---
_This README was generated with ❤️ by [readme-md-generator](https://github.com/kefranabg/readme-md-generator)_
|
/sauron-rule-engine-1.0.4.tar.gz/sauron-rule-engine-1.0.4/README.md
| 0.841533 | 0.899696 |
README.md
|
pypi
|
from typing import List, Dict, Callable, Union, Any, cast
from .models import RuleModel, ConditionModel, ActionModel
import json as json_lib
from ruamel.yaml import YAML
import inspect
from enum import Enum
yaml = YAML(typ="safe")
class RuleEngine:
    """Register condition/action callables and execute rules against them.

    Conditions are predicates combined in AND mode; actions run sequentially
    when every condition holds.  Rules may be supplied either as dicts or as
    JSON/YAML strings.
    """

    class Meta:
        job_types = ["Conditions", "Actions"]

    def __init__(self, *args, **kwargs):
        # Registered callables keyed by function __name__; each entry is a
        # dict with "function" and "verbose_name" keys.
        self.conditions: Dict[str, Dict[str, Any]] = {}
        self.actions: Dict[str, Dict[str, Any]] = {}
        self.metadata: Dict[str, Any] = {}
        super().__init__(*args, **kwargs)

    @staticmethod
    def get_param_info(param):
        """Return ``(type_name, choices, default)`` for an inspect.Parameter.

        ``choices`` is the list of member names when the annotation is an
        Enum subclass, otherwise ``None``; a missing default becomes ``None``.
        """
        annotation = param.annotation
        # Non-class annotations (e.g. typing generics) have no __name__/__mro__,
        # so fall back gracefully instead of raising AttributeError.
        name = getattr(annotation, "__name__", str(annotation))
        choices = None
        if Enum in getattr(annotation, "__mro__", ()):
            choices = [choice for choice in annotation.__members__]
        defaults = param.default
        if defaults is param.empty:
            defaults = None
        return name, choices, defaults

    @classmethod
    def __get_function_metadata(
        cls, input_function: Callable
    ) -> Dict[str, Any]:
        """Metadata about arguments, documentation and the function itself."""
        signature: inspect.Signature = inspect.signature(input_function)
        arguments_metadata: Dict[str, Dict[str, Any]] = {}
        for key, parameter in signature.parameters.items():
            arg_type, arg_choices, arg_defaults = cls.get_param_info(parameter)
            arguments_metadata[key] = {
                "default": arg_defaults,
                "type": arg_type,
                "choices": arg_choices,
            }
        return {
            "args": arguments_metadata,
            "doc": inspect.getdoc(input_function),
            "name": input_function.__name__,
        }

    def get_metadata(self, input_function: Dict[str, Any]) -> Dict[str, Any]:
        """Return exportable metadata for a registry entry, preferring the
        user-supplied verbose_name over the function's own name."""
        metadata = self.__get_function_metadata(input_function["function"])
        verbose_name = input_function.get("verbose_name", None)
        if verbose_name:
            metadata["name"] = verbose_name
        return metadata

    def __add_condition(
        self, function: Callable, verbose_name: str = None
    ) -> None:
        # Register a condition under its function name.
        self.conditions[function.__name__] = {
            "function": function,
            "verbose_name": verbose_name,
        }

    def __add_action(
        self, function: Callable, verbose_name: str = None
    ) -> None:
        # Register an action under its function name.
        self.actions[function.__name__] = {
            "function": function,
            "verbose_name": verbose_name,
        }

    def condition(self, *args, **kwargs) -> Callable:
        """
        Decorator so rules can be registered as follows:
            @obj.condition
            def my_function():
                return None
        or with an optional verbose name: ``@obj.condition("Pretty name")``.
        """
        # Bare usage (@obj.condition without parentheses) passes the function
        # itself as the first positional argument; previously this silently
        # returned the inner decorator instead of registering the function.
        if args and callable(args[0]) and not kwargs:
            self.__add_condition(args[0])
            return args[0]

        def decorator(function) -> Callable:
            verbose_name = args[0] if args else kwargs.get("verbose_name", None)
            self.__add_condition(function, verbose_name)
            return function

        return decorator

    def action(self, *args, **kwargs) -> Callable:
        """
        Decorator so actions can be registered as follows:
            @obj.action
            def my_function():
                return None
        or with an optional verbose name: ``@obj.action("Pretty name")``.
        """
        # Bare usage (@obj.action without parentheses) -- see condition().
        if args and callable(args[0]) and not kwargs:
            self.__add_action(args[0])
            return args[0]

        def decorator(function) -> Callable:
            verbose_name = args[0] if args else kwargs.get("verbose_name", None)
            self.__add_action(function, verbose_name)
            return function

        return decorator

    def parse_rule(
        self, untreated_rule: Union[Dict[Any, Any], str]
    ) -> "RuleModel":
        """
        Rules are received either as JSON/YAML strings or as dicts;
        parse and return the pydantic model.
        """
        if isinstance(untreated_rule, str):
            rule = yaml.load(untreated_rule)
        else:
            rule = cast(dict, untreated_rule)
        return RuleModel(**rule)

    def __apply_conditions(self, conditions: List["ConditionModel"]) -> bool:
        """
        AND together every condition.  All conditions are evaluated even
        after one fails (no short-circuit), so any side effects in the
        condition functions always run.
        """
        should_continue: bool = True
        for condition in conditions:
            args = condition.args or {}
            should_continue &= self.conditions[condition.name]["function"](**args)
        return should_continue

    def __run_actions(self, actions: List["ActionModel"]) -> bool:
        """Run every action in order, passing its stored args when present."""
        for action in actions:
            args = action.args or {}
            self.actions[action.name]["function"](**args)
        return True

    def run(self, untreated_rule: Union[Dict[str, Any], str]) -> bool:
        """
        Run the rule engine:
        - untreated_rule: JSON/YAML string or dict specifying which
          conditions should be checked and which actions should be
          executed when all conditions are met.
        Returns True when the actions ran, False otherwise.
        """
        rule = self.parse_rule(untreated_rule)
        if self.__apply_conditions(rule.conditions):
            self.__run_actions(rule.actions)
            return True
        return False

    def export_generic(self, generic: Dict[str, Any]) -> Dict[str, Any]:
        """Export metadata for every entry of a registry dict."""
        return {name: self.get_metadata(item) for name, item in generic.items()}

    def export_conditions(self) -> Dict[str, Any]:
        """Export metadata for all registered conditions."""
        return self.export_generic(self.conditions)

    def export_actions(self) -> Dict[str, Any]:
        """Export metadata for all registered actions."""
        return self.export_generic(self.actions)

    def export_metadata(
        self, json: bool = False
    ) -> Union[str, Dict[str, Any]]:
        """Export metadata for all registered callables, optionally JSON-encoded."""
        metadata = {
            "actions": self.export_actions(),
            "conditions": self.export_conditions(),
        }
        return json_lib.dumps(metadata) if json else metadata
|
/sauron-rule-engine-1.0.4.tar.gz/sauron-rule-engine-1.0.4/sauron_rule_engine/rule_engine.py
| 0.841923 | 0.241299 |
rule_engine.py
|
pypi
|
from collections import OrderedDict
from typing import List, Dict, Callable, Union, Any, Type, Tuple
from .models import JobModel
from .parsers import DefaultParser
from .exporters import DefaultExporter
class Engine:
    """Collects decorated job callables and runs parsed rules against a session.

    The session dict is shared between jobs so they can pass data to each
    other; every job's return value is appended to ``session["results"]``.
    """

    # Overridable collaborator classes.
    job_model_class: Type[JobModel] = JobModel
    parser_class: Type[DefaultParser] = DefaultParser
    exporter_class: Type[DefaultExporter] = DefaultExporter

    def __init__(
        self,
        context: Dict[str, Any] = None,
        job_model: Type[JobModel] = None,
        parser_class: Type[DefaultParser] = None,
        exporter_class: Type[DefaultExporter] = None,
    ):
        """
        - Sessions can be initialized with a context provided by the user
        - Job Model, Parser and Exporter classes can be changed
        """
        # NOTE: ``session`` and ``parsed_rule`` used to be mutable class
        # attributes, silently shared by every Engine instance; they are now
        # created per-instance.
        self.session: Dict[str, Any] = context if context is not None else {}
        self.parsed_rule: List[JobModel] = []
        if job_model:
            self.job_model_class = job_model
        if parser_class:
            self.parser_class = parser_class
        if exporter_class:
            self.exporter_class = exporter_class
        self.callables_collected: "OrderedDict[str, Dict[str, Any]]" = OrderedDict()

    def _add_callable(self, function: Callable, verbose_name: str, job_type: str = "job"):
        """Register *function* in the ordered registry under its own name."""
        self.callables_collected[function.__name__] = {
            "function": function,
            "verbose_name": verbose_name,
            "type": job_type,
        }

    def job(self, *args, **kwargs):
        """
        Decorator so jobs can be registered as follows:
            @obj.job
            def my_function(session):
                return None
        or with an optional verbose name: ``@obj.job("Pretty name")``.
        """
        # Bare usage (@obj.job without parentheses) passes the function itself
        # as the first positional argument; previously this silently returned
        # the inner decorator instead of registering the function.
        if args and callable(args[0]) and not kwargs:
            self._add_callable(args[0], None)
            return args[0]

        def decorator(function: Callable):
            verbose_name: str = args[0] if args else kwargs.get("verbose_name", None)
            self._add_callable(function, verbose_name)
            return function

        return decorator

    def apply_job_call(
        self, job: JobModel, session: Dict[str, Any]
    ) -> Tuple[Dict[str, Any], Any]:
        """Call one job with the current session and record its result.

        Raises KeyError with a clear message when the job was never registered
        (previously this surfaced as an opaque AttributeError on None).
        """
        entry = self.callables_collected.get(job.name)
        if entry is None:
            raise KeyError(f"No job registered under the name {job.name!r}")
        target_func: Callable = entry["function"]
        if job.args:
            result = target_func(session=session, **job.args)
        else:
            result = target_func(session=session)
        # Append the result of the called function into the session.
        if not session.get("results"):
            session["results"] = []
        session["results"].append({"job": job.name, "return": result})
        self.session = session
        return (session, result)

    def parse(self, unparsed_rule: Union[str, Dict[str, Any]]):
        """Parse *unparsed_rule* with the configured parser and cache the result."""
        parser: DefaultParser = self.parser_class()
        parsed_rule: List[JobModel] = parser.parse(unparsed_rule)
        self.parsed_rule = parsed_rule
        return parsed_rule

    def run(
        self, rule: Union[str, Dict[str, Any]], session: Dict[str, Any] = None
    ):
        """
        Execute each job in *rule*, passing the current session to all of
        them.  A falsy job result stops the chain (like a failed condition).
        """
        if not session:
            session = self.session
        for job in self.parse(rule):
            session, result = self.apply_job_call(job, session)
            if not result:
                break

    def export_metadata(self, fmt: str = "dict"):
        """Export metadata about the registered jobs via the configured exporter."""
        exporter = self.exporter_class()
        return exporter.export_jobs(self.callables_collected, fmt=fmt)
|
/sauron-rule-engine-1.0.4.tar.gz/sauron-rule-engine-1.0.4/sauron/engine.py
| 0.855896 | 0.241735 |
engine.py
|
pypi
|
import json
from json.decoder import JSONDecodeError
from typing import List, Type, Union, Dict, Any
from sauron.models import JobModel
from ruamel.yaml import YAML
class DefaultParser:
    """Parses job descriptions -- YAML/JSON strings or python lists of
    mappings -- into ``JobModel`` instances."""

    single_model: Type[JobModel] = JobModel

    def __init__(self):
        self.yaml = YAML(typ="safe")

    def _parse_single_job(self, job_dict) -> JobModel:
        """Build one model instance from a single job mapping."""
        return self.single_model(**job_dict)

    def _parse_jobs_from_list(self, jobs_input) -> List[JobModel]:
        """Parse an already-decoded list of job mappings."""
        return [self._parse_single_job(raw_job) for raw_job in jobs_input]

    def _parse_jobs_from_string(self, jobs_input) -> List[JobModel]:
        """Decode a YAML/JSON string and parse the resulting job list.

        Raises ValueError when the string cannot be decoded or does not
        decode to a list.
        """
        # The safe YAML loader accepts plain JSON too, but on bad input it
        # raises its own error type -- JSONDecodeError alone would never match.
        from ruamel.yaml.error import YAMLError
        try:
            jobs: list = self.yaml.load(jobs_input)
        except (JSONDecodeError, YAMLError):
            raise ValueError("jobs param is not a valid json string")
        if not isinstance(jobs, list):
            raise ValueError("jobs param must be a list or json-string")
        return self._parse_jobs_from_list(jobs)

    def parse(self, jobs_input) -> List[JobModel]:
        """
        Main method called to parse any jobs
        """
        if isinstance(jobs_input, str):
            return self._parse_jobs_from_string(jobs_input)
        if isinstance(jobs_input, list):
            # jobs_input is a python list
            return self._parse_jobs_from_list(jobs_input)
        raise ValueError("jobs param must be a list or json-string")
class RuleEngineParser(DefaultParser):
    """Parser for the sauron-rule-engine format: a mapping with separate
    "conditions" and "actions" lists, flattened into one job sequence
    (conditions first)."""

    single_model: Type[JobModel] = JobModel

    # __init__ and parse() are inherited unchanged from DefaultParser.

    def _parse_jobs_from_string(self, jobs_input: str) -> List[JobModel]:
        """Decode a YAML/JSON rule mapping and parse its jobs.

        Raises ValueError when the string cannot be decoded, or when it does
        not decode to a mapping with "conditions" and "actions" lists.
        """
        # The safe YAML loader raises its own error type on bad input --
        # JSONDecodeError alone would never match here.
        from ruamel.yaml.error import YAMLError
        try:
            decoded_jobs: dict = self.yaml.load(jobs_input)
            jobs: list = decoded_jobs["conditions"] + decoded_jobs["actions"]
        except (JSONDecodeError, YAMLError):
            raise ValueError("jobs param is not a valid json string")
        except (KeyError, TypeError):
            raise ValueError(
                'jobs param must decode to a mapping with "conditions" and "actions" lists'
            )
        return self._parse_jobs_from_list(jobs)
|
/sauron-rule-engine-1.0.4.tar.gz/sauron-rule-engine-1.0.4/sauron/parsers.py
| 0.736874 | 0.253301 |
parsers.py
|
pypi
|
import argparse
import logging.handlers
import pathlib
import re
import sys
from typing import List, Tuple, Optional
from logging import LogRecord
from datetime import datetime # This clearly never leads to confusion...
from os import isatty
import rich
from pygments.styles.monokai import MonokaiStyle
from pygments.token import (
Comment,
Error,
Keyword,
Name,
Number,
Operator,
String,
Token,
)
from rich._log_render import LogRender # DEP-WARN
from rich.console import render_group
from rich.containers import Renderables
from rich.highlighter import NullHighlighter
from rich.logging import RichHandler
from rich.style import Style
from rich.syntax import ANSISyntaxTheme, PygmentsSyntaxTheme
from rich.table import Table
from rich.text import Text
from rich.theme import Theme
from rich.traceback import PathHighlighter, Traceback
# Maximum number of rotated "-partN" backups kept per log stem (see RotatingFileHandler).
MAX_OLD_LOGS = 8
class RotatingFileHandler(logging.handlers.RotatingFileHandler):
    """Custom rotating file handler.
    This file handler rotates a bit differently to the one in stdlib.
    For a start, this works off of a "stem" and a "directory". The stem
    is the base name of the log file, without the extension. The
    directory is where all log files (including backups) will be placed.
    Secondly, this logger rotates files downwards, and new logs are
    *started* with the backup number incremented. The stdlib handler
    rotates files upwards, and this leaves the logs in reverse order.
    Thirdly, naming conventions are not customisable with this class.
    Logs will initially be named in the format "{stem}.log", and after
    rotating, the first log file will be renamed "{stem}-part1.log",
    and a new file "{stem}-part2.log" will be created for logging to
    continue.
    A few things can't be modified in this handler: it must use append
    mode, it doesn't support use of the `delay` arg, and it will ignore
    custom namers and rotators.
    When this handler is instantiated, it will search through the
    directory for logs from previous runtimes, and will open the file
    with the highest backup number to append to.
    """
    def __init__(
        self,
        stem: str,
        directory: pathlib.Path,
        maxBytes: int = 0,
        backupCount: int = 0,
        encoding: Optional[str] = None,
    ) -> None:
        self.baseStem = stem
        self.directory = directory.resolve()
        # Scan for existing files in directory, append to last part of existing log
        # NOTE(review): the pattern matches a single digit, so part numbers
        # above 9 are not recognised here -- fine while backupCount < 10.
        log_part_re = re.compile(rf"{stem}-part(?P<partnum>\d)\.log")
        highest_part = 0
        for path in directory.iterdir():
            match = log_part_re.match(path.name)
            if match and int(match["partnum"]) > highest_part:
                highest_part = int(match["partnum"])
        if highest_part:
            filename = directory / f"{stem}-part{highest_part}.log"
        else:
            filename = directory / f"{stem}.log"
        super().__init__(
            filename,
            mode="a",
            maxBytes=maxBytes,
            backupCount=backupCount,
            encoding=encoding,
            delay=False,
        )
    def doRollover(self):
        # Close the current stream before renaming/deleting files on disk.
        if self.stream:
            self.stream.close()
            self.stream = None
        initial_path = self.directory / f"{self.baseStem}.log"
        if self.backupCount > 0 and initial_path.exists():
            # First rollover ever: "{stem}.log" becomes "{stem}-part1.log".
            initial_path.replace(self.directory / f"{self.baseStem}-part1.log")
        # Determine the part number of the file we were just writing to; a
        # plain "{stem}.log" (no "-partN" group) counts as part 1 via the
        # groupdict default.
        match = re.match(
            rf"{self.baseStem}(?:-part(?P<part>\d))?\.log", pathlib.Path(self.baseFilename).name
        )
        latest_part_num = int(match.groupdict(default="1").get("part", "1"))
        if self.backupCount < 1:
            # No backups, just delete the existing log and start again
            pathlib.Path(self.baseFilename).unlink()
        elif latest_part_num > self.backupCount:
            # Rotate files down one
            # red-part2.log becomes red-part1.log etc, a new log is added at the end.
            for i in range(1, self.backupCount + 1):
                next_log = self.directory / f"{self.baseStem}-part{i + 1}.log"
                if next_log.exists():
                    prev_log = self.directory / f"{self.baseStem}-part{i}.log"
                    next_log.replace(prev_log)
        else:
            # Simply start a new file
            self.baseFilename = str(
                self.directory / f"{self.baseStem}-part{latest_part_num + 1}.log"
            )
        self.stream = self._open()
# ANSI fallback syntax theme for rendered tracebacks, used by init_logging()
# when the console cannot do truecolor (see ANSISyntaxTheme usage below).
SYNTAX_THEME = {
    Token: Style(),
    Comment: Style(color="bright_black"),
    Keyword: Style(color="cyan", bold=True),
    Keyword.Constant: Style(color="bright_magenta"),
    Keyword.Namespace: Style(color="bright_red"),
    Operator: Style(bold=True),
    Operator.Word: Style(color="cyan", bold=True),
    Name.Builtin: Style(bold=True),
    Name.Builtin.Pseudo: Style(color="bright_red"),
    Name.Exception: Style(bold=True),
    Name.Class: Style(color="bright_green"),
    Name.Function: Style(color="bright_green"),
    String: Style(color="yellow"),
    Number: Style(color="cyan"),
    Error: Style(bgcolor="red"),
}
class FixedMonokaiStyle(MonokaiStyle):
    """Monokai style with an explicit foreground colour set for the base Token."""
    styles = {**MonokaiStyle.styles, Token: "#f8f8f2"}
class RedTraceback(Traceback):
    """Rich Traceback that drops the blank segments between stack entries."""
    @render_group()
    def _render_stack(self, stack):
        # Call the undecorated original via __wrapped__ so we can filter its
        # output before re-applying render_group ourselves.
        for obj in super()._render_stack.__wrapped__(self, stack):
            if obj != "":
                yield obj
class RedLogRender(LogRender):
    """LogRender variant that inserts the logger name (bracketed, in
    bright_black) between the level and the message."""
    def __call__(
        self,
        console,
        renderables,
        log_time=None,
        time_format=None,
        level="",
        path=None,
        line_no=None,
        link_path=None,
        logger_name=None,
    ):
        output = Text()
        if self.show_time:
            log_time = log_time or console.get_datetime()
            log_time_display = log_time.strftime(time_format or self.time_format)
            # Repeated timestamps are blanked out so consecutive lines align.
            if log_time_display == self._last_time:
                output.append(" " * (len(log_time_display) + 1))
            else:
                output.append(f"{log_time_display} ", style="log.time")
                self._last_time = log_time_display
        if self.show_level:
            # The space needs to be added separately so that log level is colored by
            # Rich.
            output.append(level)
            output.append(" ")
        if logger_name:
            output.append(f"[{logger_name}] ", style="bright_black")
        output.append(*renderables)
        if self.show_path and path:
            path_text = Text()
            path_text.append(path, style=f"link file://{link_path}" if link_path else "")
            if line_no:
                path_text.append(f":{line_no}")
            output.append(path_text)
        return output
class RedRichHandler(RichHandler):
    """Adaptation of Rich's RichHandler to manually adjust the path to a logger name"""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Swap in our own renderer, copying the settings rich already built.
        self._log_render = RedLogRender(
            show_time=self._log_render.show_time,
            show_level=self._log_render.show_level,
            show_path=self._log_render.show_path,
            level_width=self._log_render.level_width,
        )
    def get_level_text(self, record: LogRecord) -> Text:
        """Get the level name from the record.
        Args:
            record (LogRecord): LogRecord instance.
        Returns:
            Text: A tuple of the style and level name.
        """
        level_text = super().get_level_text(record)
        level_text.stylize("bold")
        return level_text
    def emit(self, record: LogRecord) -> None:
        """Invoked by logging."""
        path = pathlib.Path(record.pathname).name
        level = self.get_level_text(record)
        message = self.format(record)
        time_format = None if self.formatter is None else self.formatter.datefmt
        log_time = datetime.fromtimestamp(record.created)
        traceback = None
        if self.rich_tracebacks and record.exc_info and record.exc_info != (None, None, None):
            exc_type, exc_value, exc_traceback = record.exc_info
            assert exc_type is not None
            assert exc_value is not None
            traceback = RedTraceback.from_exception(
                exc_type,
                exc_value,
                exc_traceback,
                width=self.tracebacks_width,
                extra_lines=self.tracebacks_extra_lines,
                theme=self.tracebacks_theme,
                word_wrap=self.tracebacks_word_wrap,
                show_locals=self.tracebacks_show_locals,
                locals_max_length=self.locals_max_length,
                locals_max_string=self.locals_max_string,
                indent_guides=False,
            )
            # The traceback is printed separately below, so fall back to the
            # bare message to avoid rendering the exception text twice.
            message = record.getMessage()
        use_markup = getattr(record, "markup") if hasattr(record, "markup") else self.markup
        if use_markup:
            message_text = Text.from_markup(message)
        else:
            message_text = Text(message)
        if self.highlighter:
            message_text = self.highlighter(message_text)
        if self.KEYWORDS:
            message_text.highlight_words(self.KEYWORDS, "logging.keyword")
        self.console.print(
            self._log_render(
                self.console,
                [message_text],
                log_time=log_time,
                time_format=time_format,
                level=level,
                path=path,
                line_no=record.lineno,
                link_path=record.pathname if self.enable_link_path else None,
                logger_name=record.name,
            ),
            soft_wrap=True,
        )
        if traceback:
            self.console.print(traceback)
def init_logging(level: int, location: pathlib.Path, cli_flags: argparse.Namespace) -> None:
    """Configure root logging: a rich (or plain) stdout handler plus
    rotating "latest"/"red" log files written inside *location*."""
    root_logger = logging.getLogger()
    base_logger = logging.getLogger("red")
    base_logger.setLevel(level)
    dpy_logger = logging.getLogger("discord")
    dpy_logger.setLevel(logging.WARNING)
    warnings_logger = logging.getLogger("py.warnings")
    warnings_logger.setLevel(logging.WARNING)
    rich_console = rich.get_console()
    rich.reconfigure(tab_size=4)
    rich_console.push_theme(
        Theme(
            {
                "log.time": Style(dim=True),
                "logging.level.warning": Style(color="yellow"),
                "logging.level.critical": Style(color="white", bgcolor="red"),
                "repr.number": Style(color="cyan"),
                "repr.url": Style(underline=True, italic=True, bold=False, color="cyan"),
            }
        )
    )
    rich_console.file = sys.stdout
    # This is terrible solution, but it's the best we can do if we want the paths in tracebacks
    # to be visible. Rich uses `pygments.string` style which is fine, but it also uses
    # this highlighter which dims most of the path and therefore makes it unreadable on Mac.
    PathHighlighter.highlights = []
    # Rich logging is used when explicitly requested, or when the flag is
    # unset and stdin looks like an interactive terminal.
    enable_rich_logging = False
    if isatty(0) and cli_flags.rich_logging is None:
        # Check if the bot thinks it has a active terminal.
        enable_rich_logging = True
    elif cli_flags.rich_logging is True:
        enable_rich_logging = True
    file_formatter = logging.Formatter(
        "[{asctime}] [{levelname}] {name}: {message}", datefmt="%Y-%m-%d %H:%M:%S", style="{"
    )
    if enable_rich_logging is True:
        rich_formatter = logging.Formatter("{message}", datefmt="[%X]", style="{")
        stdout_handler = RedRichHandler(
            rich_tracebacks=True,
            show_path=False,
            highlighter=NullHighlighter(),
            tracebacks_extra_lines=cli_flags.rich_traceback_extra_lines,
            tracebacks_show_locals=cli_flags.rich_traceback_show_locals,
            tracebacks_theme=(
                PygmentsSyntaxTheme(FixedMonokaiStyle)
                if rich_console.color_system == "truecolor"
                else ANSISyntaxTheme(SYNTAX_THEME)
            ),
        )
        stdout_handler.setFormatter(rich_formatter)
    else:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(file_formatter)
    root_logger.addHandler(stdout_handler)
    logging.captureWarnings(True)
    if not location.exists():
        location.mkdir(parents=True, exist_ok=True)
    # Rotate latest logs to previous logs
    previous_logs: List[pathlib.Path] = []
    latest_logs: List[Tuple[pathlib.Path, str]] = []
    for path in location.iterdir():
        match = re.match(r"latest(?P<part>-part\d+)?\.log", path.name)
        if match:
            part = match.groupdict(default="")["part"]
            latest_logs.append((path, part))
        match = re.match(r"previous(?:-part\d+)?.log", path.name)
        if match:
            previous_logs.append(path)
    # Delete all previous.log files
    for path in previous_logs:
        path.unlink()
    # Rename latest.log files to previous.log
    for path, part in latest_logs:
        path.replace(location / f"previous{part}.log")
    latest_fhandler = RotatingFileHandler(
        stem="latest",
        directory=location,
        maxBytes=1_000_000,  # About 1MB per logfile
        backupCount=MAX_OLD_LOGS,
        encoding="utf-8",
    )
    all_fhandler = RotatingFileHandler(
        stem="red",
        directory=location,
        maxBytes=1_000_000,
        backupCount=MAX_OLD_LOGS,
        encoding="utf-8",
    )
    for fhandler in (latest_fhandler, all_fhandler):
        fhandler.setFormatter(file_formatter)
        root_logger.addHandler(fhandler)
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/logging.py
| 0.579043 | 0.170992 |
logging.py
|
pypi
|
from pathlib import Path
import aiohttp
from redbot.core.i18n import Translator
_ = Translator("Audio", Path(__file__))
class AudioError(Exception):
    """Base exception for errors in the Audio cog.
    All other Audio exceptions in this module derive from this class.
    """
class LavalinkDownloadFailed(AudioError, RuntimeError):
    """Downloading the Lavalink jar failed.
    Attributes
    ----------
    response : aiohttp.ClientResponse
        The response from the server to the failed GET request.
    should_retry : bool
        Whether or not the Audio cog should retry downloading the jar.
    """
    def __init__(self, *args, response: aiohttp.ClientResponse, should_retry: bool = False):
        super().__init__(*args)
        self.response = response
        self.should_retry = should_retry
    def __repr__(self) -> str:
        str_args = [*map(str, self.args), self._response_repr()]
        # Bug fix: the closing parenthesis was missing from the repr output.
        return f"LavalinkDownloadFailed({', '.join(str_args)})"
    def __str__(self) -> str:
        return f"{super().__str__()} {self._response_repr()}"
    def _response_repr(self) -> str:
        # Summarise the HTTP response as "[status reason]".
        return f"[{self.response.status} {self.response.reason}]"
class QueryUnauthorized(AudioError):
    """Provided an unauthorized query to audio."""
    def __init__(self, message, *args):
        # Human-readable reason the query was rejected.
        self.message = message
        super().__init__(*args)
class TrackEnqueueError(AudioError):
    """Unable to play track."""
class PlayListError(AudioError):
    """Base exception for errors related to playlists."""
class InvalidPlaylistScope(PlayListError):
    """Provided playlist scope is not valid."""
class MissingGuild(PlayListError):
    """Trying to access the Guild scope without a guild."""
class MissingAuthor(PlayListError):
    """Trying to access the User scope without an user id."""
class TooManyMatches(PlayListError):
    """Too many playlists match the user input."""
class NoMatchesFound(PlayListError):
    """No entries found for this input."""
class NotAllowed(PlayListError):
    """The attempted playlist operation is not allowed."""
class ApiError(AudioError):
    """Base exception for API errors in the Audio cog."""
class SpotifyApiError(ApiError):
    """Base exception for Spotify API errors."""
class SpotifyFetchError(SpotifyApiError):
    """Fetching Spotify data failed."""
    def __init__(self, message, *args):
        # Human-readable description of what failed.
        self.message = message
        super().__init__(*args)
class YouTubeApiError(ApiError):
    """Base exception for YouTube Data API errors."""
    def __init__(self, message, *args):
        # Human-readable description of what failed.
        self.message = message
        super().__init__(*args)
class DatabaseError(AudioError):
    """Base exception for database errors in the Audio cog."""
class InvalidTableError(DatabaseError):
    """Provided table to query is not a valid table."""
class LocalTrackError(AudioError):
    """Base exception for local track errors."""
class InvalidLocalTrack(LocalTrackError):
    """The given local track is not valid."""
class InvalidLocalTrackFolder(LocalTrackError):
    """The given local track folder is not valid."""
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/audio/errors.py
| 0.917293 | 0.153644 |
errors.py
|
pypi
|
import asyncio
import contextlib
import logging
import time
from enum import Enum, unique
from pathlib import Path
from typing import MutableMapping
import discord
from redbot.core import commands
from redbot.core.i18n import Translator
log = logging.getLogger("red.cogs.Audio.task.callback")
_ = Translator("Audio", Path(__file__))
class CacheLevel:
    """A 5-bit flag value describing which caching layers are enabled.

    The factory methods below show the layout: Spotify uses the low bits
    (0b00011), YouTube 0b00100 and Lavalink the high bits (0b11000).
    Levels support add/subtract and subset/superset comparisons.
    """
    __slots__ = ("value",)
    def __init__(self, level=0):
        # Clamp the level into the valid 5-bit range.
        if not isinstance(level, int):
            raise TypeError(
                f"Expected int parameter, received {level.__class__.__name__} instead."
            )
        elif level < 0:
            level = 0
        elif level > 0b11111:
            level = 0b11111
        self.value = level
    def __eq__(self, other):
        return isinstance(other, CacheLevel) and self.value == other.value
    def __ne__(self, other):
        return not self.__eq__(other)
    def __hash__(self):
        return hash(self.value)
    def __add__(self, other):
        return CacheLevel(self.value + other.value)
    def __radd__(self, other):
        return CacheLevel(other.value + self.value)
    def __sub__(self, other):
        return CacheLevel(self.value - other.value)
    def __rsub__(self, other):
        return CacheLevel(other.value - self.value)
    def __str__(self):
        return "{0:b}".format(self.value)
    def __format__(self, format_spec):
        return "{r:{f}}".format(r=self.value, f=format_spec)
    def __repr__(self):
        return f"<CacheLevel value={self.value}>"
    def is_subset(self, other):
        """Returns ``True`` if self has the same or fewer caching levels as other."""
        return (self.value & other.value) == self.value
    def is_superset(self, other):
        """Returns ``True`` if self has the same or more caching levels as other."""
        return (self.value | other.value) == self.value
    def is_strict_subset(self, other):
        """Returns ``True`` if the caching level on other are a strict subset of those on self."""
        return self.is_subset(other) and self != other
    def is_strict_superset(self, other):
        """Returns ``True`` if the caching level on
        other are a strict superset of those on self."""
        return self.is_superset(other) and self != other
    __le__ = is_subset
    __ge__ = is_superset
    __lt__ = is_strict_subset
    __gt__ = is_strict_superset
    @classmethod
    def all(cls):
        """A factory method that creates a :class:`CacheLevel` with max caching level."""
        return cls(0b11111)
    @classmethod
    def none(cls):
        """A factory method that creates a :class:`CacheLevel` with no caching."""
        return cls(0)
    @classmethod
    def set_spotify(cls):
        """A factory method that creates a :class:`CacheLevel` with Spotify caching level."""
        return cls(0b00011)
    @classmethod
    def set_youtube(cls):
        """A factory method that creates a :class:`CacheLevel` with YouTube caching level."""
        return cls(0b00100)
    @classmethod
    def set_lavalink(cls):
        """A factory method that creates a :class:`CacheLevel` with lavalink caching level."""
        return cls(0b11000)
    def _bit(self, index):
        # Read a single bit of the mask.
        return bool((self.value >> index) & 1)
    def _set(self, index, value):
        # Write a single bit of the mask.
        if value is True:
            self.value |= 1 << index
        elif value is False:
            self.value &= ~(1 << index)
        else:
            raise TypeError("Value to set for CacheLevel must be a bool.")
    @property
    def lavalink(self):
        """:class:`bool`: Returns ``True`` if the Lavalink caching bit (bit 4) is set."""
        return self._bit(4)
    @lavalink.setter
    def lavalink(self, value):
        self._set(4, value)
    @property
    def youtube(self):
        """:class:`bool`: Returns ``True`` if the YouTube caching bit (bit 2) is set."""
        return self._bit(2)
    @youtube.setter
    def youtube(self, value):
        self._set(2, value)
    @property
    def spotify(self):
        """:class:`bool`: Returns ``True`` if the Spotify caching bit (bit 1) is set."""
        return self._bit(1)
    @spotify.setter
    def spotify(self, value):
        self._set(1, value)
class Notifier:
    """Edits a progress message in place, at most once every ``cooldown`` seconds."""

    def __init__(
        self, ctx: "commands.Context", message: "discord.Message", updates: MutableMapping, **kwargs
    ):
        self.context = ctx
        # The discord message being edited to reflect progress.
        self.message = message
        # Mapping of status key -> title format string, used by notify_user().
        self.updates = updates
        self.color = None
        self.last_msg_time = 0
        self.cooldown = 5

    async def notify_user(
        self,
        current: int = None,
        total: int = None,
        key: str = None,
        seconds_key: str = None,
        seconds: str = None,
    ):
        """This updates an existing message.
        Based on the message found in :variable:`Notifier.updates` as per the `key` param
        """
        # Rate-limit edits, but always let the final update (current == total)
        # through so the message never ends on a stale count.
        if self.last_msg_time + self.cooldown > time.time() and current != total:
            return
        if self.color is None:
            self.color = await self.context.embed_colour()
        embed2 = discord.Embed(
            colour=self.color,
            title=self.updates.get(key, "").format(num=current, total=total, seconds=seconds),
        )
        if seconds and seconds_key:
            embed2.set_footer(text=self.updates.get(seconds_key, "").format(seconds=seconds))
        try:
            await self.message.edit(embed=embed2)
            self.last_msg_time = int(time.time())
        except discord.errors.NotFound:
            # The progress message was deleted; silently stop updating it.
            pass

    async def update_text(self, text: str):
        """Replace the embed title with *text* (not rate-limited)."""
        embed2 = discord.Embed(colour=self.color, title=text)
        try:
            await self.message.edit(embed=embed2)
        except discord.errors.NotFound:
            pass

    async def update_embed(self, embed: "discord.Embed"):
        """Replace the whole embed (not rate-limited)."""
        try:
            await self.message.edit(embed=embed)
            self.last_msg_time = int(time.time())
        except discord.errors.NotFound:
            pass
@unique
class PlaylistScope(Enum):
    """The three scopes a playlist can be stored under."""

    GLOBAL = "GLOBALPLAYLIST"
    GUILD = "GUILDPLAYLIST"
    USER = "USERPLAYLIST"

    def __str__(self):
        # Render the scope as its raw config value.
        return str(self.value)

    @staticmethod
    def list():
        """Return every scope's value, in declaration order."""
        return [scope.value for scope in PlaylistScope]
def task_callback(task: asyncio.Task) -> None:
    """Done-callback that logs any exception a background task raised."""
    try:
        exc = task.exception()
    except (asyncio.CancelledError, asyncio.InvalidStateError):
        # Cancelled or still-running tasks have nothing to report.
        return
    if exc is not None:
        log.exception("%s raised an Exception", task.get_name(), exc_info=exc)
def has_internal_server():
    """Command check that only passes when the managed (internal) Lavalink
    server is in use, i.e. ``use_external_lavalink`` is disabled."""

    async def pred(ctx: commands.Context):
        return not await ctx.cog.config.use_external_lavalink()

    return commands.check(pred)
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/audio/utils.py
| 0.828696 | 0.235108 |
utils.py
|
pypi
|
import datetime
import json
import logging
from collections import namedtuple
from dataclasses import dataclass, field
from pathlib import Path
from typing import List, MutableMapping, Optional, Union
import discord
import lavalink
from redbot.core.bot import Red
from redbot.core.i18n import Translator
from redbot.core.utils.chat_formatting import humanize_list
from ..errors import InvalidPlaylistScope, MissingAuthor, MissingGuild
from ..utils import PlaylistScope
log = logging.getLogger("red.cogs.Audio.api.utils")
_ = Translator("Audio", Path(__file__))
@dataclass
class YouTubeCacheFetchResult:
    """A YouTube cache row: the cached query plus its last-update timestamp."""

    query: Optional[str]
    last_updated: int

    def __post_init__(self):
        # Promote the raw epoch timestamp to a datetime for convenience.
        ts = self.last_updated
        if isinstance(ts, int):
            self.updated_on: datetime.datetime = datetime.datetime.fromtimestamp(ts)
@dataclass
class SpotifyCacheFetchResult:
    """A Spotify cache row: the cached query plus its last-update timestamp."""

    query: Optional[str]
    last_updated: int

    def __post_init__(self):
        # Promote the raw epoch timestamp to a datetime for convenience.
        ts = self.last_updated
        if isinstance(ts, int):
            self.updated_on: datetime.datetime = datetime.datetime.fromtimestamp(ts)
@dataclass
class LavalinkCacheFetchResult:
    """A Lavalink cache row; JSON-encoded queries are decoded on creation."""

    query: Optional[MutableMapping]
    last_updated: int

    def __post_init__(self):
        ts = self.last_updated
        if isinstance(ts, int):
            self.updated_on: datetime.datetime = datetime.datetime.fromtimestamp(ts)
        if isinstance(self.query, str):
            self.query = json.loads(self.query)
@dataclass
class LavalinkCacheFetchForGlobalResult:
    """A global-cache row; when *data* arrives JSON-encoded the raw string is
    kept in ``data_string`` and the decoded mapping replaces ``data``."""

    query: str
    data: MutableMapping

    def __post_init__(self):
        raw = self.data
        if isinstance(raw, str):
            self.data_string = str(raw)
            self.data = json.loads(raw)
@dataclass
class PlaylistFetchResult:
    """One playlist row fetched from the database."""

    playlist_id: int
    playlist_name: str
    scope_id: int
    author_id: int
    playlist_url: Optional[str] = None
    tracks: List[MutableMapping] = field(default_factory=list)

    def __post_init__(self):
        # Track lists are stored JSON-encoded; decode them in place.
        if isinstance(self.tracks, str):
            self.tracks = json.loads(self.tracks)
@dataclass
class QueueFetchResult:
    """One queued-track row; the JSON track payload is decoded and, when
    non-empty, promoted to a lavalink.Track object."""

    guild_id: int
    room_id: int
    track: dict = field(default_factory=dict)
    track_object: lavalink.Track = None

    def __post_init__(self):
        if isinstance(self.track, str):
            self.track = json.loads(self.track)
        if self.track:
            self.track_object = lavalink.Track(self.track)
def standardize_scope(scope: str) -> str:
    """Convert any of the accepted scope aliases into a canonical PlaylistScope value.

    Raises
    ------
    InvalidPlaylistScope
        If *scope* is not one of the recognised aliases.
    """
    upper = scope.upper()
    valid_scopes = ["GLOBAL", "GUILD", "AUTHOR", "USER", "SERVER", "MEMBER", "BOT"]
    if upper in PlaylistScope.list():
        # Already canonical - nothing to map.
        return upper
    if upper not in valid_scopes:
        raise InvalidPlaylistScope(
            f'"{upper}" is not a valid playlist scope.'
            f" Scope needs to be one of the following: {humanize_list(valid_scopes)}"
        )
    if upper in ("GLOBAL", "BOT"):
        return PlaylistScope.GLOBAL.value
    if upper in ("GUILD", "SERVER"):
        return PlaylistScope.GUILD.value
    # Remaining aliases (USER, MEMBER, AUTHOR) all map to the per-user scope.
    return PlaylistScope.USER.value
def prepare_config_scope(
    bot: Red,
    scope,
    author: Union[discord.abc.User, int] = None,
    guild: Union[discord.Guild, int] = None,
):
    """Return the config scope path used by Playlists (ids stored as ints).

    Raises
    ------
    MissingAuthor
        If the user scope is requested without an *author*.
    MissingGuild
        If the guild scope is requested without a *guild*.
    """
    scope = standardize_scope(scope)
    if scope == PlaylistScope.GLOBAL.value:
        return [PlaylistScope.GLOBAL.value, bot.user.id]
    if scope == PlaylistScope.USER.value:
        if author is None:
            raise MissingAuthor("Invalid author for user scope.")
        # Accept either a user object or a bare id.
        return [PlaylistScope.USER.value, int(getattr(author, "id", author))]
    if guild is None:
        raise MissingGuild("Invalid guild for guild scope.")
    return [PlaylistScope.GUILD.value, int(getattr(guild, "id", guild))]
def prepare_config_scope_for_migration23(  # TODO: remove me in a future version ?
    scope, author: Union[discord.abc.User, int] = None, guild: discord.Guild = None
):
    """Return the pre-migration config scope path used by Playlists.

    Unlike `prepare_config_scope`, ids are kept as *strings* (the old storage
    format) and the global scope carries no bot id.
    """
    scope = standardize_scope(scope)
    if scope == PlaylistScope.GLOBAL.value:
        return [PlaylistScope.GLOBAL.value]
    if scope == PlaylistScope.USER.value:
        if author is None:
            raise MissingAuthor("Invalid author for user scope.")
        return [PlaylistScope.USER.value, str(getattr(author, "id", author))]
    if guild is None:
        raise MissingGuild("Invalid guild for guild scope.")
    return [PlaylistScope.GUILD.value, str(getattr(guild, "id", guild))]
# Lightweight stand-in carrying only author and scope, used where a full
# playlist object is not available.
FakePlaylist = namedtuple("Playlist", "author scope")
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/audio/apis/api_utils.py
| 0.697712 | 0.15704 |
api_utils.py
|
pypi
|
from abc import ABC
from typing import Final
from base64 import b64decode
from io import BytesIO
import struct
from redbot import VersionInfo
from redbot.core import commands
from ..converters import get_lazy_converter, get_playlist_converter
# Cog version and authorship metadata.
__version__ = VersionInfo.from_json({"major": 2, "minor": 4, "micro": 0, "releaselevel": "final"})
__author__ = ["aikaterna", "Draper"]

# Internal version counters; presumably bumped on data-schema changes and new
# owner notifications respectively — TODO confirm against migration code.
_SCHEMA_VERSION: Final[int] = 3
_OWNER_NOTIFICATION: Final[int] = 1

# Argument converters built by the converters module; "--" is the flag prefix
# passed to the lazy greedy converter.
LazyGreedyConverter = get_lazy_converter("--")
PlaylistConverter = get_playlist_converter()

# Maps discord permission attribute names to human-readable display labels.
HUMANIZED_PERM = {
    "create_instant_invite": "Create Instant Invite",
    "kick_members": "Kick Members",
    "ban_members": "Ban Members",
    "administrator": "Administrator",
    "manage_channels": "Manage Channels",
    "manage_guild": "Manage Server",
    "add_reactions": "Add Reactions",
    "view_audit_log": "View Audit Log",
    "priority_speaker": "Priority Speaker",
    "stream": "Go Live",
    "read_messages": "Read Text Channels & See Voice Channels",
    "send_messages": "Send Messages",
    "send_tts_messages": "Send TTS Messages",
    "manage_messages": "Manage Messages",
    "embed_links": "Embed Links",
    "attach_files": "Attach Files",
    "read_message_history": "Read Message History",
    "mention_everyone": "Mention @everyone, @here, and All Roles",
    "external_emojis": "Use External Emojis",
    "view_guild_insights": "View Server Insights",
    "connect": "Connect",
    "speak": "Speak",
    "mute_members": "Mute Members",
    "deafen_members": "Deafen Members",
    "move_members": "Move Members",
    "use_voice_activation": "Use Voice Activity",
    "change_nickname": "Change Nickname",
    "manage_nicknames": "Manage Nicknames",
    "manage_roles": "Manage Roles",
    "manage_webhooks": "Manage Webhooks",
    "manage_emojis": "Manage Emojis",
}
class CompositeMetaClass(type(commands.Cog), type(ABC)):
    """
    This allows the metaclass used for proper type detection to
    coexist with discord.py's metaclass.

    Used as the ``metaclass=`` of the cog's ABC-based mixin classes so a
    single class can inherit from both `commands.Cog` and an `ABC`.
    """

    pass
# Both DataReader and DataWriter are taken from https://github.com/Devoxin/Lavalink.py/blob/master/lavalink/datarw.py
# These are licenced under MIT, Thanks Devoxin for putting these together!
# The license can be found in https://github.com/Devoxin/Lavalink.py/blob/master/LICENSE
class DataReader:
    """Sequential reader for Lavalink's base64-encoded binary track blobs.

    Ported from Devoxin's Lavalink.py (MIT licensed); all integers are
    big-endian, strings are length-prefixed with an unsigned short.
    """

    def __init__(self, ts):
        # Decode the base64 payload once; all reads consume from this buffer.
        self._buf = BytesIO(b64decode(ts))

    def _read(self, n):
        return self._buf.read(n)

    def read_byte(self):
        return self._read(1)

    def read_boolean(self):
        # A boolean is stored as one unsigned byte; non-zero means True.
        return struct.unpack("B", self.read_byte())[0] != 0

    def read_unsigned_short(self):
        return struct.unpack(">H", self._read(2))[0]

    def read_int(self):
        return struct.unpack(">i", self._read(4))[0]

    def read_long(self):
        return struct.unpack(">Q", self._read(8))[0]

    def read_utf(self):
        # 2-byte big-endian length prefix, then that many raw bytes.
        length = self.read_unsigned_short()
        return self._read(length)
class DataWriter:
    """Builder for Lavalink-style binary track blobs (inverse of `DataReader`).

    Ported from Devoxin's Lavalink.py (MIT licensed).
    """

    def __init__(self):
        self._buf = BytesIO()

    def _write(self, data):
        self._buf.write(data)

    def write_byte(self, byte):
        self._buf.write(byte)

    def write_boolean(self, b):
        # Booleans are encoded as one unsigned byte: 1 for True, 0 for False.
        self.write_byte(struct.pack("B", 1 if b else 0))

    def write_unsigned_short(self, s):
        self._write(struct.pack(">H", s))

    def write_int(self, i):
        self._write(struct.pack(">i", i))

    def write_long(self, l):
        self._write(struct.pack(">Q", l))

    def write_utf(self, s):
        # Length-prefixed UTF-8, the layout DataReader.read_utf expects.
        encoded = s.encode("utf8")
        if len(encoded) > 65535:
            raise OverflowError("UTF string may not exceed 65535 bytes!")
        self.write_unsigned_short(len(encoded))
        self._write(encoded)

    def finish(self):
        """Return the assembled blob: a 4-byte header then the payload.

        The header is the payload length with bit 30 set as a version flag.
        """
        with BytesIO() as track_buf:
            payload_len = self._buf.getbuffer().nbytes
            track_buf.write(struct.pack(">i", payload_len | (1 << 30)))
            self._buf.seek(0)
            track_buf.write(self._buf.read())
            self._buf.close()
            track_buf.seek(0)
            return track_buf.read()
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/audio/core/cog_utils.py
| 0.821223 | 0.228425 |
cog_utils.py
|
pypi
|
import asyncio
import contextlib
import logging
from typing import List
import discord
import lavalink
from redbot.core import commands
from redbot.core.utils.chat_formatting import box
from ...equalizer import Equalizer
from ..abc import MixinMeta
from ..cog_utils import CompositeMetaClass
log = logging.getLogger("red.cogs.Audio.cog.Utilities.equalizer")
class EqualizerUtilities(MixinMeta, metaclass=CompositeMetaClass):
    """Mixin with the reaction-driven equalizer helpers for the Audio cog."""

    async def _apply_gain(self, guild_id: int, band: int, gain: float) -> None:
        # Send a single-band "equalizer" op to the guild's Lavalink node.
        const = {
            "op": "equalizer",
            "guildId": str(guild_id),
            "bands": [{"band": band, "gain": gain}],
        }
        try:
            await lavalink.get_player(guild_id).node.send({**const})
        except (KeyError, IndexError):
            # No player for this guild - nothing to apply to.
            pass

    async def _apply_gains(self, guild_id: int, gains: List[float]) -> None:
        # Same as _apply_gain, but pushes the whole band list in one op;
        # band index comes from the list position.
        const = {
            "op": "equalizer",
            "guildId": str(guild_id),
            "bands": [{"band": x, "gain": y} for x, y in enumerate(gains)],
        }
        try:
            await lavalink.get_player(guild_id).node.send({**const})
        except (KeyError, IndexError):
            pass

    async def _eq_check(self, ctx: commands.Context, player: lavalink.Player) -> None:
        # Reconcile the player's in-memory Equalizer with the per-guild config.
        eq = player.fetch("eq", Equalizer())
        config_bands = await self.config.custom("EQUALIZER", ctx.guild.id).eq_bands()
        if not config_bands:
            # Nothing persisted yet: seed the config from the current eq.
            config_bands = eq.bands
            await self.config.custom("EQUALIZER", ctx.guild.id).eq_bands.set(eq.bands)
        if eq.bands != config_bands:
            # Saved bands differ: copy them onto the eq and apply to the node.
            band_num = list(range(0, eq.band_count))
            band_value = config_bands
            eq_dict = {}
            for k, v in zip(band_num, band_value):
                eq_dict[k] = v
            for band, value in eq_dict.items():
                eq.set_gain(band, value)
            player.store("eq", eq)
            await self._apply_gains(ctx.guild.id, config_bands)

    async def _eq_interact(
        self,
        ctx: commands.Context,
        player: lavalink.Player,
        eq: Equalizer,
        message: discord.Message,
        selected: int,
    ) -> None:
        """One step of the interactive equalizer editor.

        Re-renders the band visualisation with a ``^^`` cursor under the
        *selected* band, waits for a control reaction and recurses with the
        updated state. Returns (ending the session) when the message is gone
        or the reaction wait times out.
        """
        player.store("eq", eq)
        # Control reactions: cursor movement, gain nudges/extremes, reset, help.
        emoji = {
            "far_left": "\N{BLACK LEFT-POINTING TRIANGLE}\N{VARIATION SELECTOR-16}",
            "one_left": "\N{LEFTWARDS BLACK ARROW}\N{VARIATION SELECTOR-16}",
            "max_output": "\N{BLACK UP-POINTING DOUBLE TRIANGLE}",
            "output_up": "\N{UP-POINTING SMALL RED TRIANGLE}",
            "output_down": "\N{DOWN-POINTING SMALL RED TRIANGLE}",
            "min_output": "\N{BLACK DOWN-POINTING DOUBLE TRIANGLE}",
            "one_right": "\N{BLACK RIGHTWARDS ARROW}\N{VARIATION SELECTOR-16}",
            "far_right": "\N{BLACK RIGHT-POINTING TRIANGLE}\N{VARIATION SELECTOR-16}",
            "reset": "\N{BLACK CIRCLE FOR RECORD}\N{VARIATION SELECTOR-16}",
            "info": "\N{INFORMATION SOURCE}\N{VARIATION SELECTOR-16}",
        }
        selector = f'{" " * 8}{" " * selected}^^'
        try:
            await message.edit(content=box(f"{eq.visualise()}\n{selector}", lang="ini"))
        except discord.errors.NotFound:
            # The eq message was deleted - end the interactive session.
            return
        try:
            (react_emoji, react_user) = await self._get_eq_reaction(ctx, message, emoji)
        except TypeError:
            # _get_eq_reaction returned None (timeout); unpacking raised.
            return
        if not react_emoji:
            # Session over: persist the bands and clean up our reactions.
            await self.config.custom("EQUALIZER", ctx.guild.id).eq_bands.set(eq.bands)
            await self._clear_react(message, emoji)
        if react_emoji == "\N{LEFTWARDS BLACK ARROW}\N{VARIATION SELECTOR-16}":
            await self.remove_react(message, react_emoji, react_user)
            await self._eq_interact(ctx, player, eq, message, max(selected - 1, 0))
        if react_emoji == "\N{BLACK RIGHTWARDS ARROW}\N{VARIATION SELECTOR-16}":
            await self.remove_react(message, react_emoji, react_user)
            await self._eq_interact(ctx, player, eq, message, min(selected + 1, 14))
        if react_emoji == "\N{UP-POINTING SMALL RED TRIANGLE}":
            await self.remove_react(message, react_emoji, react_user)
            # Nudge the selected band up by 0.1, clamped to +1.0.
            _max = float("{:.2f}".format(min(eq.get_gain(selected) + 0.1, 1.0)))
            eq.set_gain(selected, _max)
            await self._apply_gain(ctx.guild.id, selected, _max)
            await self._eq_interact(ctx, player, eq, message, selected)
        if react_emoji == "\N{DOWN-POINTING SMALL RED TRIANGLE}":
            await self.remove_react(message, react_emoji, react_user)
            # Nudge the selected band down by 0.1, clamped to -0.25.
            _min = float("{:.2f}".format(max(eq.get_gain(selected) - 0.1, -0.25)))
            eq.set_gain(selected, _min)
            await self._apply_gain(ctx.guild.id, selected, _min)
            await self._eq_interact(ctx, player, eq, message, selected)
        if react_emoji == "\N{BLACK UP-POINTING DOUBLE TRIANGLE}":
            await self.remove_react(message, react_emoji, react_user)
            _max = 1.0
            eq.set_gain(selected, _max)
            await self._apply_gain(ctx.guild.id, selected, _max)
            await self._eq_interact(ctx, player, eq, message, selected)
        if react_emoji == "\N{BLACK DOWN-POINTING DOUBLE TRIANGLE}":
            await self.remove_react(message, react_emoji, react_user)
            _min = -0.25
            eq.set_gain(selected, _min)
            await self._apply_gain(ctx.guild.id, selected, _min)
            await self._eq_interact(ctx, player, eq, message, selected)
        if react_emoji == "\N{BLACK LEFT-POINTING TRIANGLE}\N{VARIATION SELECTOR-16}":
            await self.remove_react(message, react_emoji, react_user)
            selected = 0
            await self._eq_interact(ctx, player, eq, message, selected)
        if react_emoji == "\N{BLACK RIGHT-POINTING TRIANGLE}\N{VARIATION SELECTOR-16}":
            await self.remove_react(message, react_emoji, react_user)
            selected = 14
            await self._eq_interact(ctx, player, eq, message, selected)
        if react_emoji == "\N{BLACK CIRCLE FOR RECORD}\N{VARIATION SELECTOR-16}":
            await self.remove_react(message, react_emoji, react_user)
            # Reset: zero every band and push the flat curve to the node.
            for band in range(eq.band_count):
                eq.set_gain(band, 0.0)
            await self._apply_gains(ctx.guild.id, eq.bands)
            await self._eq_interact(ctx, player, eq, message, selected)
        if react_emoji == "\N{INFORMATION SOURCE}\N{VARIATION SELECTOR-16}":
            await self.remove_react(message, react_emoji, react_user)
            await ctx.send_help(self.command_equalizer)
            await self._eq_interact(ctx, player, eq, message, selected)

    async def _eq_msg_clear(self, eq_message: discord.Message):
        # Best-effort delete of the equalizer UI message.
        if eq_message is not None:
            with contextlib.suppress(discord.HTTPException):
                await eq_message.delete()

    async def _get_eq_reaction(self, ctx: commands.Context, message: discord.Message, emoji):
        """Wait up to 30s for the invoker to press one of the control emoji.

        Returns ``(emoji, user)`` on success, or ``None`` on timeout (after
        clearing our reactions).
        """
        try:
            reaction, user = await self.bot.wait_for(
                "reaction_add",
                check=lambda r, u: r.message.id == message.id
                and u.id == ctx.author.id
                and r.emoji in emoji.values(),
                timeout=30,
            )
        except asyncio.TimeoutError:
            await self._clear_react(message, emoji)
            return None
        else:
            return reaction.emoji, user
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/audio/core/utilities/equalizer.py
| 0.585338 | 0.159348 |
equalizer.py
|
pypi
|
import asyncio
import time
import random
from collections import Counter
import discord
from redbot.core import bank, errors
from redbot.core.i18n import Translator
from redbot.core.utils.chat_formatting import box, bold, humanize_list, humanize_number
from redbot.core.utils.common_filters import normalize_smartquotes
from .log import LOG
__all__ = ["TriviaSession"]

T_ = Translator("TriviaSession", __file__)

# Temporarily make ``_`` a no-op so the tuples below store the *untranslated*
# source strings (still picked up by the string extractor); they are passed
# through ``T_`` at send time so the active locale applies.
_ = lambda s: s

# Responses used when revealing the answer after a timeout.
_REVEAL_MESSAGES = (
    _("I know this one! {answer}!"),
    _("Easy: {answer}."),
    _("Oh really? It's {answer} of course."),
)
# Same, but with the answer wrapped in Discord spoiler markup.
SPOILER_REVEAL_MESSAGES = (
    _("I know this one! ||{answer}!||"),
    _("Easy: ||{answer}.||"),
    _("Oh really? It's ||{answer}|| of course."),
)
# Responses used when moving on without revealing the answer.
_FAIL_MESSAGES = (
    _("To the next one I guess..."),
    _("Moving on..."),
    _("I'm sure you'll know the answer of the next one."),
    _("\N{PENSIVE FACE} Next one."),
)
# Restore ``_`` as the real translator for the rest of the module.
_ = T_
class TriviaSession:
    """Class to run a session of trivia with the user.

    To run the trivia session immediately, use `TriviaSession.start` instead of
    instantiating directly.

    Attributes
    ----------
    ctx : `commands.Context`
        Context object from which this session will be run.
        This object assumes the session was started in `ctx.channel`
        by `ctx.author`.
    question_list : `dict`
        A list of tuples mapping questions (`str`) to answers (`list` of
        `str`).
    settings : `dict`
        Settings for the trivia session, with values for the following:
        - ``max_score`` (`int`)
        - ``delay`` (`float`)
        - ``timeout`` (`float`)
        - ``reveal_answer`` (`bool`)
        - ``bot_plays`` (`bool`)
        - ``allow_override`` (`bool`)
        - ``payout_multiplier`` (`float`)
    scores : `collections.Counter`
        A counter with the players as keys, and their scores as values. The
        players are of type `discord.Member`.
    count : `int`
        The number of questions which have been asked.
    """

    def __init__(self, ctx, question_list: dict, settings: dict):
        self.ctx = ctx
        # Shuffle once up front so questions come out in a random order.
        list_ = list(question_list.items())
        random.shuffle(list_)
        self.question_list = list_
        self.settings = settings
        self.scores = Counter()
        self.count = 0
        # Timestamp of the last player message; drives the inactivity timeout.
        self._last_response = time.time()
        # asyncio.Task running `run()`; set by `start()`.
        self._task = None

    @classmethod
    def start(cls, ctx, question_list, settings):
        """Create and start a trivia session.

        This allows the session to manage the running and cancellation of its
        own tasks.

        Parameters
        ----------
        ctx : `commands.Context`
            Same as `TriviaSession.ctx`
        question_list : `dict`
            Same as `TriviaSession.question_list`
        settings : `dict`
            Same as `TriviaSession.settings`

        Returns
        -------
        TriviaSession
            The new trivia session being run.
        """
        session = cls(ctx, question_list, settings)
        loop = ctx.bot.loop
        session._task = loop.create_task(session.run())
        session._task.add_done_callback(session._error_handler)
        return session

    def _error_handler(self, fut):
        """Catches errors in the session task."""
        try:
            fut.result()
        except asyncio.CancelledError:
            pass
        except (discord.NotFound, discord.Forbidden):
            # Channel/message gone or no permission to post: end silently.
            self.stop()
        except Exception as exc:
            LOG.error("A trivia session has encountered an error.\n", exc_info=exc)
            asyncio.create_task(
                self.ctx.send(
                    _(
                        "An unexpected error occurred in the trivia session.\nCheck your console or logs for details."
                    )
                )
            )
            self.stop()

    async def run(self):
        """Run the trivia session.

        In order for the trivia session to be stopped correctly, this should
        only be called internally by `TriviaSession.start`.
        """
        await self._send_startup_msg()
        max_score = self.settings["max_score"]
        delay = self.settings["delay"]
        timeout = self.settings["timeout"]
        for question, answers in self._iter_questions():
            async with self.ctx.typing():
                # Short pause before each question is shown.
                await asyncio.sleep(3)
            self.count += 1
            msg = bold(_("Question number {num}!").format(num=self.count)) + "\n\n" + question
            await self.ctx.send(msg)
            continue_ = await self.wait_for_answer(answers, delay, timeout)
            if continue_ is False:
                break
            if any(score >= max_score for score in self.scores.values()):
                await self.end_game()
                break
        else:
            # Question list exhausted without anyone hitting max_score.
            await self.ctx.send(_("There are no more questions!"))
            await self.end_game()

    async def _send_startup_msg(self):
        # Announce which trivia lists this session draws from.
        list_names = []
        for idx, tup in enumerate(self.settings["lists"].items()):
            name, author = tup
            if author:
                title = _("{trivia_list} (by {author})").format(trivia_list=name, author=author)
            else:
                title = name
            list_names.append(title)
        await self.ctx.send(
            _("Starting Trivia: {list_names}").format(list_names=humanize_list(list_names))
        )

    def _iter_questions(self):
        """Iterate over questions and answers for this session.

        Yields
        ------
        `tuple`
            A tuple containing the question (`str`) and the answers (`tuple` of
            `str`).
        """
        for question, answers in self.question_list:
            answers = _parse_answers(answers)
            yield question, answers

    async def wait_for_answer(self, answers, delay: float, timeout: float):
        """Wait for a correct answer, and then respond.

        Scores are also updated in this method.

        Returns False if waiting was cancelled; this is usually due to the
        session being forcibly stopped.

        Parameters
        ----------
        answers : `iterable` of `str`
            A list of valid answers to the current question.
        delay : float
            How long users have to respond (in seconds).
        timeout : float
            How long before the session ends due to no responses (in seconds).

        Returns
        -------
        bool
            :code:`True` if the session wasn't interrupted.
        """
        try:
            message = await self.ctx.bot.wait_for(
                "message", check=self.check_answer(answers), timeout=delay
            )
        except asyncio.TimeoutError:
            # Nobody answered this question; end the whole session if nobody
            # has said anything for `timeout` seconds.
            if time.time() - self._last_response >= timeout:
                await self.ctx.send(_("Guys...? Well, I guess I'll stop then."))
                self.stop()
                return False
            if self.settings["reveal_answer"]:
                # Messages were stored untranslated; translate at send time.
                if self.settings["use_spoilers"]:
                    reply = T_(random.choice(SPOILER_REVEAL_MESSAGES)).format(answer=answers[0])
                else:
                    reply = T_(random.choice(_REVEAL_MESSAGES)).format(answer=answers[0])
            else:
                reply = T_(random.choice(_FAIL_MESSAGES))
            if self.settings["bot_plays"]:
                reply += _(" **+1** for me!")
                self.scores[self.ctx.guild.me] += 1
            await self.ctx.send(reply)
        else:
            self.scores[message.author] += 1
            reply = _("You got it {user}! **+1** to you!").format(user=message.author.display_name)
            await self.ctx.send(reply)
        return True

    def check_answer(self, answers):
        """Get a predicate to check for correct answers.

        The returned predicate takes a message as its only parameter,
        and returns ``True`` if the message contains any of the
        given answers.

        Parameters
        ----------
        answers : `iterable` of `str`
            The answers which the predicate must check for.

        Returns
        -------
        function
            The message predicate.
        """
        answers = tuple(s.lower() for s in answers)

        def _pred(message: discord.Message):
            early_exit = message.channel != self.ctx.channel or message.author == self.ctx.guild.me
            if early_exit:
                return False

            self._last_response = time.time()
            guess = message.content.lower()
            guess = normalize_smartquotes(guess)
            for answer in answers:
                if " " in answer and answer in guess:
                    # Exact matching, issue #331
                    return True
                elif any(word == answer for word in guess.split(" ")):
                    return True
            return False

        return _pred

    async def end_game(self):
        """End the trivia session and display scores."""
        if self.scores:
            await self.send_table()
        multiplier = self.settings["payout_multiplier"]
        if multiplier > 0:
            await self.pay_winners(multiplier)
        self.stop()

    async def send_table(self):
        """Send a table of scores to the session's channel."""
        table = "+ Results: \n\n"
        for user, score in self.scores.most_common():
            table += "+ {}\t{}\n".format(user, score)
        await self.ctx.send(box(table, lang="diff"))

    def stop(self):
        """Stop the trivia session, without showing scores."""
        self.ctx.bot.dispatch("trivia_end", self)

    def force_stop(self):
        """Cancel whichever tasks this session is running."""
        self._task.cancel()
        channel = self.ctx.channel
        LOG.debug("Force stopping trivia session; #%s in %s", channel, channel.guild.id)

    async def pay_winners(self, multiplier: float):
        """Pay the winner(s) of this trivia session.

        Payout only occurs if there are at least 3 human contestants.
        If a tie occurs the payout is split evenly among the winners.

        Parameters
        ----------
        multiplier : float
            The coefficient of the winning score, used to determine the amount
            paid.
        """
        if not self.scores:
            return
        top_score = self.scores.most_common(1)[0][1]
        winners = []
        num_humans = 0
        for (player, score) in self.scores.items():
            if not player.bot:
                if score == top_score:
                    winners.append(player)
                num_humans += 1
        if not winners or num_humans < 3:
            return
        # Split the pot evenly among tied winners.
        payout = int(top_score * multiplier / len(winners))
        if payout <= 0:
            return
        for winner in winners:
            LOG.debug("Paying trivia winner: %d credits --> %s", payout, winner.name)
            try:
                await bank.deposit_credits(winner, payout)
            except errors.BalanceTooHigh as e:
                # Winner hit the balance cap; top them up to the maximum.
                await bank.set_balance(winner, e.max_balance)
        if len(winners) > 1:
            msg = _(
                "Congratulations {users}! You have each received {num} {currency} for winning!"
            ).format(
                users=humanize_list([bold(winner.display_name) for winner in winners]),
                num=payout,
                currency=await bank.get_currency_name(self.ctx.guild),
            )
        else:
            msg = _(
                "Congratulations {user}! You have received {num} {currency} for winning!"
            ).format(
                user=bold(winners[0].display_name),
                num=payout,
                currency=await bank.get_currency_name(self.ctx.guild),
            )
        await self.ctx.send(msg)
def _parse_answers(answers):
    """Convert raw trivia answers into guessable strings.

    YAML loads bare ``yes``/``no`` as booleans, which would make such answers
    impossible to type. Booleans are therefore expanded into their common
    textual spellings; everything else is stringified.

    Parameters
    ----------
    answers : `iterable` of `str`
        The raw answers loaded from YAML.

    Returns
    -------
    `tuple` of `str`
        The answers in readable/guessable strings, de-duplicated while
        preserving first-seen order.
    """
    expanded = []
    for raw in answers:
        if raw is True:
            expanded.extend(["True", "Yes", "On"])
        elif raw is False:
            expanded.extend(["False", "No", "Off"])
        else:
            expanded.append(str(raw))
    # dict.fromkeys keeps insertion order, giving an order-preserving de-dup.
    return tuple(dict.fromkeys(expanded))
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/trivia/session.py
| 0.648355 | 0.225502 |
session.py
|
pypi
|
from __future__ import annotations
import functools
import shutil
from enum import IntEnum
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple, Union, cast
from .log import log
from .info_schemas import INSTALLABLE_SCHEMA, update_mixin
from .json_mixins import RepoJSONMixin
from redbot.core import VersionInfo
if TYPE_CHECKING:
from .repo_manager import RepoManager, Repo
class InstallableType(IntEnum):
    """Kind of an installable package, as declared in its info.json."""

    # using IntEnum, because hot-reload breaks its identity
    UNKNOWN = 0
    COG = 1
    SHARED_LIBRARY = 2
class Installable(RepoJSONMixin):
    """Base class for anything the Downloader cog can install.

    - Modules
    - Repo Libraries
    - Other stuff?

    The attributes of this class will mostly come from the installation's
    info.json.

    Attributes
    ----------
    repo_name : `str`
        Name of the repository which this package belongs to.
    repo : Repo, optional
        Repo object of the Installable, if repo is missing this will be `None`
    commit : `str`, optional
        Installable's commit. This is not the same as ``repo.commit``
    author : `tuple` of `str`
        Name(s) of the author(s).
    end_user_data_statement : `str`
        End user data statement of the module.
    min_bot_version : `VersionInfo`
        The minimum bot version required for this Installable.
    max_bot_version : `VersionInfo`
        The maximum bot version required for this Installable.
        Ignored if `min_bot_version` is newer than `max_bot_version`.
    min_python_version : `tuple` of `int`
        The minimum python version required for this cog.
    hidden : `bool`
        Whether or not this cog will be hidden from the user when they use
        `Downloader`'s commands.
    required_cogs : `dict`
        In the form :code:`{cog_name : repo_url}`, these are cogs which are
        required for this installation.
    requirements : `tuple` of `str`
        Required libraries for this installation.
    tags : `tuple` of `str`
        List of tags to assist in searching.
    type : `int`
        The type of this installation, as specified by
        :class:`InstallationType`.
    """

    def __init__(self, location: Path, repo: Optional[Repo] = None, commit: str = ""):
        """Base installable initializer.

        Parameters
        ----------
        location : pathlib.Path
            Location (file or folder) to the installable.
        repo : Repo, optional
            Repo object of the Installable, if repo is missing this will be `None`
        commit : str
            Installable's commit. This is not the same as ``repo.commit``
        """
        self._location = location
        self.repo = repo
        # The repo folder's name is the repo name (location is <repo>/<module>).
        self.repo_name = self._location.parent.stem
        self.commit = commit
        # The following attributes are populated from info.json by
        # _read_info_file() via update_mixin(); declared here for typing.
        self.end_user_data_statement: str
        self.min_bot_version: VersionInfo
        self.max_bot_version: VersionInfo
        self.min_python_version: Tuple[int, int, int]
        self.hidden: bool
        self.disabled: bool
        self.required_cogs: Dict[str, str]  # Cog name -> repo URL
        self.requirements: Tuple[str, ...]
        self.tags: Tuple[str, ...]
        self.type: InstallableType
        super().__init__(location)

    def __eq__(self, other: Any) -> bool:
        # Identity is the filesystem location; assumes *other* is an Installable.
        # noinspection PyProtectedMember
        return self._location == other._location

    def __hash__(self) -> int:
        return hash(self._location)

    @property
    def name(self) -> str:
        """`str` : The name of this package."""
        return self._location.stem

    async def copy_to(self, target_dir: Path) -> bool:
        """
        Copies this cog/shared_lib to the given directory. This
        will overwrite any files in the target directory.

        :param pathlib.Path target_dir: The installation directory to install to.
        :return: Status of installation
        :rtype: bool
        """
        copy_func: Callable[..., Any]
        # Single-file modules are copied as files; packages as whole trees.
        if self._location.is_file():
            copy_func = shutil.copy2
        else:
            copy_func = functools.partial(shutil.copytree, dirs_exist_ok=True)

        # noinspection PyBroadException
        try:
            copy_func(src=str(self._location), dst=str(target_dir / self._location.stem))
        except:  # noqa: E722
            # Deliberately broad: any copy failure is logged and reported as False.
            log.exception("Error occurred when copying path: %s", self._location)
            return False
        return True

    def _read_info_file(self) -> None:
        super()._read_info_file()
        # Validate/populate the declared attributes from the parsed info.json.
        update_mixin(self, INSTALLABLE_SCHEMA)
        if self.type == InstallableType.SHARED_LIBRARY:
            # Shared libraries are never shown to users.
            self.hidden = True
class InstalledModule(Installable):
    """Base class for installed modules,
    this is basically instance of installed `Installable`
    used by Downloader.

    Attributes
    ----------
    pinned : `bool`
        Whether or not this cog is pinned, always `False` if module is not a cog.
    """

    def __init__(
        self,
        location: Path,
        repo: Optional[Repo] = None,
        commit: str = "",
        pinned: bool = False,
        json_repo_name: str = "",
    ):
        super().__init__(location=location, repo=repo, commit=commit)
        # Only cogs can be pinned; shared libraries always report False.
        self.pinned: bool = pinned if self.type == InstallableType.COG else False
        # this is here so that Downloader could use real repo name instead of "MISSING_REPO"
        self._json_repo_name = json_repo_name

    def to_json(self) -> Dict[str, Union[str, bool]]:
        """Serialize this module for Downloader's installed-modules storage."""
        module_json: Dict[str, Union[str, bool]] = {
            "repo_name": self.repo_name,
            "module_name": self.name,
            "commit": self.commit,
        }
        if self.type == InstallableType.COG:
            module_json["pinned"] = self.pinned
        return module_json

    @classmethod
    def from_json(
        cls, data: Dict[str, Union[str, bool]], repo_mgr: RepoManager
    ) -> InstalledModule:
        """Rebuild an installed module from its stored JSON representation."""
        repo_name = cast(str, data["repo_name"])
        cog_name = cast(str, data["module_name"])
        commit = cast(str, data.get("commit", ""))
        pinned = cast(bool, data.get("pinned", False))

        # TypedDict, where are you :/
        repo = repo_mgr.get_repo(repo_name)
        if repo is not None:
            repo_folder = repo.folder_path
        else:
            # The repo is gone; point at a placeholder path instead.
            repo_folder = repo_mgr.repos_folder / "MISSING_REPO"

        location = repo_folder / cog_name

        return cls(
            location=location, repo=repo, commit=commit, pinned=pinned, json_repo_name=repo_name
        )

    @classmethod
    def from_installable(cls, module: Installable, *, pinned: bool = False) -> InstalledModule:
        """Promote a plain `Installable` to an installed module."""
        return cls(
            location=module._location, repo=module.repo, commit=module.commit, pinned=pinned
        )
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/downloader/installable.py
| 0.911071 | 0.257602 |
installable.py
|
pypi
|
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, Tuple, Union, cast
from redbot import VersionInfo, version_info as red_version_info
from . import installable
from .log import log
if TYPE_CHECKING:
from .json_mixins import RepoJSONMixin
__all__ = ("REPO_SCHEMA", "INSTALLABLE_SCHEMA", "update_mixin")
class UseDefault:
    """Sentinel type: distinguishes a missing info.json key from any real value."""


# sentinel value
USE_DEFAULT = UseDefault()
def ensure_tuple_of_str(
    info_file: Path, key_name: str, value: Union[Any, UseDefault]
) -> Tuple[str, ...]:
    """Validate *value* as a list of strings and return it as a tuple.

    Any schema violation logs a warning and falls back to an empty tuple;
    a missing key (``USE_DEFAULT``) is silently defaulted.
    """
    fallback: Tuple[str, ...] = ()
    if value is USE_DEFAULT:
        return fallback
    if not isinstance(value, list):
        log.warning(
            "Invalid value of '%s' key (expected list, got %s)"
            " in JSON information file at path: %s",
            key_name,
            type(value).__name__,
            info_file,
        )
        return fallback
    for item in value:
        if isinstance(item, str):
            continue
        # Warn about the first offending item only, then give up on the list.
        log.warning(
            "Invalid item in '%s' list (expected str, got %s)"
            " in JSON information file at path: %s",
            key_name,
            type(item).__name__,
            info_file,
        )
        return fallback
    return tuple(value)
def ensure_str(info_file: Path, key_name: str, value: Union[Any, UseDefault]) -> str:
    """Validate *value* as a string; fall back to ``""`` with a logged warning."""
    if value is USE_DEFAULT:
        return ""
    if isinstance(value, str):
        return value
    log.warning(
        "Invalid value of '%s' key (expected str, got %s)"
        " in JSON information file at path: %s",
        key_name,
        type(value).__name__,
        info_file,
    )
    return ""
def ensure_red_version_info(
    info_file: Path, key_name: str, value: Union[Any, UseDefault]
) -> VersionInfo:
    """Parse *value* as a Red version string.

    Falls back to the currently running Red version (with a logged warning)
    when the value is missing, not a string, or unparseable.
    """
    if value is USE_DEFAULT:
        return red_version_info
    if not isinstance(value, str):
        log.warning(
            "Invalid value of '%s' key (expected str, got %s)"
            " in JSON information file at path: %s",
            key_name,
            type(value).__name__,
            info_file,
        )
        return red_version_info
    try:
        return VersionInfo.from_str(value)
    except ValueError:
        log.warning(
            "Invalid value of '%s' key (given value isn't a valid version string)"
            " in JSON information file at path: %s",
            key_name,
            info_file,
        )
        return red_version_info
def ensure_python_version_info(
    info_file: Path, key_name: str, value: Union[Any, UseDefault]
) -> Tuple[int, int, int]:
    """Validate *value* as a ``[major, minor, micro]`` list of three ints.

    Falls back to ``(3, 5, 1)`` (with a logged warning) on any violation.
    """
    fallback = (3, 5, 1)
    if value is USE_DEFAULT:
        return fallback
    if not isinstance(value, list):
        log.warning(
            "Invalid value of '%s' key (expected list, got %s)"
            " in JSON information file at path: %s",
            key_name,
            type(value).__name__,
            info_file,
        )
        return fallback
    if len(value) != 3:
        log.warning(
            "Invalid value of '%s' key (expected list with 3 items, got %s items)"
            " in JSON information file at path: %s",
            key_name,
            len(value),
            info_file,
        )
        return fallback
    for item in value:
        if isinstance(item, int):
            continue
        log.warning(
            "Invalid item in '%s' list (expected int, got %s)"
            " in JSON information file at path: %s",
            key_name,
            type(item).__name__,
            info_file,
        )
        return fallback
    return cast(Tuple[int, int, int], tuple(value))
def ensure_bool(
    info_file: Path, key_name: str, value: Union[Any, UseDefault], *, default: bool = False
) -> bool:
    """Validate *value* as a bool; fall back to *default* with a logged warning."""
    if value is USE_DEFAULT:
        return default
    if isinstance(value, bool):
        return value
    log.warning(
        "Invalid value of '%s' key (expected bool, got %s)"
        " in JSON information file at path: %s",
        key_name,
        type(value).__name__,
        info_file,
    )
    return default
def ensure_required_cogs_mapping(
    info_file: Path, key_name: str, value: Union[Any, UseDefault]
) -> Dict[str, str]:
    """Validate *value* as a ``{cog_name: repo_url}`` mapping of strings.

    Falls back to an empty dict (with a logged warning) on any violation.
    """
    fallback: Dict[str, str] = {}
    if value is USE_DEFAULT:
        return fallback
    if not isinstance(value, dict):
        log.warning(
            "Invalid value of '%s' key (expected dict, got %s)"
            " in JSON information file at path: %s",
            key_name,
            type(value).__name__,
            info_file,
        )
        return fallback
    # JSON object keys are always strings, so only the values need checking.
    for item in value.values():
        if isinstance(item, str):
            continue
        log.warning(
            "Invalid item in '%s' dict (expected str, got %s)"
            " in JSON information file at path: %s",
            key_name,
            type(item).__name__,
            info_file,
        )
        return fallback
    return value
def ensure_installable_type(
    info_file: Path, key_name: str, value: Union[Any, UseDefault]
) -> installable.InstallableType:
    """Map the ``type`` key of an info.json file to an `InstallableType`.

    Missing keys and the empty string count as ``COG``; any other
    unrecognized string maps to ``UNKNOWN``. Non-string values are logged
    and fall back to ``COG``.
    """
    default = installable.InstallableType.COG
    if value is USE_DEFAULT:
        return default
    if not isinstance(value, str):
        log.warning(
            "Invalid value of '%s' key (expected str, got %s)"
            " in JSON information file at path: %s",
            key_name,
            type(value).__name__,
            info_file,
        )
        return default  # NOTE: old behavior was to use InstallableType.UNKNOWN
    known_types = {
        "": installable.InstallableType.COG,
        "COG": installable.InstallableType.COG,
        "SHARED_LIBRARY": installable.InstallableType.SHARED_LIBRARY,
    }
    return known_types.get(value, installable.InstallableType.UNKNOWN)
# Signature shared by every ensure_* validator above:
# (info_file, key_name, raw_value_or_USE_DEFAULT) -> validated value.
EnsureCallable = Callable[[Path, str, Union[Any, UseDefault]], Any]
# A schema maps an info.json key name to the validator used for it.
SchemaType = Dict[str, EnsureCallable]
# Keys recognized in a repo-level info.json file.
REPO_SCHEMA: SchemaType = {
    "author": ensure_tuple_of_str,
    "description": ensure_str,
    "install_msg": ensure_str,
    "short": ensure_str,
}
# Keys recognized in an installable's (cog / shared library) info.json file.
INSTALLABLE_SCHEMA: SchemaType = {
    "min_bot_version": ensure_red_version_info,
    "max_bot_version": ensure_red_version_info,
    "min_python_version": ensure_python_version_info,
    "hidden": ensure_bool,
    "disabled": ensure_bool,
    "required_cogs": ensure_required_cogs_mapping,
    "requirements": ensure_tuple_of_str,
    "tags": ensure_tuple_of_str,
    "type": ensure_installable_type,
    "end_user_data_statement": ensure_str,
}
def update_mixin(repo_or_installable: RepoJSONMixin, schema: SchemaType) -> None:
    """Set validated attributes on *repo_or_installable* from its parsed info.json.

    For every key in *schema*, runs the matching ensure_* validator against the
    raw JSON value (or the ``USE_DEFAULT`` sentinel when the key is absent) and
    assigns the result as an attribute of the same name.
    """
    info = repo_or_installable._info
    info_file = repo_or_installable._info_file
    for key, callback in schema.items():
        setattr(repo_or_installable, key, callback(info_file, key, info.get(key, USE_DEFAULT)))
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/downloader/info_schemas.py
| 0.826257 | 0.202838 |
info_schemas.py
|
pypi
|
from __future__ import annotations
from typing import List, TYPE_CHECKING
if TYPE_CHECKING:
from .repo_manager import Candidate
__all__ = [
"DownloaderException",
"GitException",
"InvalidRepoName",
"CopyingError",
"ExistingGitRepo",
"MissingGitRepo",
"CloningError",
"CurrentHashError",
"HardResetError",
"UpdateError",
"GitDiffError",
"NoRemoteURL",
"UnknownRevision",
"AmbiguousRevision",
"PipError",
]
class DownloaderException(Exception):
    """Base class for Downloader exceptions."""
class GitException(DownloaderException):
    """Generic class for git exceptions."""

    def __init__(self, message: str, git_command: str) -> None:
        # Keep the failing command available to callers for diagnostics.
        self.git_command = git_command
        full_message = f"Git command failed: {git_command}\nError message: {message}"
        super().__init__(full_message)
class InvalidRepoName(DownloaderException):
    """
    Thrown when a repo name is invalid. Check
    the message for a more detailed reason.
    """
    pass
class CopyingError(DownloaderException):
    """
    Thrown when there was an issue
    during copying of module's files.
    """
    pass
class ExistingGitRepo(DownloaderException):
    """
    Thrown when trying to clone into a folder where a
    git repo already exists.
    """
    pass
class MissingGitRepo(DownloaderException):
    """
    Thrown when a git repo is expected to exist but
    does not.
    """
    pass
class CloningError(GitException):
    """
    Thrown when git clone returns a non-zero exit code.
    """
    pass
class CurrentHashError(GitException):
    """
    Thrown when git returns a non-zero exit code attempting
    to determine the current commit hash.
    """
    pass
class HardResetError(GitException):
    """
    Thrown when there is an issue trying to execute a hard reset
    (usually prior to a repo update).
    """
    pass
class UpdateError(GitException):
    """
    Thrown when git pull returns a non-zero error code.
    """
    pass
class GitDiffError(GitException):
    """
    Thrown when a git diff fails.
    """
    pass
class NoRemoteURL(GitException):
    """
    Thrown when no remote URL exists for a repo.
    """
    pass
class UnknownRevision(GitException):
    """
    Thrown when specified revision cannot be found.
    """
    pass
class AmbiguousRevision(GitException):
    """Thrown when specified revision is ambiguous."""

    def __init__(self, message: str, git_command: str, candidates: List[Candidate]) -> None:
        super().__init__(message, git_command)
        # Possible matches for the ambiguous revision, for the caller to present.
        self.candidates = candidates
class PipError(DownloaderException):
    """Thrown when pip returns a non-zero return code."""
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/downloader/errors.py
| 0.852184 | 0.272299 |
errors.py
|
pypi
|
import contextlib
import logging
from datetime import datetime, timedelta
from typing import Callable, List, Optional, Set, Union
import discord
from redbot.cogs.mod.converters import RawUserIds
from redbot.core import checks, commands, Config
from redbot.core.bot import Red
from redbot.core.i18n import Translator, cog_i18n
from redbot.core.utils.chat_formatting import humanize_number
from redbot.core.utils.mod import slow_deletion, mass_purge
from redbot.core.utils.predicates import MessagePredicate
from .checks import check_self_permissions
from .converters import PositiveInt, RawMessageIds, positive_int
_ = Translator("Cleanup", __file__)
log = logging.getLogger("red.cleanup")
@cog_i18n(_)
class Cleanup(commands.Cog):
"""This cog contains commands used for "cleaning up" (deleting) messages.
This is designed as a moderator tool and offers many convenient use cases.
All cleanup commands only apply to the channel the command is executed in.
Messages older than two weeks cannot be mass deleted.
This is a limitation of the API.
"""
    def __init__(self, bot: Red):
        super().__init__()
        self.bot = bot
        # 8927348724 is this cog's unique Config identifier.
        self.config = Config.get_conf(self, 8927348724, force_registration=True)
        # Per-guild toggle: whether deletion counts are announced in-channel.
        self.config.register_guild(notify=True)
async def red_delete_data_for_user(self, **kwargs):
""" Nothing to delete """
return
@staticmethod
async def check_100_plus(ctx: commands.Context, number: int) -> bool:
"""
Called when trying to delete more than 100 messages at once.
Prompts the user to choose whether they want to continue or not.
Tries its best to cleanup after itself if the response is positive.
"""
if ctx.assume_yes:
return True
prompt = await ctx.send(
_("Are you sure you want to delete {number} messages? (y/n)").format(
number=humanize_number(number)
)
)
response = await ctx.bot.wait_for("message", check=MessagePredicate.same_context(ctx))
if response.content.lower().startswith("y"):
with contextlib.suppress(discord.NotFound):
await prompt.delete()
with contextlib.suppress(discord.HTTPException):
await response.delete()
return True
else:
await ctx.send(_("Cancelled."))
return False
    @staticmethod
    async def get_messages_for_deletion(
        *,
        channel: Union[discord.TextChannel, discord.DMChannel],
        number: Optional[PositiveInt] = None,
        check: Callable[[discord.Message], bool] = lambda x: True,
        limit: Optional[PositiveInt] = None,
        before: Union[discord.Message, datetime] = None,
        after: Union[discord.Message, datetime] = None,
        delete_pinned: bool = False,
    ) -> List[discord.Message]:
        """
        Gets a list of messages meeting the requirements to be deleted.
        Generally, the requirements are:
        - We don't have the number of messages to be deleted already
        - The message passes a provided check (if no check is provided,
        this is automatically true)
        - The message is less than 14 days old
        - The message is not pinned
        Warning: Due to the way the API hands messages back in chunks,
        passing after and a number together is not advisable.
        If you need to accomplish this, you should filter messages on
        the entire applicable range, rather than use this utility.
        """
        # This isn't actually two weeks ago to allow some wiggle room on API limits
        # NOTE(review): naive UTC timestamp -- presumably matching the naive
        # message.created_at values compared below; confirm before making aware.
        two_weeks_ago = datetime.utcnow() - timedelta(days=14, minutes=-5)
        def message_filter(message):
            return (
                check(message)
                and message.created_at > two_weeks_ago
                and (delete_pinned or not message.pinned)
            )
        if after:
            if isinstance(after, discord.Message):
                after = after.created_at
            # Never collect past the bulk-delete age cutoff.
            after = max(after, two_weeks_ago)
        collected = []
        async for message in channel.history(
            limit=limit, before=before, after=after, oldest_first=False
        ):
            # History is newest-first, so the first too-old message ends the scan.
            if message.created_at < two_weeks_ago:
                break
            if message_filter(message):
                collected.append(message)
            if number is not None and number <= len(collected):
                break
        return collected
async def send_optional_notification(
self,
num: int,
channel: Union[discord.TextChannel, discord.DMChannel],
*,
subtract_invoking: bool = False,
) -> None:
"""
Sends a notification to the channel that a certain number of messages have been deleted.
"""
if not hasattr(channel, "guild") or await self.config.guild(channel.guild).notify():
if subtract_invoking:
num -= 1
if num == 1:
await channel.send(_("1 message was deleted."), delete_after=5)
else:
await channel.send(
_("{num} messages were deleted.").format(num=humanize_number(num)),
delete_after=5,
)
    @staticmethod
    async def get_message_from_reference(
        channel: discord.TextChannel, reference: discord.MessageReference
    ) -> Optional[discord.Message]:
        # Resolution order: already-resolved message, then the client-side
        # message cache, then a REST fetch as a last resort.
        message = None
        resolved = reference.resolved
        if resolved and isinstance(resolved, discord.Message):
            message = resolved
        elif (message := reference.cached_message):
            # Cached copy found; nothing else to do.
            pass
        else:
            try:
                message = await channel.fetch_message(reference.message_id)
            except discord.NotFound:
                # Referenced message was deleted; caller handles None.
                pass
        return message
    @commands.group()
    async def cleanup(self, ctx: commands.Context):
        """Base command for deleting messages."""
        # Group container only; subcommands do the actual work.
        pass
@cleanup.command()
@commands.guild_only()
@checks.mod_or_permissions(manage_messages=True)
@commands.bot_has_permissions(manage_messages=True)
async def text(
self, ctx: commands.Context, text: str, number: positive_int, delete_pinned: bool = False
):
"""Delete the last X messages matching the specified text in the current channel.
Example:
- `[p]cleanup text "test" 5`
Remember to use double quotes.
**Arguments:**
- `<number>` The max number of messages to cleanup. Must be a positive integer.
- `<delete_pinned>` Whether to delete pinned messages or not. Defaults to False
"""
channel = ctx.channel
author = ctx.author
if number > 100:
cont = await self.check_100_plus(ctx, number)
if not cont:
return
def check(m):
if text in m.content:
return True
else:
return False
to_delete = await self.get_messages_for_deletion(
channel=channel,
number=number,
check=check,
before=ctx.message,
delete_pinned=delete_pinned,
)
to_delete.append(ctx.message)
reason = "{}({}) deleted {} messages containing '{}' in channel #{}.".format(
author.name,
author.id,
humanize_number(len(to_delete), override_locale="en_us"),
text,
channel.id,
)
log.info(reason)
await mass_purge(to_delete, channel)
await self.send_optional_notification(len(to_delete), channel, subtract_invoking=True)
    @cleanup.command()
    @commands.guild_only()
    @checks.mod_or_permissions(manage_messages=True)
    @commands.bot_has_permissions(manage_messages=True)
    async def user(
        self,
        ctx: commands.Context,
        user: Union[discord.Member, RawUserIds],
        number: positive_int,
        delete_pinned: bool = False,
    ):
        """Delete the last X messages from a specified user in the current channel.
        Examples:
        - `[p]cleanup user @Twentysix 2`
        - `[p]cleanup user Red 6`
        **Arguments:**
        - `<user>` The user whose messages are to be cleaned up.
        - `<number>` The max number of messages to cleanup. Must be a positive integer.
        - `<delete_pinned>` Whether to delete pinned messages or not. Defaults to False
        """
        channel = ctx.channel
        # Accept either a resolved Member or a raw user id (e.g. user left the guild).
        member = None
        if isinstance(user, discord.Member):
            member = user
            _id = member.id
        else:
            _id = user
        author = ctx.author
        # Bulk deletions above 100 messages require explicit confirmation.
        if number > 100:
            cont = await self.check_100_plus(ctx, number)
            if not cont:
                return
        def check(m):
            if m.author.id == _id:
                return True
            else:
                return False
        to_delete = await self.get_messages_for_deletion(
            channel=channel,
            number=number,
            check=check,
            before=ctx.message,
            delete_pinned=delete_pinned,
        )
        # Also remove the invoking command message itself.
        to_delete.append(ctx.message)
        # "???" is logged when only a raw id was supplied and the member is unknown.
        reason = (
            "{}({}) deleted {} messages"
            " made by {}({}) in channel #{}."
            "".format(
                author.name,
                author.id,
                humanize_number(len(to_delete), override_locale="en_US"),
                member or "???",
                _id,
                channel.name,
            )
        )
        log.info(reason)
        await mass_purge(to_delete, channel)
        await self.send_optional_notification(len(to_delete), channel, subtract_invoking=True)
    @cleanup.command()
    @commands.guild_only()
    @checks.mod_or_permissions(manage_messages=True)
    @commands.bot_has_permissions(manage_messages=True)
    async def after(
        self,
        ctx: commands.Context,
        message_id: Optional[RawMessageIds],
        delete_pinned: bool = False,
    ):
        """Delete all messages after a specified message.
        To get a message id, enable developer mode in Discord's
        settings, 'appearance' tab. Then right click a message
        and copy its id.
        Replying to a message will cleanup all messages after it.
        **Arguments:**
        - `<message_id>` The id of the message to cleanup after. This message won't be deleted.
        - `<delete_pinned>` Whether to delete pinned messages or not. Defaults to False
        """
        channel = ctx.channel
        author = ctx.author
        # Anchor message: an explicit id takes precedence over a reply reference.
        after = None
        if message_id:
            try:
                after = await channel.fetch_message(message_id)
            except discord.NotFound:
                return await ctx.send(_("Message not found."))
        elif ref := ctx.message.reference:
            after = await self.get_message_from_reference(channel, ref)
        if after is None:
            # No usable anchor -- surface a usage error to the command framework.
            raise commands.BadArgument
        # number=None: delete everything newer than the anchor (within API limits).
        to_delete = await self.get_messages_for_deletion(
            channel=channel, number=None, after=after, delete_pinned=delete_pinned
        )
        reason = "{}({}) deleted {} messages in channel #{}.".format(
            author.name,
            author.id,
            humanize_number(len(to_delete), override_locale="en_US"),
            channel.name,
        )
        log.info(reason)
        await mass_purge(to_delete, channel)
        await self.send_optional_notification(len(to_delete), channel)
    @cleanup.command()
    @commands.guild_only()
    @checks.mod_or_permissions(manage_messages=True)
    @commands.bot_has_permissions(manage_messages=True)
    async def before(
        self,
        ctx: commands.Context,
        message_id: Optional[RawMessageIds],
        number: positive_int,
        delete_pinned: bool = False,
    ):
        """Deletes X messages before the specified message.
        To get a message id, enable developer mode in Discord's
        settings, 'appearance' tab. Then right click a message
        and copy its id.
        Replying to a message will cleanup all messages before it.
        **Arguments:**
        - `<message_id>` The id of the message to cleanup before. This message won't be deleted.
        - `<number>` The max number of messages to cleanup. Must be a positive integer.
        - `<delete_pinned>` Whether to delete pinned messages or not. Defaults to False
        """
        channel = ctx.channel
        author = ctx.author
        # Anchor message: an explicit id takes precedence over a reply reference.
        before = None
        if message_id:
            try:
                before = await channel.fetch_message(message_id)
            except discord.NotFound:
                return await ctx.send(_("Message not found."))
        elif ref := ctx.message.reference:
            before = await self.get_message_from_reference(channel, ref)
        if before is None:
            # No usable anchor -- surface a usage error to the command framework.
            raise commands.BadArgument
        to_delete = await self.get_messages_for_deletion(
            channel=channel, number=number, before=before, delete_pinned=delete_pinned
        )
        # Also remove the invoking command message itself.
        to_delete.append(ctx.message)
        reason = "{}({}) deleted {} messages in channel #{}.".format(
            author.name,
            author.id,
            humanize_number(len(to_delete), override_locale="en_US"),
            channel.name,
        )
        log.info(reason)
        await mass_purge(to_delete, channel)
        await self.send_optional_notification(len(to_delete), channel, subtract_invoking=True)
    @cleanup.command()
    @commands.guild_only()
    @checks.mod_or_permissions(manage_messages=True)
    @commands.bot_has_permissions(manage_messages=True)
    async def between(
        self,
        ctx: commands.Context,
        one: RawMessageIds,
        two: RawMessageIds,
        delete_pinned: bool = False,
    ):
        """Delete the messages between Message One and Message Two, providing the messages IDs.
        The first message ID should be the older message and the second one the newer.
        Example:
        - `[p]cleanup between 123456789123456789 987654321987654321`
        **Arguments:**
        - `<one>` The id of the message to cleanup after. This message won't be deleted.
        - `<two>` The id of the message to cleanup before. This message won't be deleted.
        - `<delete_pinned>` Whether to delete pinned messages or not. Defaults to False
        """
        channel = ctx.channel
        author = ctx.author
        # Both endpoints must exist in this channel; bail with a clear error otherwise.
        try:
            mone = await channel.fetch_message(one)
        except discord.errors.NotFound:
            return await ctx.send(
                _("Could not find a message with the ID of {id}.".format(id=one))
            )
        try:
            mtwo = await channel.fetch_message(two)
        except discord.errors.NotFound:
            return await ctx.send(
                _("Could not find a message with the ID of {id}.".format(id=two))
            )
        to_delete = await self.get_messages_for_deletion(
            channel=channel, before=mtwo, after=mone, delete_pinned=delete_pinned
        )
        # Also remove the invoking command message itself.
        to_delete.append(ctx.message)
        reason = "{}({}) deleted {} messages in channel #{}.".format(
            author.name,
            author.id,
            humanize_number(len(to_delete), override_locale="en_US"),
            channel.name,
        )
        log.info(reason)
        await mass_purge(to_delete, channel)
        await self.send_optional_notification(len(to_delete), channel, subtract_invoking=True)
@cleanup.command()
@commands.guild_only()
@checks.mod_or_permissions(manage_messages=True)
@commands.bot_has_permissions(manage_messages=True)
async def messages(
self, ctx: commands.Context, number: positive_int, delete_pinned: bool = False
):
"""Delete the last X messages in the current channel.
Example:
- `[p]cleanup messages 26`
**Arguments:**
- `<number>` The max number of messages to cleanup. Must be a positive integer.
- `<delete_pinned>` Whether to delete pinned messages or not. Defaults to False
"""
channel = ctx.channel
author = ctx.author
if number > 100:
cont = await self.check_100_plus(ctx, number)
if not cont:
return
to_delete = await self.get_messages_for_deletion(
channel=channel, number=number, before=ctx.message, delete_pinned=delete_pinned
)
to_delete.append(ctx.message)
reason = "{}({}) deleted {} messages in channel #{}.".format(
author.name, author.id, len(to_delete), channel.name
)
log.info(reason)
await mass_purge(to_delete, channel)
await self.send_optional_notification(len(to_delete), channel, subtract_invoking=True)
    @cleanup.command(name="bot")
    @commands.guild_only()
    @checks.mod_or_permissions(manage_messages=True)
    @commands.bot_has_permissions(manage_messages=True)
    async def cleanup_bot(
        self, ctx: commands.Context, number: positive_int, delete_pinned: bool = False
    ):
        """Clean up command messages and messages from the bot in the current channel.
        Can only cleanup custom commands and alias commands if those cogs are loaded.
        **Arguments:**
        - `<number>` The max number of messages to cleanup. Must be a positive integer.
        - `<delete_pinned>` Whether to delete pinned messages or not. Defaults to False
        """
        channel = ctx.channel
        author = ctx.message.author
        # Bulk deletions above 100 messages require explicit confirmation.
        if number > 100:
            cont = await self.check_100_plus(ctx, number)
            if not cont:
                return
        prefixes = await self.bot.get_prefix(ctx.message)  # This returns all server prefixes
        if isinstance(prefixes, str):
            prefixes = [prefixes]
        # In case some idiot sets a null prefix
        if "" in prefixes:
            prefixes.remove("")
        # Optional integrations: also match custom-command and alias invocations
        # when those cogs are loaded.
        cc_cog = self.bot.get_cog("CustomCommands")
        if cc_cog is not None:
            command_names: Set[str] = await cc_cog.get_command_names(ctx.guild)
            is_cc = lambda name: name in command_names
        else:
            is_cc = lambda name: False
        alias_cog = self.bot.get_cog("Alias")
        if alias_cog is not None:
            alias_names: Set[str] = set(
                a.name for a in await alias_cog._aliases.get_global_aliases()
            ) | set(a.name for a in await alias_cog._aliases.get_guild_aliases(ctx.guild))
            is_alias = lambda name: name in alias_names
        else:
            is_alias = lambda name: False
        bot_id = self.bot.user.id
        def check(m):
            # Delete: the bot's own messages, the invocation itself, and anything
            # that looks like a known command / alias / custom command invocation.
            if m.author.id == bot_id:
                return True
            elif m == ctx.message:
                return True
            p = discord.utils.find(m.content.startswith, prefixes)
            if p and len(p) > 0:
                cmd_name = m.content[len(p) :].split(" ")[0]
                return (
                    bool(self.bot.get_command(cmd_name)) or is_alias(cmd_name) or is_cc(cmd_name)
                )
            return False
        to_delete = await self.get_messages_for_deletion(
            channel=channel,
            number=number,
            check=check,
            before=ctx.message,
            delete_pinned=delete_pinned,
        )
        to_delete.append(ctx.message)
        reason = (
            "{}({}) deleted {}"
            " command messages in channel #{}."
            "".format(
                author.name,
                author.id,
                humanize_number(len(to_delete), override_locale="en_US"),
                channel.name,
            )
        )
        log.info(reason)
        await mass_purge(to_delete, channel)
        await self.send_optional_notification(len(to_delete), channel, subtract_invoking=True)
    @cleanup.command(name="self")
    @check_self_permissions()
    async def cleanup_self(
        self,
        ctx: commands.Context,
        number: positive_int,
        match_pattern: str = None,
        delete_pinned: bool = False,
    ):
        """Clean up messages owned by the bot in the current channel.
        By default, all messages are cleaned. If a second argument is specified,
        it is used for pattern matching - only messages containing the given text will be deleted.
        Examples:
        - `[p]cleanup self 6`
        - `[p]cleanup self 10 Pong`
        - `[p]cleanup self 7 "" True`
        **Arguments:**
        - `<number>` The max number of messages to cleanup. Must be a positive integer.
        - `<match_pattern>` The text that messages must contain to be deleted. Use "" to skip this.
        - `<delete_pinned>` Whether to delete pinned messages or not. Defaults to False
        """
        channel = ctx.channel
        author = ctx.message.author
        # Bulk deletions above 100 messages require explicit confirmation.
        if number > 100:
            cont = await self.check_100_plus(ctx, number)
            if not cont:
                return
        # You can always delete your own messages, this is needed to purge
        can_mass_purge = False
        if type(author) is discord.Member:
            me = ctx.guild.me
            can_mass_purge = channel.permissions_for(me).manage_messages
        if match_pattern:
            def content_match(c):
                return match_pattern in c
        else:
            # NOTE(review): the "_" parameter shadows the module translator
            # inside this function body (unused here, so harmless).
            def content_match(_):
                return True
        def check(m):
            # Only the bot's own messages, optionally filtered by substring.
            if m.author.id != self.bot.user.id:
                return False
            elif content_match(m.content):
                return True
            return False
        to_delete = await self.get_messages_for_deletion(
            channel=channel,
            number=number,
            check=check,
            before=ctx.message,
            delete_pinned=delete_pinned,
        )
        # The invoking message can only be bulk-deleted with manage_messages.
        if can_mass_purge:
            to_delete.append(ctx.message)
        if ctx.guild:
            channel_name = "channel " + channel.name
        else:
            channel_name = str(channel)
        reason = (
            "{}({}) deleted {} messages "
            "sent by the bot in {}."
            "".format(
                author.name,
                author.id,
                humanize_number(len(to_delete), override_locale="en_US"),
                channel_name,
            )
        )
        log.info(reason)
        # Without manage_messages, fall back to deleting one-by-one.
        if can_mass_purge:
            await mass_purge(to_delete, channel)
        else:
            await slow_deletion(to_delete)
        await self.send_optional_notification(
            len(to_delete), channel, subtract_invoking=can_mass_purge
        )
    @cleanup.command(name="duplicates", aliases=["spam"])
    @commands.guild_only()
    @checks.mod_or_permissions(manage_messages=True)
    @commands.bot_has_permissions(manage_messages=True)
    async def cleanup_duplicates(
        self, ctx: commands.Context, number: positive_int = PositiveInt(50)
    ):
        """Deletes duplicate messages in the channel from the last X messages and keeps only one copy.
        Defaults to 50.
        **Arguments:**
        - `<number>` The number of messages to check for duplicates. Must be a positive integer.
        """
        msgs = []
        spam = []
        def check(m):
            if m.attachments:
                return False
            # Duplicate identity: author + content + embed payloads.
            c = (m.author.id, m.content, [e.to_dict() for e in m.embeds])
            if c in msgs:
                # NOTE: list membership makes this O(n^2) over the scanned
                # window; embed dicts are unhashable, so a set isn't a drop-in.
                spam.append(m)
                return True
            else:
                msgs.append(c)
                return False
        to_delete = await self.get_messages_for_deletion(
            channel=ctx.channel, limit=number, check=check, before=ctx.message
        )
        # Confirmation happens after scanning, once the real count is known.
        if len(to_delete) > 100:
            cont = await self.check_100_plus(ctx, len(to_delete))
            if not cont:
                return
        log.info(
            "%s (%s) deleted %s spam messages in channel %s (%s).",
            ctx.author,
            ctx.author.id,
            len(to_delete),
            ctx.channel,
            ctx.channel.id,
        )
        to_delete.append(ctx.message)
        await mass_purge(to_delete, ctx.channel)
        await self.send_optional_notification(len(to_delete), ctx.channel, subtract_invoking=True)
    @commands.group()
    @commands.admin_or_permissions(manage_messages=True)
    async def cleanupset(self, ctx: commands.Context):
        """Manage the settings for the cleanup command."""
        # Group container only; subcommands do the actual work.
        pass
@cleanupset.command(name="notify")
async def cleanupset_notify(self, ctx: commands.Context):
"""Toggle clean up notification settings."""
toggle = await self.config.guild(ctx.guild).notify()
if toggle:
await self.config.guild(ctx.guild).notify.set(False)
await ctx.send(_("I will no longer notify of message deletions."))
else:
await self.config.guild(ctx.guild).notify.set(True)
await ctx.send(_("I will now notify of message deletions."))
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/cleanup/cleanup.py
| 0.833392 | 0.181535 |
cleanup.py
|
pypi
|
from typing import Optional, Tuple, Union
from datetime import timezone, timedelta, datetime
from .abc import MixinMeta
import discord
from redbot.core import commands, checks, i18n, modlog
from redbot.core.utils.chat_formatting import (
bold,
humanize_timedelta,
humanize_list,
pagify,
format_perms_list,
)
from redbot.core.utils.mod import get_audit_reason
from .converters import MuteTime
_ = i18n.Translator("Mutes", __file__)
class VoiceMutes(MixinMeta):
"""
This handles all voice channel related muting
"""
    @staticmethod
    async def _voice_perm_check(
        ctx: commands.Context, user_voice_state: Optional[discord.VoiceState], **perms: bool
    ) -> Tuple[bool, Optional[str]]:
        """Check if the bot and user have sufficient permissions for voicebans.
        This also verifies that the user's voice state and connected
        channel are not ``None``.
        Returns
        -------
        bool
            ``True`` if the permissions are sufficient and the user has
            a valid voice state.
        """
        if user_voice_state is None or user_voice_state.channel is None:
            return False, _("That user is not in a voice channel.")
        voice_channel: discord.VoiceChannel = user_voice_state.channel
        # Build a Permissions object from the keyword flags (e.g. mute_members=True).
        required_perms = discord.Permissions()
        required_perms.update(**perms)
        if not voice_channel.permissions_for(ctx.me) >= required_perms:
            return (
                False,
                _("I require the {perms} permission(s) in that user's channel to do that.").format(
                    perms=format_perms_list(required_perms)
                ),
            )
        # NORMAL permission state means no explicit rule/override applies,
        # so the invoker must hold the permissions themselves.
        if (
            ctx.permission_state is commands.PermState.NORMAL
            and not voice_channel.permissions_for(ctx.author) >= required_perms
        ):
            return (
                False,
                _(
                    "You must have the {perms} permission(s) in that user's channel to use this "
                    "command."
                ).format(perms=format_perms_list(required_perms)),
            )
        return True, None
    @commands.command(name="voicemute", usage="<users...> [reason]")
    @commands.guild_only()
    async def voice_mute(
        self,
        ctx: commands.Context,
        users: commands.Greedy[discord.Member],
        *,
        time_and_reason: MuteTime = {},
    ):
        """Mute a user in their current voice channel.
        `<users...>` is a space separated list of usernames, ID's, or mentions.
        `[time_and_reason]` is the time to mute for and reason. Time is
        any valid time length such as `30 minutes` or `2 days`. If nothing
        is provided the mute will use the set default time or indefinite if not set.
        Examples:
        `[p]voicemute @member1 @member2 spam 5 hours`
        `[p]voicemute @member1 3 days`"""
        # NOTE(review): the mutable {} default is only read (.get) here and acts
        # as the converter's default -- confirm before changing it to None.
        if not users:
            return await ctx.send_help()
        if ctx.me in users:
            return await ctx.send(_("You cannot mute me."))
        if ctx.author in users:
            return await ctx.send(_("You cannot mute yourself."))
        async with ctx.typing():
            success_list = []
            issue_list = []
            for user in users:
                user_voice_state = user.voice
                can_move, perm_reason = await self._voice_perm_check(
                    ctx, user_voice_state, mute_members=True, manage_permissions=True
                )
                if not can_move:
                    issue_list.append((user, perm_reason))
                    continue
                duration = time_and_reason.get("duration", None)
                reason = time_and_reason.get("reason", None)
                time = ""
                until = None
                if duration:
                    until = datetime.now(timezone.utc) + duration
                    time = _(" for {duration}").format(
                        duration=humanize_timedelta(timedelta=duration)
                    )
                else:
                    # No explicit duration: fall back to the guild's default mute time.
                    default_duration = await self.config.guild(ctx.guild).default_time()
                    if default_duration:
                        until = datetime.now(timezone.utc) + timedelta(seconds=default_duration)
                        time = _(" for {duration}").format(
                            duration=humanize_timedelta(
                                timedelta=timedelta(seconds=default_duration)
                            )
                        )
                guild = ctx.guild
                author = ctx.author
                channel = user_voice_state.channel
                audit_reason = get_audit_reason(author, reason, shorten=True)
                success = await self.channel_mute_user(
                    guild, channel, author, user, until, audit_reason
                )
                if success["success"]:
                    if "reason" in success and success["reason"]:
                        # Mute applied but with a caveat -- surface it as an issue.
                        issue_list.append((user, success["reason"]))
                    else:
                        success_list.append(user)
                        await modlog.create_case(
                            self.bot,
                            guild,
                            ctx.message.created_at.replace(tzinfo=timezone.utc),
                            "vmute",
                            user,
                            author,
                            reason,
                            until=until,
                            channel=channel,
                        )
                        await self._send_dm_notification(
                            user, author, guild, _("Voice mute"), reason, duration
                        )
                        # Remember the user's previous overwrites so unmute can restore them.
                        async with self.config.member(user).perms_cache() as cache:
                            cache[channel.id] = success["old_overs"]
                else:
                    issue_list.append((user, success["reason"]))
            if success_list:
                # NOTE(review): `time` holds the value from the last loop
                # iteration -- presumably identical for every user; confirm.
                msg = _("{users} has been muted in this channel{time}.")
                if len(success_list) > 1:
                    msg = _("{users} have been muted in this channel{time}.")
                await ctx.send(
                    msg.format(users=humanize_list([f"{u}" for u in success_list]), time=time)
                )
            if issue_list:
                msg = _("The following users could not be muted\n")
                for user, issue in issue_list:
                    msg += f"{user}: {issue}\n"
                await ctx.send_interactive(pagify(msg))
    @commands.command(name="voiceunmute", usage="<users...> [reason]")
    @commands.guild_only()
    async def unmute_voice(
        self,
        ctx: commands.Context,
        users: commands.Greedy[discord.Member],
        *,
        reason: Optional[str] = None,
    ):
        """Unmute a user in their current voice channel.
        `<users...>` is a space separated list of usernames, ID's, or mentions.
        `[reason]` is the reason for the unmute."""
        if not users:
            return await ctx.send_help()
        if ctx.me in users:
            return await ctx.send(_("You cannot unmute me."))
        if ctx.author in users:
            return await ctx.send(_("You cannot unmute yourself."))
        async with ctx.typing():
            issue_list = []
            success_list = []
            for user in users:
                user_voice_state = user.voice
                can_move, perm_reason = await self._voice_perm_check(
                    ctx, user_voice_state, mute_members=True, manage_permissions=True
                )
                if not can_move:
                    issue_list.append((user, perm_reason))
                    continue
                guild = ctx.guild
                author = ctx.author
                channel = user_voice_state.channel
                audit_reason = get_audit_reason(author, reason, shorten=True)
                success = await self.channel_unmute_user(
                    guild, channel, author, user, audit_reason
                )
                if success["success"]:
                    if "reason" in success and success["reason"]:
                        # Unmute applied but with a caveat -- surface it as an issue.
                        issue_list.append((user, success["reason"]))
                    else:
                        success_list.append(user)
                        await modlog.create_case(
                            self.bot,
                            guild,
                            ctx.message.created_at.replace(tzinfo=timezone.utc),
                            "vunmute",
                            user,
                            author,
                            reason,
                            until=None,
                            channel=channel,
                        )
                        await self._send_dm_notification(
                            user, author, guild, _("Voice unmute"), reason
                        )
                else:
                    issue_list.append((user, success["reason"]))
            if success_list:
                # Persist the in-memory mute tracking for this channel.
                # NOTE(review): `channel` is the one from the last loop iteration --
                # presumably all users share a channel here; confirm.
                if channel.id in self._channel_mutes and self._channel_mutes[channel.id]:
                    await self.config.channel(channel).muted_users.set(self._channel_mutes[channel.id])
                else:
                    await self.config.channel(channel).muted_users.clear()
                await ctx.send(
                    _("{users} unmuted in this channel.").format(
                        users=humanize_list([f"{u}" for u in success_list])
                    )
                )
            if issue_list:
                msg = _("The following users could not be unmuted\n")
                for user, issue in issue_list:
                    msg += f"{user}: {issue}\n"
                await ctx.send_interactive(pagify(msg))
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/mutes/voicemutes.py
| 0.848251 | 0.175185 |
voicemutes.py
|
pypi
|
from typing import Tuple, Dict, Optional, List, Union
from re import findall
import discord
from discord.ext.commands.view import StringView
from redbot.core import commands, Config
from redbot.core.i18n import Translator
from redbot.core.utils import AsyncIter
_ = Translator("Alias", __file__)
class ArgParseError(Exception):
    """Error raised for alias argument-parsing failures."""
class AliasEntry:
    """An object containing all required information about an alias"""

    name: str
    command: Union[Tuple[str], str]
    creator: int
    guild: Optional[int]  # None for global aliases
    uses: int

    def __init__(
        self, name: str, command: Union[Tuple[str], str], creator: int, guild: Optional[int]
    ):
        super().__init__()
        self.name = name
        self.command = command
        self.creator = creator
        self.guild = guild
        self.uses = 0

    def inc(self):
        """
        Increases the `uses` stat by 1.
        :return: new use count
        """
        self.uses += 1
        return self.uses

    # Fixed: this method builds and returns a list of words, but was
    # annotated as returning ``str``.
    def get_extra_args_from_alias(self, message: discord.Message, prefix: str) -> List[str]:
        """
        When an alias is executed by a user in chat this function tries
        to get any extra arguments passed in with the call.
        Whitespace will be trimmed from both ends.
        :param message: the invoking message whose content is parsed
        :param prefix: the prefix the alias was invoked with
        :return: list of extra argument words, quoted phrases kept intact
        """
        known_content_length = len(prefix) + len(self.name)
        extra = message.content[known_content_length:]
        view = StringView(extra)
        view.skip_ws()
        extra = []
        while not view.eof:
            prev = view.index
            word = view.get_quoted_word()
            # get_quoted_word strips surrounding quotes; re-attach them so the
            # argument round-trips exactly as the user typed it.
            if len(word) < view.index - prev:
                word = "".join((view.buffer[prev], word, view.buffer[view.index - 1]))
            extra.append(word)
            view.skip_ws()
        return extra

    def to_json(self) -> dict:
        """Serialize this entry for storage in Config."""
        return {
            "name": self.name,
            "command": self.command,
            "creator": self.creator,
            "guild": self.guild,
            "uses": self.uses,
        }

    @classmethod
    def from_json(cls, data: dict):
        """Rebuild an entry from its `to_json` representation."""
        ret = cls(data["name"], data["command"], data["creator"], data["guild"])
        ret.uses = data.get("uses", 0)
        return ret
class AliasCache:
    """In-memory cache of global and per-guild aliases backed by Config.

    When ``cache_enabled`` is ``False``, every lookup goes straight to Config.
    """

    def __init__(self, config: Config, cache_enabled: bool = True):
        self.config = config
        self._cache_enabled = cache_enabled
        self._loaded = False
        # {guild_id or None (global): {alias_name: AliasEntry}}
        self._aliases: Dict[Optional[int], Dict[str, AliasEntry]] = {None: {}}

    async def anonymize_aliases(self, user_id: int):
        """Replace ``user_id`` as creator of any alias with a sentinel ID (data-deletion scrub)."""
        async with self.config.entries() as global_aliases:
            for a in global_aliases:
                if a.get("creator", 0) == user_id:
                    a["creator"] = 0xDE1
                    if self._cache_enabled:
                        self._aliases[None][a["name"]] = AliasEntry.from_json(a)

        all_guilds = await self.config.all_guilds()

        async for guild_id, guild_data in AsyncIter(all_guilds.items(), steps=100):
            for a in guild_data["entries"]:
                if a.get("creator", 0) == user_id:
                    break
            else:
                continue
            # basically, don't build a context manager without a need.
            async with self.config.guild_from_id(guild_id).entries() as entry_list:
                for a in entry_list:
                    if a.get("creator", 0) == user_id:
                        a["creator"] = 0xDE1
                        if self._cache_enabled:
                            # setdefault: the guild may not be cached yet.
                            self._aliases.setdefault(guild_id, {})[
                                a["name"]
                            ] = AliasEntry.from_json(a)

    async def load_aliases(self):
        """Populate the cache from Config; only marks loaded when caching is off."""
        if not self._cache_enabled:
            self._loaded = True
            return
        for alias in await self.config.entries():
            self._aliases[None][alias["name"]] = AliasEntry.from_json(alias)

        all_guilds = await self.config.all_guilds()

        async for guild_id, guild_data in AsyncIter(all_guilds.items(), steps=100):
            if guild_id not in self._aliases:
                self._aliases[guild_id] = {}
            for alias in guild_data["entries"]:
                self._aliases[guild_id][alias["name"]] = AliasEntry.from_json(alias)
        self._loaded = True

    async def get_aliases(self, ctx: commands.Context) -> List[AliasEntry]:
        """Returns all possible aliases with the given context"""
        global_aliases: List[AliasEntry] = []
        server_aliases: List[AliasEntry] = []
        global_aliases = await self.get_global_aliases()
        # BUGFIX: previously also required ``ctx.guild.id in self._aliases``,
        # which silently dropped guild aliases whenever the cache was disabled.
        # get_guild_aliases() already returns [] for guilds not in the cache.
        if ctx.guild:
            server_aliases = await self.get_guild_aliases(ctx.guild)
        return global_aliases + server_aliases

    async def get_guild_aliases(self, guild: discord.Guild) -> List[AliasEntry]:
        """Returns all guild specific aliases"""
        aliases: List[AliasEntry] = []
        if self._cache_enabled:
            if guild.id in self._aliases:
                for _, alias in self._aliases[guild.id].items():
                    aliases.append(alias)
        else:
            aliases = [AliasEntry.from_json(d) for d in await self.config.guild(guild).entries()]
        return aliases

    async def get_global_aliases(self) -> List[AliasEntry]:
        """Returns all global specific aliases"""
        aliases: List[AliasEntry] = []
        if self._cache_enabled:
            for _, alias in self._aliases[None].items():
                aliases.append(alias)
        else:
            aliases = [AliasEntry.from_json(d) for d in await self.config.entries()]
        return aliases

    async def get_alias(
        self, guild: Optional[discord.Guild], alias_name: str
    ) -> Optional[AliasEntry]:
        """Returns an AliasEntry object if the provided alias_name is a registered alias"""
        server_aliases: List[AliasEntry] = []

        if self._cache_enabled:
            if alias_name in self._aliases[None]:
                return self._aliases[None][alias_name]
            if guild is not None:
                if guild.id in self._aliases:
                    if alias_name in self._aliases[guild.id]:
                        return self._aliases[guild.id][alias_name]
        else:
            if guild:
                # BUGFIX: was ``self.config.guild(guild.id)`` — Config.guild()
                # takes a Guild object, not a bare ID.
                server_aliases = [
                    AliasEntry.from_json(d) for d in await self.config.guild(guild).entries()
                ]
            global_aliases = [AliasEntry.from_json(d) for d in await self.config.entries()]
            all_aliases = global_aliases + server_aliases
            for alias in all_aliases:
                if alias.name == alias_name:
                    return alias
        return None

    @staticmethod
    def format_command_for_alias(command: str) -> str:
        # This was present in add_alias previously
        # Made this into a separate method so as to reuse the same code in edit_alias
        indices = findall(r"{(\d*)}", command)
        if indices:
            try:
                # BUGFIX: was ``int(a[0])``, which only read the first digit of
                # each placeholder and broke any alias using ``{10}`` or higher.
                indices = [int(a) for a in indices]
            except ValueError:
                # int("") — the user wrote ``{}`` without a number.
                raise ArgParseError(_("Arguments must be specified with a number."))
            low = min(indices)
            indices = [a - low for a in indices]
            high = max(indices)
            gaps = set(indices).symmetric_difference(range(high + 1))
            if gaps:
                raise ArgParseError(
                    _("Arguments must be sequential. Missing arguments: ")
                    + ", ".join(str(i + low) for i in gaps)
                )
            # Renumber placeholders {low}..{low+high} down to {0}..{high}.
            command = command.format(*(f"{{{i}}}" for i in range(-low, high + low + 1)))
        return command

    async def add_alias(
        self, ctx: commands.Context, alias_name: str, command: str, global_: bool = False
    ) -> AliasEntry:
        """Create, cache and persist a new alias; returns the new entry."""
        command = self.format_command_for_alias(command)

        if global_:
            alias = AliasEntry(alias_name, command, ctx.author.id, None)
            settings = self.config
            if self._cache_enabled:
                self._aliases[None][alias.name] = alias
        else:
            alias = AliasEntry(alias_name, command, ctx.author.id, ctx.guild.id)
            settings = self.config.guild(ctx.guild)
            if self._cache_enabled:
                if ctx.guild.id not in self._aliases:
                    self._aliases[ctx.guild.id] = {}
                self._aliases[ctx.guild.id][alias.name] = alias

        async with settings.entries() as curr_aliases:
            curr_aliases.append(alias.to_json())

        return alias

    async def edit_alias(
        self, ctx: commands.Context, alias_name: str, command: str, global_: bool = False
    ) -> bool:
        """Rewrite the command of an existing alias; returns whether it was found."""
        command = self.format_command_for_alias(command)

        if global_:
            settings = self.config
        else:
            settings = self.config.guild(ctx.guild)

        async with settings.entries() as aliases:
            for index, alias in enumerate(aliases):
                if alias["name"] == alias_name:
                    alias_edited = AliasEntry.from_json(alias)
                    alias_edited.command = command
                    aliases[index] = alias_edited.to_json()
                    if self._cache_enabled:
                        if global_:
                            self._aliases[None][alias_edited.name] = alias_edited
                        else:
                            # setdefault: the guild may not be cached yet.
                            self._aliases.setdefault(ctx.guild.id, {})[
                                alias_edited.name
                            ] = alias_edited
                    return True
        return False

    async def delete_alias(
        self, ctx: commands.Context, alias_name: str, global_: bool = False
    ) -> bool:
        """Remove an alias from storage and cache; returns whether it was found."""
        if global_:
            settings = self.config
        else:
            settings = self.config.guild(ctx.guild)

        async with settings.entries() as aliases:
            for alias in aliases:
                if alias["name"] == alias_name:
                    aliases.remove(alias)
                    if self._cache_enabled:
                        if global_:
                            del self._aliases[None][alias_name]
                        else:
                            del self._aliases[ctx.guild.id][alias_name]
                    return True
        return False
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/cogs/alias/alias_entry.py
| 0.863607 | 0.220678 |
alias_entry.py
|
pypi
|
import random
from collections import namedtuple
from pathlib import Path
import weakref
import pytest
from redbot.core import Config
from redbot.core.bot import Red
from redbot.core import config as config_module, drivers
__all__ = [
"override_data_path",
"coroutine",
"driver",
"config",
"config_fr",
"red",
"guild_factory",
"empty_guild",
"empty_channel",
"empty_member",
"empty_message",
"empty_role",
"empty_user",
"member_factory",
"user_factory",
"ctx",
]
@pytest.fixture(autouse=True)
def override_data_path(tmpdir):
    """Point Red's data path at this test's tmpdir (applied to every test)."""
    from redbot.core import data_manager

    # NOTE(review): this rebinds basic_config to the shared *default* dict and
    # then mutates it in place — confirm the shared default is meant to change.
    data_manager.basic_config = data_manager.basic_config_default
    data_manager.basic_config["DATA_PATH"] = str(tmpdir)
@pytest.fixture()
def coroutine():
    """Return a coroutine function that echoes back its ``(args, kwargs)``."""

    async def some_coro(*args, **kwargs):
        return args, kwargs

    return some_coro
@pytest.fixture()
def driver(tmpdir_factory):
    """Fresh storage driver rooted in a uniquely named temp directory."""
    import uuid

    # Random dir name + random cog identifier keep tests isolated from each other.
    rand = str(uuid.uuid4())
    path = Path(str(tmpdir_factory.mktemp(rand)))
    return drivers.get_driver("PyTest", str(random.randint(1, 999999)), data_path_override=path)
@pytest.fixture()
def config(driver):
    """Mocked Config object (without force_registration)."""
    # Reset the module-level Config cache so tests never share instances.
    config_module._config_cache = weakref.WeakValueDictionary()
    conf = Config(cog_name="PyTest", unique_identifier=driver.unique_cog_identifier, driver=driver)
    yield conf
@pytest.fixture()
def config_fr(driver):
    """
    Mocked config object with force_register enabled.
    """
    # Reset the module-level Config cache so tests never share instances.
    config_module._config_cache = weakref.WeakValueDictionary()
    conf = Config(
        cog_name="PyTest",
        unique_identifier=driver.unique_cog_identifier,
        driver=driver,
        force_registration=True,
    )
    yield conf
# region Dpy Mocks
@pytest.fixture()
def guild_factory():
    """Factory producing mock guilds with a random ID and an empty member list."""
    mock_guild = namedtuple("Guild", "id members")

    class GuildFactory:
        def get(self):
            return mock_guild(random.randint(1, 999999999), [])

    return GuildFactory()
@pytest.fixture()
def empty_guild(guild_factory):
    """A single mock guild from the factory."""
    return guild_factory.get()
@pytest.fixture(scope="module")
def empty_channel():
    """A mock channel with a random ID (shared per test module)."""
    mock_channel = namedtuple("Channel", "id")
    return mock_channel(random.randint(1, 999999999))
@pytest.fixture(scope="module")
def empty_role():
    """A mock role with a random ID (shared per test module)."""
    mock_role = namedtuple("Role", "id")
    return mock_role(random.randint(1, 999999999))
@pytest.fixture()
def member_factory(guild_factory):
    """Factory producing mock members, each in a fresh mock guild."""
    mock_member = namedtuple("Member", "id guild display_name")

    class MemberFactory:
        def get(self):
            return mock_member(random.randint(1, 999999999), guild_factory.get(), "Testing_Name")

    return MemberFactory()
@pytest.fixture()
def empty_member(member_factory):
    """A single mock member from the factory."""
    return member_factory.get()
@pytest.fixture()
def user_factory():
    """Factory producing mock users with a random ID."""
    mock_user = namedtuple("User", "id")

    class UserFactory:
        def get(self):
            return mock_user(random.randint(1, 999999999))

    return UserFactory()
@pytest.fixture()
def empty_user(user_factory):
    """A single mock user from the factory."""
    return user_factory.get()
@pytest.fixture(scope="module")
def empty_message():
    """A mock message with placeholder content (shared per test module)."""
    mock_msg = namedtuple("Message", "content")
    return mock_msg("No content.")
@pytest.fixture()
def ctx(empty_member, empty_channel, empty_message, red):
    """Mock invocation context wiring together the member/channel/message/bot mocks.

    BUGFIX: ``empty_message`` is now requested as a fixture parameter. Previously
    the module-level *fixture function* object was passed as the message, so
    ``ctx.message`` was a function instead of the mock message namedtuple.
    """
    mock_ctx = namedtuple("Context", "author guild channel message bot")
    return mock_ctx(empty_member, empty_member.guild, empty_channel, empty_message, red)
# endregion
# region Red Mock
@pytest.fixture()
def red(config_fr):
    """Minimal Red bot instance backed by the force-registration mocked config."""
    from redbot.core.cli import parse_cli_flags

    cli_flags = parse_cli_flags(["ignore_me"])

    description = "Red v3 - Alpha"

    # NOTE(review): monkeypatches Config.get_core_conf globally (not restored
    # afterwards) so the bot picks up the mocked config.
    Config.get_core_conf = lambda *args, **kwargs: config_fr

    red = Red(cli_flags=cli_flags, description=description, dm_help=None, owner_ids=set())

    yield red
# endregion
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/pytest/core.py
| 0.420362 | 0.204978 |
core.py
|
pypi
|
from collections import namedtuple
from pathlib import Path
import json
import subprocess as sp
import shutil
import pytest
from redbot.cogs.downloader.repo_manager import RepoManager, Repo, ProcessFormatter
from redbot.cogs.downloader.installable import Installable, InstalledModule
__all__ = [
"GIT_VERSION",
"repo_manager",
"repo",
"bot_repo",
"INFO_JSON",
"LIBRARY_INFO_JSON",
"installable",
"installed_cog",
"library_installable",
"fake_run_noprint",
"fake_current_commit",
"_session_git_repo",
"git_repo",
"cloned_git_repo",
"git_repo_with_remote",
]
def _get_git_version():
    """Returns version tuple in format: (major, minor)"""
    # "git version " is exactly 12 characters; slice it off the front.
    raw_version = sp.check_output(("git", "version"), text=True)[12:]
    # we're only interested in major and minor version if we will ever need micro
    # there's more handling needed for versions like `2.25.0-rc1` and `2.25.0.windows.1`
    major, minor = raw_version.split(".", maxsplit=3)[:2]
    return (int(major), int(minor))


GIT_VERSION = _get_git_version()
async def fake_run_noprint(*args, **kwargs):
    """Stand-in subprocess runner: succeeds and records the call it received."""
    FakeResult = namedtuple("fake_result", "returncode result")
    return FakeResult(0, (args, kwargs))
async def fake_current_commit(*args, **kwargs):
    """Stand-in for current-commit lookup; always reports the same fake value."""
    return "fake_result"
@pytest.fixture
def repo_manager(tmpdir_factory):
    """A fresh RepoManager instance."""
    rm = RepoManager()
    # rm.repos_folder = Path(str(tmpdir_factory.getbasetemp())) / 'repos'
    return rm
@pytest.fixture
def repo(tmp_path):
    """A Repo object pointed at an empty folder under tmp_path (nothing is cloned)."""
    repo_folder = tmp_path / "repos" / "squid"
    repo_folder.mkdir(parents=True, exist_ok=True)

    return Repo(
        url="https://github.com/tekulvw/Squid-Plugins",
        name="squid",
        branch="rewrite_cogs",
        commit="6acb5decbb717932e5dc0cda7fca0eff452c47dd",
        folder_path=repo_folder,
    )
@pytest.fixture
def bot_repo(event_loop):
    """A Repo with deliberately wrong branch/URL, rooted at the current directory."""
    cwd = Path.cwd()
    return Repo(
        name="Red-DiscordBot",
        branch="WRONG",
        commit="",
        url="https://empty.com/something.git",
        folder_path=cwd,
    )
# Installable
# Sample info.json payload for a cog-type installable used by the fixtures below.
INFO_JSON = {
    "author": ("tekulvw",),
    "min_bot_version": "3.0.0",
    "max_bot_version": "3.0.2",
    "description": "A long description",
    "hidden": False,
    "install_msg": "A post-installation message",
    "required_cogs": {},
    "requirements": ("tabulate",),
    "short": "A short description",
    "tags": ("tag1", "tag2"),
    "type": "COG",
}
# Sample info.json payload for a shared-library installable.
LIBRARY_INFO_JSON = {
    "author": ("seputaes",),
    "min_bot_version": "3.0.0",
    "max_bot_version": "3.0.2",
    "description": "A long library description",
    "hidden": False,  # libraries are always hidden, this tests it will be flipped
    "install_msg": "A library install message",
    "required_cogs": {},
    "requirements": ("tabulate",),
    "short": "A short library description",
    "tags": ("libtag1", "libtag2"),
    "type": "SHARED_LIBRARY",
}
@pytest.fixture
def installable(tmpdir):
    """An Installable built from INFO_JSON written into a temp cog folder."""
    cog_path = tmpdir.mkdir("test_repo").mkdir("test_cog")
    info_path = cog_path.join("info.json")
    info_path.write_text(json.dumps(INFO_JSON), "utf-8")

    cog_info = Installable(Path(str(cog_path)))
    return cog_info
@pytest.fixture
def installed_cog(tmpdir):
    """An InstalledModule built from INFO_JSON written into a temp cog folder."""
    cog_path = tmpdir.mkdir("test_repo").mkdir("test_installed_cog")
    info_path = cog_path.join("info.json")
    info_path.write_text(json.dumps(INFO_JSON), "utf-8")

    cog_info = InstalledModule(Path(str(cog_path)))
    return cog_info
@pytest.fixture
def library_installable(tmpdir):
    """An Installable built from LIBRARY_INFO_JSON (shared-library type)."""
    lib_path = tmpdir.mkdir("test_repo").mkdir("test_lib")
    info_path = lib_path.join("info.json")
    info_path.write_text(json.dumps(LIBRARY_INFO_JSON), "utf-8")

    cog_info = Installable(Path(str(lib_path)))
    return cog_info
# Git
# Pre-exported git history used to seed the session test repository.
TEST_REPO_EXPORT_PTH: Path = Path(__file__).parent / "downloader_testrepo.export"


def _init_test_repo(destination: Path):
    """Initialize a deterministic git repo at *destination*; returns the base git args."""
    # copied from tools/edit_testrepo.py
    git_dirparams = ("git", "-C", str(destination))
    init_commands = (
        (*git_dirparams, "init"),
        (*git_dirparams, "checkout", "-b", "master"),
        # Fixed identity and no GPG signing keep commit hashes reproducible.
        (*git_dirparams, "config", "--local", "user.name", "Cog-Creators"),
        (*git_dirparams, "config", "--local", "user.email", "[email protected]"),
        (*git_dirparams, "config", "--local", "commit.gpgSign", "false"),
    )

    for args in init_commands:
        sp.run(args, check=True)
    return git_dirparams
@pytest.fixture(scope="session")
async def _session_git_repo(tmp_path_factory, event_loop):
    """Session-scoped git repo imported once from the export file; copied per test."""
    # we will import repo only once once per session and duplicate the repo folder
    repo_path = tmp_path_factory.mktemp("session_git_repo")
    repo = Repo(name="redbot-testrepo", url="", branch="master", commit="", folder_path=repo_path)
    git_dirparams = _init_test_repo(repo_path)
    fast_import = sp.Popen((*git_dirparams, "fast-import", "--quiet"), stdin=sp.PIPE)
    with TEST_REPO_EXPORT_PTH.open(mode="rb") as f:
        fast_import.communicate(f.read())
    return_code = fast_import.wait()
    if return_code:
        raise Exception(f"git fast-import failed with code {return_code}")
    # NOTE(review): no check=True here — a failed reset would go unnoticed; confirm intended.
    sp.run((*git_dirparams, "reset", "--hard"))
    return repo
@pytest.fixture
async def git_repo(_session_git_repo, tmp_path, event_loop):
    """Per-test copy of the session repo so tests can mutate it freely."""
    # fixture only copies repo that was imported in _session_git_repo
    repo_path = tmp_path / "redbot-testrepo"
    shutil.copytree(_session_git_repo.folder_path, repo_path)
    repo = Repo(
        name="redbot-testrepo",
        url=_session_git_repo.url,
        branch=_session_git_repo.branch,
        commit=_session_git_repo.commit,
        folder_path=repo_path,
    )
    return repo
@pytest.fixture
async def cloned_git_repo(_session_git_repo, tmp_path, event_loop):
    """Clone of the *session* repo — its origin is shared, so don't edit the origin."""
    # don't use this if you want to edit origin repo
    repo_path = tmp_path / "redbot-cloned_testrepo"
    repo = Repo(
        name="redbot-testrepo",
        url=str(_session_git_repo.folder_path),
        branch=_session_git_repo.branch,
        commit=_session_git_repo.commit,
        folder_path=repo_path,
    )
    sp.run(("git", "clone", str(_session_git_repo.folder_path), str(repo_path)), check=True)
    return repo
@pytest.fixture
async def git_repo_with_remote(git_repo, tmp_path, event_loop):
    """Clone whose origin is the per-test copy — safe to modify the origin too."""
    # this can safely be used when you want to do changes to origin repo
    repo_path = tmp_path / "redbot-testrepo_with_remote"
    repo = Repo(
        name="redbot-testrepo",
        url=str(git_repo.folder_path),
        branch=git_repo.branch,
        commit=git_repo.commit,
        folder_path=repo_path,
    )
    sp.run(("git", "clone", str(git_repo.folder_path), str(repo_path)), check=True)
    return repo
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/pytest/downloader.py
| 0.563618 | 0.244374 |
downloader.py
|
pypi
|
from __future__ import annotations
import asyncio
import logging
from datetime import datetime, timezone
from typing import Union, List, Optional, TYPE_CHECKING, Literal
from functools import wraps
import discord
from redbot.core.utils import AsyncIter
from redbot.core.utils.chat_formatting import humanize_number
from . import Config, errors, commands
from .i18n import Translator
from .errors import BankPruneError
if TYPE_CHECKING:
from .bot import Red
_ = Translator("Bank API", __file__)
__all__ = [
"Account",
"get_balance",
"set_balance",
"withdraw_credits",
"deposit_credits",
"can_spend",
"transfer_credits",
"wipe_bank",
"get_account",
"is_global",
"set_global",
"get_bank_name",
"set_bank_name",
"get_currency_name",
"set_currency_name",
"get_default_balance",
"set_default_balance",
"get_max_balance",
"set_max_balance",
"cost",
"AbortPurchase",
"bank_prune",
]
_MAX_BALANCE = 2 ** 63 - 1  # largest value a signed 64-bit integer can hold

_SCHEMA_VERSION = 1
_DEFAULT_GLOBAL = {
    "schema_version": 0,
    "is_global": False,
    "bank_name": "Twentysix bank",
    "currency": "credits",
    "default_balance": 100,
    "max_balance": _MAX_BALANCE,
}

_DEFAULT_GUILD = {
    "bank_name": "Twentysix bank",
    "currency": "credits",
    "default_balance": 100,
    "max_balance": _MAX_BALANCE,
}

_DEFAULT_MEMBER = {"name": "", "balance": 0, "created_at": 0}
_DEFAULT_USER = _DEFAULT_MEMBER

# Module-level Config handle; populated by _init() before any API call is made.
_config: Config = None

log = logging.getLogger("red.core.bank")

_data_deletion_lock = asyncio.Lock()

# Process-local caches of global-bank settings, written back to by the setters.
_cache_is_global = None
_cache = {"bank_name": None, "currency": None, "default_balance": None, "max_balance": None}
async def _init():
    """Create the bank's Config, register defaults, and run schema migrations."""
    global _config
    _config = Config.get_conf(None, 384734293238749, cog_name="Bank", force_registration=True)
    _config.register_global(**_DEFAULT_GLOBAL)
    _config.register_guild(**_DEFAULT_GUILD)
    _config.register_member(**_DEFAULT_MEMBER)
    _config.register_user(**_DEFAULT_USER)
    await _migrate_config()
async def _migrate_config():
    """Step the stored schema version up to _SCHEMA_VERSION, one migration at a time."""
    schema_version = await _config.schema_version()

    if schema_version == _SCHEMA_VERSION:
        return

    if schema_version == 0:
        await _schema_0_to_1()
        schema_version += 1
        await _config.schema_version.set(schema_version)
async def _schema_0_to_1():
    """Migration: coerce all stored bank balances from float to int."""
    # convert floats in bank balances to ints
    # don't use anything seen below in extensions, it's optimized and controlled for here,
    # but can't be safe in 3rd party use
    # this CANNOT use ctx manager, because ctx managers compare before and after,
    # and floats can be equal to ints: (1.0 == 1) is True
    group = _config._get_base_group(_config.USER)
    bank_user_data = await group.all()
    for user_config in bank_user_data.values():
        if "balance" in user_config:
            user_config["balance"] = int(user_config["balance"])
    await group.set(bank_user_data)

    group = _config._get_base_group(_config.MEMBER)
    bank_member_data = await group.all()
    for guild_data in bank_member_data.values():
        for member_config in guild_data.values():
            if "balance" in member_config:
                member_config["balance"] = int(member_config["balance"])
    await group.set(bank_member_data)
async def _process_data_deletion(
    *, requester: Literal["discord_deleted_user", "owner", "user", "user_strict"], user_id: int
):
    """
    Bank has no reason to keep any of this data
    if the user doesn't want it kept,
    we won't special case any request type
    """
    if requester not in ("discord_deleted_user", "owner", "user", "user_strict"):
        # BUGFIX: stdlib logging does not accept str.format-style keyword args —
        # the old ``log.warning("... {req_type} ...", req_type=...)`` call raised
        # TypeError instead of logging. Use lazy %-style arguments.
        log.warning("Got unknown data request type `%s` for user, deleting anyway", requester)

    async with _data_deletion_lock:
        # Drop the global-bank account, then every per-guild account for this user.
        await _config.user_from_id(user_id).clear()
        all_members = await _config.all_members()
        async for guild_id, member_dict in AsyncIter(all_members.items(), steps=100):
            if user_id in member_dict:
                await _config.member_from_ids(guild_id, user_id).clear()
class Account:
    """A single account.

    This class should ONLY be instantiated by the bank itself."""

    def __init__(self, name: str, balance: int, created_at: datetime):
        self.name = name  # display name recorded when the account was created
        self.balance = balance  # current credit balance
        self.created_at = created_at  # decoded creation time (see _decode_time)
def _encoded_current_time() -> int:
"""Get the current UTC time as a timestamp.
Returns
-------
int
The current UTC timestamp.
"""
now = datetime.now(timezone.utc)
return _encode_time(now)
def _encode_time(time: datetime) -> int:
"""Convert a datetime object to a serializable int.
Parameters
----------
time : datetime.datetime
The datetime to convert.
Returns
-------
int
The timestamp of the datetime object.
"""
ret = int(time.timestamp())
return ret
def _decode_time(time: int) -> datetime:
"""Convert a timestamp to a datetime object.
Parameters
----------
time : int
The timestamp to decode.
Returns
-------
datetime.datetime
The datetime object from the timestamp.
"""
return datetime.utcfromtimestamp(time)
async def get_balance(member: discord.Member) -> int:
    """Get the current balance of a member.

    Parameters
    ----------
    member : discord.Member
        The member whose balance to check.

    Returns
    -------
    int
        The member's balance
    """
    acc = await get_account(member)
    return acc.balance
async def can_spend(member: discord.Member, amount: int) -> bool:
    """Determine if a member can spend the given amount.

    Parameters
    ----------
    member : discord.Member
        The member wanting to spend.
    amount : int
        The amount the member wants to spend.

    Raises
    ------
    TypeError
        If the amount is not an `int`.

    Returns
    -------
    bool
        :code:`True` if the member has a sufficient balance to spend the
        amount, else :code:`False`.
    """
    if not isinstance(amount, int):
        raise TypeError("Amount must be of type int, not {}.".format(type(amount)))
    # Negative amounts are never spendable.
    if _invalid_amount(amount):
        return False
    return await get_balance(member) >= amount
async def set_balance(member: Union[discord.Member, discord.User], amount: int) -> int:
    """Set an account balance.

    Parameters
    ----------
    member : Union[discord.Member, discord.User]
        The member whose balance to set.
    amount : int
        The amount to set the balance to.

    Returns
    -------
    int
        New account balance.

    Raises
    ------
    ValueError
        If attempting to set the balance to a negative number.
    RuntimeError
        If the bank is guild-specific and a discord.User object is provided.
    BalanceTooHigh
        If attempting to set the balance to a value greater than
        ``bank._MAX_BALANCE``.
    TypeError
        If the amount is not an `int`.
    """
    if not isinstance(amount, int):
        raise TypeError("Amount must be of type int, not {}.".format(type(amount)))
    if amount < 0:
        raise ValueError("Not allowed to have negative balance.")
    guild = getattr(member, "guild", None)
    max_bal = await get_max_balance(guild)
    if amount > max_bal:
        currency = await get_currency_name(guild)
        raise errors.BalanceTooHigh(
            user=member.display_name, max_balance=max_bal, currency_name=currency
        )
    if await is_global():
        group = _config.user(member)
    else:
        group = _config.member(member)
    await group.balance.set(amount)

    # First write to a fresh account: stamp its creation time and display name.
    if await group.created_at() == 0:
        time = _encoded_current_time()
        await group.created_at.set(time)
    if await group.name() == "":
        await group.name.set(member.display_name)

    return amount
def _invalid_amount(amount: int) -> bool:
    # Negative amounts are invalid; zero is allowed.
    return amount < 0
async def withdraw_credits(member: discord.Member, amount: int) -> int:
    """Remove a certain amount of credits from an account.

    Parameters
    ----------
    member : discord.Member
        The member to withdraw credits from.
    amount : int
        The amount to withdraw.

    Returns
    -------
    int
        New account balance.

    Raises
    ------
    ValueError
        If the withdrawal amount is invalid or if the account has insufficient
        funds.
    TypeError
        If the withdrawal amount is not an `int`.
    """
    if not isinstance(amount, int):
        raise TypeError("Withdrawal amount must be of type int, not {}.".format(type(amount)))
    if _invalid_amount(amount):
        raise ValueError(
            "Invalid withdrawal amount {} < 0".format(
                humanize_number(amount, override_locale="en_US")
            )
        )

    bal = await get_balance(member)
    if amount > bal:
        raise ValueError(
            "Insufficient funds {} > {}".format(
                humanize_number(amount, override_locale="en_US"),
                humanize_number(bal, override_locale="en_US"),
            )
        )

    return await set_balance(member, bal - amount)
async def deposit_credits(member: discord.Member, amount: int) -> int:
    """Add a given amount of credits to an account.

    Parameters
    ----------
    member : discord.Member
        The member to deposit credits to.
    amount : int
        The amount to deposit.

    Returns
    -------
    int
        The new balance.

    Raises
    ------
    ValueError
        If the deposit amount is invalid.
    TypeError
        If the deposit amount is not an `int`.
    """
    if not isinstance(amount, int):
        raise TypeError("Deposit amount must be of type int, not {}.".format(type(amount)))
    # NOTE(review): the check rejects only amount < 0 (zero deposits pass), but the
    # message below says "<= 0" — confirm which behavior is intended.
    if _invalid_amount(amount):
        raise ValueError(
            "Invalid deposit amount {} <= 0".format(
                humanize_number(amount, override_locale="en_US")
            )
        )

    bal = await get_balance(member)
    return await set_balance(member, amount + bal)
async def transfer_credits(
    from_: Union[discord.Member, discord.User],
    to: Union[discord.Member, discord.User],
    amount: int,
):
    """Transfer a given amount of credits from one account to another.

    Parameters
    ----------
    from_: Union[discord.Member, discord.User]
        The member to transfer from.
    to : Union[discord.Member, discord.User]
        The member to transfer to.
    amount : int
        The amount to transfer.

    Returns
    -------
    int
        The new balance of the member gaining credits.

    Raises
    ------
    ValueError
        If the amount is invalid or if ``from_`` has insufficient funds.
    TypeError
        If the amount is not an `int`.
    RuntimeError
        If the bank is guild-specific and a discord.User object is provided.
    BalanceTooHigh
        If the balance after the transfer would be greater than
        ``bank._MAX_BALANCE``.
    """
    if not isinstance(amount, int):
        raise TypeError("Transfer amount must be of type int, not {}.".format(type(amount)))
    if _invalid_amount(amount):
        raise ValueError(
            "Invalid transfer amount {} <= 0".format(
                humanize_number(amount, override_locale="en_US")
            )
        )

    # Check the receiver's cap *before* withdrawing so no credits are lost.
    guild = getattr(to, "guild", None)
    max_bal = await get_max_balance(guild)
    if await get_balance(to) + amount > max_bal:
        currency = await get_currency_name(guild)
        raise errors.BalanceTooHigh(
            user=to.display_name, max_balance=max_bal, currency_name=currency
        )

    await withdraw_credits(from_, amount)
    return await deposit_credits(to, amount)
async def wipe_bank(guild: Optional[discord.Guild] = None) -> None:
    """Delete all accounts from the bank.

    Parameters
    ----------
    guild : discord.Guild
        The guild to clear accounts for. If unsupplied and the bank is
        per-server, all accounts in every guild will be wiped.
    """
    if await is_global():
        await _config.clear_all_users()
    else:
        await _config.clear_all_members(guild)
async def bank_prune(bot: Red, guild: discord.Guild = None, user_id: int = None) -> None:
    """Prune bank accounts from the bank.

    Parameters
    ----------
    bot : Red
        The bot.
    guild : discord.Guild
        The guild to prune. This is required if the bank is set to local.
    user_id : int
        The id of the user whose account will be pruned.
        If supplied this will prune only this user's bank account
        otherwise it will prune all invalid users from the bank.

    Raises
    ------
    BankPruneError
        If guild is :code:`None` and the bank is Local.
    """
    global_bank = await is_global()
    if global_bank:
        _guilds = set()
        _uguilds = set()
        if user_id is None:
            # Large unchunked guilds are collected so they can be chunked below,
            # otherwise their members would look absent and get pruned.
            async for g in AsyncIter(bot.guilds, steps=100):
                if not g.unavailable and g.large and not g.chunked:
                    _guilds.add(g)
                elif g.unavailable:
                    _uguilds.add(g)
        group = _config._get_base_group(_config.USER)
    else:
        if guild is None:
            raise BankPruneError("'guild' can't be None when pruning a local bank")
        if user_id is None:
            _guilds = {guild} if not guild.unavailable and guild.large else set()
            _uguilds = {guild} if guild.unavailable else set()
        group = _config._get_base_group(_config.MEMBER, str(guild.id))

    if user_id is None:
        for _guild in _guilds:
            await _guild.chunk()
        accounts = await group.all()
        tmp = accounts.copy()
        members = bot.get_all_members() if global_bank else guild.members
        # Members of unavailable guilds are excluded from the "seen" set so their
        # accounts are NOT deleted — we cannot confirm they are invalid.
        user_list = {str(m.id) for m in members if m.guild not in _uguilds}

    async with group.all() as bank_data:  # FIXME: use-config-bulk-update
        if user_id is None:
            for acc in tmp:
                if acc not in user_list:
                    del bank_data[acc]
        else:
            user_id = str(user_id)
            if user_id in bank_data:
                del bank_data[user_id]
async def get_leaderboard(positions: int = None, guild: discord.Guild = None) -> List[tuple]:
    """
    Gets the bank's leaderboard

    Parameters
    ----------
    positions : `int`
        The number of positions to get
    guild : discord.Guild
        The guild to get the leaderboard of. If the bank is global and this
        is provided, get only guild members on the leaderboard

    Returns
    -------
    `list` of `tuple`
        The sorted leaderboard in the form of :code:`(user_id, raw_account)`

    Raises
    ------
    TypeError
        If the bank is guild-specific and no guild was specified
    """
    if await is_global():
        raw_accounts = await _config.all_users()
        if guild is not None:
            # Filter the global board down to accounts belonging to this guild.
            tmp = raw_accounts.copy()
            for acc in tmp:
                if not guild.get_member(acc):
                    del raw_accounts[acc]
    else:
        if guild is None:
            raise TypeError("Expected a guild, got NoneType object instead!")
        raw_accounts = await _config.all_members(guild)
    sorted_acc = sorted(raw_accounts.items(), key=lambda x: x[1]["balance"], reverse=True)
    if positions is None:
        return sorted_acc
    else:
        return sorted_acc[:positions]
async def get_leaderboard_position(
    member: Union[discord.User, discord.Member]
) -> Union[int, None]:
    """
    Get the leaderboard position for the specified user

    Parameters
    ----------
    member : `discord.User` or `discord.Member`
        The user to get the leaderboard position of

    Returns
    -------
    `int`
        The position of the user on the leaderboard, or ``None`` if the user
        is not on it

    Raises
    ------
    TypeError
        If the bank is currently guild-specific and a `discord.User` object was passed in
    """
    if await is_global():
        guild = None
    else:
        guild = member.guild if hasattr(member, "guild") else None
    # The previous ``try/except TypeError: raise`` around this call was a no-op
    # (it caught the exception only to re-raise it unchanged) and was removed;
    # get_leaderboard's TypeError propagates exactly as before.
    leaderboard = await get_leaderboard(None, guild)
    # enumerate(..., 1): leaderboard positions are 1-based.
    pos = discord.utils.find(lambda x: x[1][0] == member.id, enumerate(leaderboard, 1))
    if pos is None:
        return None
    return pos[0]
async def get_account(member: Union[discord.Member, discord.User]) -> Account:
    """Get the appropriate account for the given user or member.

    A member is required if the bank is currently guild specific.

    Parameters
    ----------
    member : `discord.User` or `discord.Member`
        The user whose account to get.

    Returns
    -------
    Account
        The user's account.
    """
    if await is_global():
        all_accounts = await _config.all_users()
    else:
        all_accounts = await _config.all_members(member.guild)

    if member.id not in all_accounts:
        # No stored account: build a default one without persisting it.
        acc_data = {"name": member.display_name, "created_at": _DEFAULT_MEMBER["created_at"]}
        try:
            acc_data["balance"] = await get_default_balance(member.guild)
        except AttributeError:
            # member has no .guild attribute (discord.User with a global bank).
            acc_data["balance"] = await get_default_balance()
    else:
        acc_data = all_accounts[member.id]
    acc_data["created_at"] = _decode_time(acc_data["created_at"])
    return Account(**acc_data)
async def is_global() -> bool:
    """Determine if the bank is currently global.

    Returns
    -------
    bool
        :code:`True` if the bank is global, otherwise :code:`False`.
    """
    global _cache_is_global

    # Lazily cache the setting; set_global() keeps this cache in sync.
    if _cache_is_global is None:
        _cache_is_global = await _config.is_global()

    return _cache_is_global
async def set_global(global_: bool) -> bool:
    """Set global status of the bank.

    .. important::

        All accounts are reset when you switch!

    Parameters
    ----------
    global_ : bool
        :code:`True` will set bank to global mode.

    Returns
    -------
    bool
        New bank mode, :code:`True` is global.
    """
    # (The old docstring advertised a RuntimeError this function never raises.)
    if (await is_global()) is global_:
        return global_

    global _cache_is_global

    # Wipe the accounts of the scope being left before switching.
    if await is_global():
        await _config.clear_all_users()
    else:
        await _config.clear_all_members()

    await _config.is_global.set(global_)
    _cache_is_global = global_

    return global_
async def get_bank_name(guild: discord.Guild = None) -> str:
    """Get the current bank name.

    Parameters
    ----------
    guild : `discord.Guild`, optional
        The guild to get the bank name for (required if bank is
        guild-specific).

    Returns
    -------
    str
        The bank's name.

    Raises
    ------
    RuntimeError
        If the bank is guild-specific and guild was not provided.
    """
    if await is_global():
        global _cache
        # Lazily cache the global setting; set_bank_name() keeps it in sync.
        if _cache["bank_name"] is None:
            _cache["bank_name"] = await _config.bank_name()
        return _cache["bank_name"]
    elif guild is not None:
        return await _config.guild(guild).bank_name()
    else:
        raise RuntimeError("Guild parameter is required and missing.")
async def set_bank_name(name: str, guild: discord.Guild = None) -> str:
    """Rename the bank.

    Parameters
    ----------
    name : str
        The new name for the bank.
    guild : `discord.Guild`, optional
        The guild to set the bank name for (required if bank is
        guild-specific).

    Returns
    -------
    str
        The new name for the bank.

    Raises
    ------
    RuntimeError
        If the bank is guild-specific and guild was not provided.
    """
    if await is_global():
        await _config.bank_name.set(name)
        # Keep the module-level cache in sync with config.
        _cache["bank_name"] = name
        return name
    if guild is None:
        raise RuntimeError("Guild must be provided if setting the name of a guild-specific bank.")
    await _config.guild(guild).bank_name.set(name)
    return name
async def get_currency_name(guild: discord.Guild = None) -> str:
    """Return the bank's currency name.

    Parameters
    ----------
    guild : `discord.Guild`, optional
        The guild to get the currency name for (required if bank is
        guild-specific).

    Returns
    -------
    str
        The currency name.

    Raises
    ------
    RuntimeError
        If the bank is guild-specific and guild was not provided.
    """
    if await is_global():
        # Serve from (and lazily fill) the module-level cache.
        currency = _cache["currency"]
        if currency is None:
            currency = await _config.currency()
            _cache["currency"] = currency
        return currency
    if guild is None:
        raise RuntimeError("Guild must be provided.")
    return await _config.guild(guild).currency()
async def set_currency_name(name: str, guild: discord.Guild = None) -> str:
    """Rename the bank's currency.

    Parameters
    ----------
    name : str
        The new name for the currency.
    guild : `discord.Guild`, optional
        The guild to set the currency name for (required if bank is
        guild-specific).

    Returns
    -------
    str
        The new name for the currency.

    Raises
    ------
    RuntimeError
        If the bank is guild-specific and guild was not provided.
    """
    if await is_global():
        await _config.currency.set(name)
        # Keep the module-level cache in sync with config.
        _cache["currency"] = name
        return name
    if guild is None:
        raise RuntimeError(
            "Guild must be provided if setting the currency name of a guild-specific bank."
        )
    await _config.guild(guild).currency.set(name)
    return name
async def get_max_balance(guild: discord.Guild = None) -> int:
    """Return the maximum balance a bank account may hold.

    Parameters
    ----------
    guild : `discord.Guild`, optional
        The guild to get the max balance for (required if bank is
        guild-specific).

    Returns
    -------
    int
        The maximum allowed balance.

    Raises
    ------
    RuntimeError
        If the bank is guild-specific and guild was not provided.
    """
    if await is_global():
        # Serve from (and lazily fill) the module-level cache.
        max_balance = _cache["max_balance"]
        if max_balance is None:
            max_balance = await _config.max_balance()
            _cache["max_balance"] = max_balance
        return max_balance
    if guild is None:
        raise RuntimeError("Guild must be provided.")
    return await _config.guild(guild).max_balance()
async def set_max_balance(amount: int, guild: discord.Guild = None) -> int:
    """Set the maximum balance for the bank.

    Parameters
    ----------
    amount : int
        The new maximum balance.
    guild : `discord.Guild`, optional
        The guild to set the max balance for (required if bank is
        guild-specific).

    Returns
    -------
    int
        The new maximum balance.

    Raises
    ------
    RuntimeError
        If the bank is guild-specific and guild was not provided.
    ValueError
        If the amount is not strictly positive, or is higher than
        2 ** 63 - 1.
    TypeError
        If the amount is not an `int`.
    """
    if not isinstance(amount, int):
        raise TypeError(f"Amount must be of type int, not {type(amount)}.")
    if amount <= 0 or amount > _MAX_BALANCE:
        raise ValueError(
            "Amount must be greater than zero and less than {max}.".format(
                max=humanize_number(_MAX_BALANCE, override_locale="en_US")
            )
        )
    if await is_global():
        await _config.max_balance.set(amount)
        # Keep the module-level cache in sync with config.
        _cache["max_balance"] = amount
        return amount
    if guild is None:
        raise RuntimeError(
            "Guild must be provided if setting the maximum balance of a guild-specific bank."
        )
    await _config.guild(guild).max_balance.set(amount)
    return amount
async def get_default_balance(guild: discord.Guild = None) -> int:
    """Return the balance new accounts start with.

    Parameters
    ----------
    guild : `discord.Guild`, optional
        The guild to get the default balance for (required if bank is
        guild-specific).

    Returns
    -------
    int
        The bank's default balance.

    Raises
    ------
    RuntimeError
        If the bank is guild-specific and guild was not provided.
    """
    if await is_global():
        # Serve from (and lazily fill) the module-level cache.
        balance = _cache["default_balance"]
        if balance is None:
            balance = await _config.default_balance()
            _cache["default_balance"] = balance
        return balance
    if guild is None:
        raise RuntimeError("Guild is missing and required!")
    return await _config.guild(guild).default_balance()
async def set_default_balance(amount: int, guild: discord.Guild = None) -> int:
    """Set the balance new accounts start with.

    Parameters
    ----------
    amount : int
        The new default balance.
    guild : `discord.Guild`, optional
        The guild to set the default balance for (required if bank is
        guild-specific).

    Returns
    -------
    int
        The new default balance.

    Raises
    ------
    RuntimeError
        If the bank is guild-specific and guild was not provided.
    ValueError
        If the amount is less than 0 or higher than the max allowed balance.
    TypeError
        If the amount is not an `int`.
    """
    if not isinstance(amount, int):
        raise TypeError(f"Amount must be of type int, not {type(amount)}.")
    # May itself raise RuntimeError when the bank is guild-specific and
    # no guild was given.
    max_bal = await get_max_balance(guild)
    if amount < 0 or amount > max_bal:
        raise ValueError(
            "Amount must be greater than or equal zero and less than or equal {max}.".format(
                max=humanize_number(max_bal, override_locale="en_US")
            )
        )
    if await is_global():
        await _config.default_balance.set(amount)
        # Keep the module-level cache in sync with config.
        _cache["default_balance"] = amount
        return amount
    if guild is None:
        raise RuntimeError("Guild is missing and required.")
    await _config.guild(guild).default_balance.set(amount)
    return amount
class AbortPurchase(Exception):
    """Raised inside a :func:`cost`-decorated command to cancel the purchase.

    The withdrawn amount is deposited back and the exception is consumed
    by the decorator (never shown to users).
    """
    pass
def cost(amount: int):
    """
    Decorates a coroutine-function or command to have a cost.
    If the command raises an exception, the cost will be refunded.
    You can intentionally refund by raising `AbortPurchase`
    (this error will be consumed and not show to users)
    Other exceptions will propagate and will be handled by Red's (and/or
    any other configured) error handling.
    """
    # TODO: Add documentation for input/output/exceptions
    if not isinstance(amount, int) or amount < 0:
        raise ValueError("This decorator requires an integer cost greater than or equal to zero")
    def deco(coro_or_command):
        # Works both on a bare coroutine function and on an already-built
        # commands.Command; in the latter case we wrap its callback in place.
        is_command = isinstance(coro_or_command, commands.Command)
        if not is_command and not asyncio.iscoroutinefunction(coro_or_command):
            raise TypeError("@bank.cost() can only be used on commands or `async def` functions")
        coro = coro_or_command.callback if is_command else coro_or_command
        @wraps(coro)
        async def wrapped(*args, **kwargs):
            # Find the invocation Context among the positional args
            # (position varies: cog methods receive it second, bare ones first).
            context: commands.Context = None
            for arg in args:
                if isinstance(arg, commands.Context):
                    context = arg
                    break
            # NOTE(review): if no Context appears in args, `context` stays
            # None and the attribute access below raises AttributeError —
            # TODO confirm decorated callables are always invoked with one.
            if not context.guild and not await is_global():
                raise commands.UserFeedbackCheckFailure(
                    _("Can't pay for this command in DM without a global bank.")
                )
            try:
                # Charge up front; refunded below on failure/abort.
                await withdraw_credits(context.author, amount)
            except Exception:
                credits_name = await get_currency_name(context.guild)
                raise commands.UserFeedbackCheckFailure(
                    _("You need at least {cost} {currency} to use this command.").format(
                        cost=humanize_number(amount), currency=credits_name
                    )
                )
            else:
                try:
                    return await coro(*args, **kwargs)
                except AbortPurchase:
                    # Deliberate refund requested by the command; swallow it.
                    await deposit_credits(context.author, amount)
                except Exception:
                    # Refund on any other failure, then let it propagate.
                    await deposit_credits(context.author, amount)
                    raise
        if not is_command:
            return wrapped
        else:
            wrapped.__module__ = coro_or_command.callback.__module__
            coro_or_command.callback = wrapped
            return coro_or_command
    return deco
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/bank.py
| 0.847873 | 0.174445 |
bank.py
|
pypi
|
import contextlib
import keyword
import pkgutil
from importlib import import_module, invalidate_caches
from importlib.machinery import ModuleSpec
from pathlib import Path
from typing import Union, List, Optional
import redbot.cogs
from redbot.core.utils import deduplicate_iterables
import discord
from . import checks, commands
from .config import Config
from .i18n import Translator, cog_i18n
from .data_manager import cog_data_path
from .utils.chat_formatting import box, pagify
__all__ = ["CogManager"]
class NoSuchCog(ImportError):
    """Thrown when a cog is missing.
    Different from ImportError because some ImportErrors can happen inside cogs.
    Carries the ``name`` (and sometimes ``path``) of the missing package,
    as `ImportError` does.
    """
class CogManager:
    """Directory manager for Red's cogs.
    This module allows you to load cogs from multiple directories and even from
    outside the bot directory. You may also set a directory for downloader to
    install new cogs to, the default being the :code:`cogs/` folder in the root
    bot directory.
    """
    # Directory containing Red's bundled (core) cogs package.
    CORE_PATH = Path(redbot.cogs.__path__[0])
    def __init__(self):
        self.config = Config.get_conf(self, 2938473984732, True)
        # Default 3rd-party install location: <cog data path>/cogs,
        # created eagerly so it always exists.
        tmp_cog_install_path = cog_data_path(self) / "cogs"
        tmp_cog_install_path.mkdir(parents=True, exist_ok=True)
        self.config.register_global(paths=[], install_path=str(tmp_cog_install_path))
    async def paths(self) -> List[Path]:
        """Get all currently valid path directories, in order of priority
        Returns
        -------
        List[pathlib.Path]
            A list of paths where cog packages can be found. The
            install path is highest priority, followed by the
            user-defined paths, and the core path has the lowest
            priority.
        """
        # deduplicate_iterables preserves first-seen order, which encodes
        # the priority described above.
        return deduplicate_iterables(
            [await self.install_path()], await self.user_defined_paths(), [self.CORE_PATH]
        )
    async def install_path(self) -> Path:
        """Get the install path for 3rd party cogs.
        Returns
        -------
        pathlib.Path
            The path to the directory where 3rd party cogs are stored.
        """
        return Path(await self.config.install_path()).resolve()
    async def user_defined_paths(self) -> List[Path]:
        """Get a list of user-defined cog paths.
        All paths will be absolute and unique, in order of priority.
        Returns
        -------
        List[pathlib.Path]
            A list of user-defined paths.
        """
        return list(map(Path, deduplicate_iterables(await self.config.paths())))
    async def set_install_path(self, path: Path) -> Path:
        """Set the install path for 3rd party cogs.
        Note
        ----
        The bot will not remember your old cog install path which means
        that **all previously installed cogs** will no longer be found.
        Parameters
        ----------
        path : pathlib.Path
            The new directory for cog installs.
        Returns
        -------
        pathlib.Path
            Absolute path to the new install directory.
        Raises
        ------
        ValueError
            If :code:`path` is not an existing directory.
        """
        if not path.is_dir():
            raise ValueError("The install path must be an existing directory.")
        resolved = path.resolve()
        await self.config.install_path.set(str(resolved))
        return resolved
    @staticmethod
    def _ensure_path_obj(path: Union[Path, str]) -> Path:
        """Guarantee an object will be a path object.
        Parameters
        ----------
        path : `pathlib.Path` or `str`
        Returns
        -------
        pathlib.Path
        """
        return Path(path)
    async def add_path(self, path: Union[Path, str]) -> None:
        """Add a cog path to current list.
        This will ignore duplicates.
        Parameters
        ----------
        path : `pathlib.Path` or `str`
            Path to add.
        Raises
        ------
        ValueError
            If :code:`path` does not resolve to an existing directory.
        """
        path = self._ensure_path_obj(path)
        # This makes the path absolute, will break if a bot install
        # changes OS/Computer?
        path = path.resolve()
        if not path.is_dir():
            raise ValueError("'{}' is not a valid directory.".format(path))
        # The install path and core path are always searched anyway, so
        # adding them again would only confuse priority ordering.
        if path == await self.install_path():
            raise ValueError("Cannot add the install path as an additional path.")
        if path == self.CORE_PATH:
            raise ValueError("Cannot add the core path as an additional path.")
        current_paths = await self.user_defined_paths()
        if path not in current_paths:
            current_paths.append(path)
            await self.set_paths(current_paths)
    async def remove_path(self, path: Union[Path, str]) -> None:
        """Remove a path from the current paths list.
        Parameters
        ----------
        path : `pathlib.Path` or `str`
            Path to remove.
        """
        # NOTE(review): list.remove raises ValueError if the path is not
        # present — callers appear to pass paths obtained from
        # user_defined_paths(); confirm before calling with arbitrary input.
        path = self._ensure_path_obj(path)
        paths = await self.user_defined_paths()
        paths.remove(path)
        await self.set_paths(paths)
    async def set_paths(self, paths_: List[Path]):
        """Set the current paths list.
        Parameters
        ----------
        paths_ : `list` of `pathlib.Path`
            List of paths to set.
        """
        str_paths = list(map(str, paths_))
        await self.config.paths.set(str_paths)
    async def _find_ext_cog(self, name: str) -> ModuleSpec:
        """
        Attempts to find a spec for a third party installed cog.
        Parameters
        ----------
        name : str
            Name of the cog package to look for.
        Returns
        -------
        importlib.machinery.ModuleSpec
            Module spec to be used for cog loading.
        Raises
        ------
        NoSuchCog
            When no cog with the requested name was found.
        """
        if not name.isidentifier() or keyword.iskeyword(name):
            # reject package names that can't be valid python identifiers
            raise NoSuchCog(
                f"No 3rd party module by the name of '{name}' was found in any available path.",
                name=name,
            )
        # Search install path first, then user-defined paths (priority order).
        real_paths = list(map(str, [await self.install_path()] + await self.user_defined_paths()))
        for finder, module_name, _ in pkgutil.iter_modules(real_paths):
            if name == module_name:
                spec = finder.find_spec(name)
                if spec:
                    return spec
        raise NoSuchCog(
            f"No 3rd party module by the name of '{name}' was found in any available path.",
            name=name,
        )
    @staticmethod
    async def _find_core_cog(name: str) -> ModuleSpec:
        """
        Attempts to find a spec for a core cog.
        Parameters
        ----------
        name : str
        Returns
        -------
        importlib.machinery.ModuleSpec
        Raises
        ------
        RuntimeError
            When no matching spec can be found.
        """
        real_name = ".{}".format(name)
        package = "redbot.cogs"
        try:
            mod = import_module(real_name, package=package)
        except ImportError as e:
            # Only translate "the cog package itself is missing" into
            # NoSuchCog; ImportErrors raised from inside the cog propagate.
            if e.name == package + real_name:
                raise NoSuchCog(
                    "No core cog by the name of '{}' could be found.".format(name),
                    path=e.path,
                    name=e.name,
                ) from e
            raise
        return mod.__spec__
    # noinspection PyUnreachableCode
    async def find_cog(self, name: str) -> Optional[ModuleSpec]:
        """Find a cog in the list of available paths.
        Parameters
        ----------
        name : str
            Name of the cog to find.
        Returns
        -------
        Optional[importlib.machinery.ModuleSpec]
            A module spec to be used for specialized cog loading, if found.
        """
        # 3rd party cogs shadow core cogs of the same name; returns None
        # implicitly when neither lookup succeeds.
        with contextlib.suppress(NoSuchCog):
            return await self._find_ext_cog(name)
        with contextlib.suppress(NoSuchCog):
            return await self._find_core_cog(name)
    async def available_modules(self) -> List[str]:
        """Finds the names of all available modules to load."""
        paths = list(map(str, await self.paths()))
        ret = []
        for finder, module_name, _ in pkgutil.iter_modules(paths):
            # reject package names that can't be valid python identifiers
            if module_name.isidentifier() and not keyword.iskeyword(module_name):
                ret.append(module_name)
        return ret
    @staticmethod
    def invalidate_caches():
        """Re-evaluate modules in the py cache.
        This is an alias for an importlib internal and should be called
        any time that a new module has been installed to a cog directory.
        """
        invalidate_caches()
_ = Translator("CogManagerUI", __file__)
@cog_i18n(_)
class CogManagerUI(commands.Cog):
    """Commands to interface with Red's cog manager."""
    async def red_delete_data_for_user(self, **kwargs):
        """ Nothing to delete (Core Config is handled in a bot method ) """
        return
    @commands.command()
    @checks.is_owner()
    async def paths(self, ctx: commands.Context):
        """
        Lists current cog paths in order of priority.
        """
        cog_mgr = ctx.bot._cog_mgr
        install_path = await cog_mgr.install_path()
        core_path = cog_mgr.CORE_PATH
        cog_paths = await cog_mgr.user_defined_paths()
        msg = _("Install Path: {install_path}\nCore Path: {core_path}\n\n").format(
            install_path=install_path, core_path=core_path
        )
        # User paths are shown 1-indexed; removepath/reorderpath expect
        # these same numbers.
        partial = []
        for i, p in enumerate(cog_paths, start=1):
            partial.append("{}. {}".format(i, p))
        msg += "\n".join(partial)
        await ctx.send(box(msg))
    @commands.command()
    @checks.is_owner()
    async def addpath(self, ctx: commands.Context, *, path: Path):
        """
        Add a path to the list of available cog paths.
        """
        if not path.is_dir():
            await ctx.send(_("That path does not exist or does not point to a valid directory."))
            return
        try:
            await ctx.bot._cog_mgr.add_path(path)
        except ValueError as e:
            await ctx.send(str(e))
        else:
            await ctx.send(_("Path successfully added."))
    @commands.command()
    @checks.is_owner()
    async def removepath(self, ctx: commands.Context, path_number: int):
        """
        Removes a path from the available cog paths given the `path_number` from `[p]paths`.
        """
        # Convert the 1-indexed display number to a list index.
        path_number -= 1
        if path_number < 0:
            await ctx.send(_("Path numbers must be positive."))
            return
        cog_paths = await ctx.bot._cog_mgr.user_defined_paths()
        try:
            to_remove = cog_paths.pop(path_number)
        except IndexError:
            await ctx.send(_("That is an invalid path number."))
            return
        await ctx.bot._cog_mgr.remove_path(to_remove)
        await ctx.send(_("Path successfully removed."))
    @commands.command()
    @checks.is_owner()
    async def reorderpath(self, ctx: commands.Context, from_: int, to: int):
        """
        Reorders paths internally to allow discovery of different cogs.
        """
        # Doing this because in the paths command they're 1 indexed
        from_ -= 1
        to -= 1
        if from_ < 0 or to < 0:
            await ctx.send(_("Path numbers must be positive."))
            return
        all_paths = await ctx.bot._cog_mgr.user_defined_paths()
        try:
            to_move = all_paths.pop(from_)
        except IndexError:
            await ctx.send(_("Invalid 'from' index."))
            return
        # NOTE(review): list.insert() clamps out-of-range indices instead of
        # raising IndexError, so this except branch appears unreachable; an
        # oversized 'to' silently appends at the end.
        try:
            all_paths.insert(to, to_move)
        except IndexError:
            await ctx.send(_("Invalid 'to' index."))
            return
        await ctx.bot._cog_mgr.set_paths(all_paths)
        await ctx.send(_("Paths reordered."))
    @commands.command()
    @checks.is_owner()
    async def installpath(self, ctx: commands.Context, path: Path = None):
        """
        Returns the current install path or sets it if one is provided.
        The provided path must be absolute or relative to the bot's
        directory and it must already exist.
        No installed cogs will be transferred in the process.
        """
        if path:
            if not path.is_absolute():
                path = (ctx.bot._main_dir / path).resolve()
            try:
                await ctx.bot._cog_mgr.set_install_path(path)
            except ValueError:
                await ctx.send(_("That path does not exist."))
                return
        install_path = await ctx.bot._cog_mgr.install_path()
        await ctx.send(
            _("The bot will install new cogs to the `{}` directory.").format(install_path)
        )
    @commands.command()
    @checks.is_owner()
    async def cogs(self, ctx: commands.Context):
        """
        Lists all loaded and available cogs.
        """
        loaded = set(ctx.bot.extensions.keys())
        all_cogs = set(await ctx.bot._cog_mgr.available_modules())
        unloaded = all_cogs - loaded
        loaded = sorted(list(loaded), key=str.lower)
        unloaded = sorted(list(unloaded), key=str.lower)
        if await ctx.embed_requested():
            # Embed output: loaded list in green embeds, unloaded in red.
            loaded = _("**{} loaded:**\n").format(len(loaded)) + ", ".join(loaded)
            unloaded = _("**{} unloaded:**\n").format(len(unloaded)) + ", ".join(unloaded)
            for page in pagify(loaded, delims=[", ", "\n"], page_length=1800):
                if page.startswith(", "):
                    page = page[2:]
                e = discord.Embed(description=page, colour=discord.Colour.dark_green())
                await ctx.send(embed=e)
            for page in pagify(unloaded, delims=[", ", "\n"], page_length=1800):
                if page.startswith(", "):
                    page = page[2:]
                e = discord.Embed(description=page, colour=discord.Colour.dark_red())
                await ctx.send(embed=e)
        else:
            # Plain-text output: code boxes, with the count header only on
            # the first page of each list.
            loaded_count = _("**{} loaded:**\n").format(len(loaded))
            loaded = ", ".join(loaded)
            unloaded_count = _("**{} unloaded:**\n").format(len(unloaded))
            unloaded = ", ".join(unloaded)
            loaded_count_sent = False
            unloaded_count_sent = False
            for page in pagify(loaded, delims=[", ", "\n"], page_length=1800):
                if page.startswith(", "):
                    page = page[2:]
                if not loaded_count_sent:
                    await ctx.send(loaded_count + box(page, lang="css"))
                    loaded_count_sent = True
                else:
                    await ctx.send(box(page, lang="css"))
            for page in pagify(unloaded, delims=[", ", "\n"], page_length=1800):
                if page.startswith(", "):
                    page = page[2:]
                if not unloaded_count_sent:
                    await ctx.send(unloaded_count + box(page, lang="ldif"))
                    unloaded_count_sent = True
                else:
                    await ctx.send(box(page, lang="ldif"))
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/cog_manager.py
| 0.808521 | 0.211091 |
cog_manager.py
|
pypi
|
import importlib.machinery
import discord
from redbot.core.utils.chat_formatting import humanize_number
from .i18n import Translator
_ = Translator(__name__, __file__)
# Root of the package's exception hierarchy; catch this to handle any
# Red-specific failure.
class RedError(Exception):
    """Base error class for Red-related errors."""
class PackageAlreadyLoaded(RedError):
    """Raised when trying to load an already-loaded package."""

    def __init__(
        self, spec: importlib.machinery.ModuleSpec, *args, **kwargs
    ):
        super().__init__(*args, **kwargs)
        # Module spec of the package that was already loaded.
        self.spec: importlib.machinery.ModuleSpec = spec

    def __str__(self) -> str:
        # Only the final dotted component is user-relevant.
        package_name = self.spec.name.rpartition(".")[2]
        return f"There is already a package named {package_name} loaded"
class CogLoadError(RedError):
    """Raised by a cog when it cannot load itself.
    The message will be sent to the user."""
    pass
# Parent for every bank-related failure below.
class BankError(RedError):
    """Base error class for bank-related errors."""
class BalanceTooHigh(BankError, OverflowError):
    """Raised when trying to set a user's balance to higher than the maximum."""

    def __init__(
        self, user: discord.abc.User, max_balance: int, currency_name: str, *args, **kwargs
    ):
        super().__init__(*args, **kwargs)
        # Context used to build the translated message in __str__.
        self.user = user
        self.max_balance = max_balance
        self.currency_name = currency_name

    def __str__(self) -> str:
        template = _("{user}'s balance cannot rise above {max} {currency}.")
        return template.format(
            user=self.user,
            max=humanize_number(self.max_balance),
            currency=self.currency_name,
        )
class BankPruneError(BankError):
    """Raised when trying to prune a local bank and no server is specified."""
class MissingExtraRequirements(RedError):
    """Raised when an extra requirement is missing but required."""
# Parent for every Config-related failure below.
class ConfigError(RedError):
    """Error in a Config operation."""
class StoredTypeError(ConfigError, TypeError):
    """A TypeError pertaining to stored Config data.
    This error may arise when, for example, trying to increment a value
    which is not a number, or trying to toggle a value which is not a
    boolean.
    """
class CannotSetSubfield(StoredTypeError):
    """Tried to set sub-field of an invalid data structure.
    This would occur in the following example::
        >>> import asyncio
        >>> from redbot.core import Config
        >>> config = Config.get_conf(None, 1234, cog_name="Example")
        >>> async def example():
        ...     await config.foo.set(True)
        ...     await config.set_raw("foo", "bar", False)  # Should raise here
        ...
        >>> asyncio.run(example())
    """
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/errors.py
| 0.771155 | 0.188175 |
errors.py
|
pypi
|
import ast
import asyncio
import aiohttp
import inspect
import io
import textwrap
import traceback
import types
import re
from contextlib import redirect_stdout
from copy import copy
import discord
from . import checks, commands
from .commands import NoParseOptional as Optional
from .i18n import Translator, cog_i18n
from .utils.chat_formatting import pagify
from .utils.predicates import MessagePredicate
"""
Notice:
95% of the below code came from R.Danny which can be found here:
https://github.com/Rapptz/RoboDanny/blob/master/cogs/repl.py
"""
_ = Translator("Dev", __file__)
START_CODE_BLOCK_RE = re.compile(r"^((```py(thon)?)(?=\s)|(```))")
@cog_i18n(_)
class Dev(commands.Cog):
    """Various development focused utilities."""
    async def red_delete_data_for_user(self, **kwargs):
        """
        Because despite my best efforts to advise otherwise,
        people use ``--dev`` in production
        """
        return
    def __init__(self):
        super().__init__()
        # Result of the most recent dev command, exposed to eval envs as `_`.
        self._last_result = None
        # channel.id -> bool (True = active REPL, False = paused).
        self.sessions = {}
        self.env_extensions = {}
    @staticmethod
    def async_compile(source, filename, mode):
        # PyCF_ALLOW_TOP_LEVEL_AWAIT lets users `await` directly in eval'd code.
        return compile(source, filename, mode, flags=ast.PyCF_ALLOW_TOP_LEVEL_AWAIT, optimize=0)
    @staticmethod
    async def maybe_await(coro):
        # Await up to two levels of awaitables (e.g. a coroutine returning
        # another coroutine), then return the final value.
        for i in range(2):
            if inspect.isawaitable(coro):
                coro = await coro
            else:
                return coro
        return coro
    @staticmethod
    def cleanup_code(content):
        """Automatically removes code blocks from the code."""
        # remove ```py\n```
        if content.startswith("```") and content.endswith("```"):
            return START_CODE_BLOCK_RE.sub("", content)[:-3]
        # remove `foo`
        return content.strip("` \n")
    @classmethod
    def get_syntax_error(cls, e):
        """Format a syntax error to send to the user.
        Returns a string representation of the error formatted as a codeblock.
        """
        if e.text is None:
            return cls.get_pages("{0.__class__.__name__}: {0}".format(e))
        return cls.get_pages(
            "{0.text}\n{1:>{0.offset}}\n{2}: {0}".format(e, "^", type(e).__name__)
        )
    @staticmethod
    def get_pages(msg: str):
        """Pagify the given message for output to the user."""
        return pagify(msg, delims=["\n", " "], priority=True, shorten_by=10)
    @staticmethod
    def sanitize_output(ctx: commands.Context, input_: str) -> str:
        """Hides the bot's token from a string."""
        token = ctx.bot.http.token
        # BUGFIX: re.sub()'s fourth positional parameter is `count`, not
        # `flags`. Passing `re.I` positionally redacted at most two
        # occurrences of the token and did NOT make matching
        # case-insensitive. Pass it as the `flags` keyword instead.
        return re.sub(re.escape(token), "[EXPUNGED]", input_, flags=re.I)
    def get_environment(self, ctx: commands.Context) -> dict:
        # Build the namespace that eval/exec'd user code runs in.
        env = {
            "bot": ctx.bot,
            "ctx": ctx,
            "channel": ctx.channel,
            "author": ctx.author,
            "guild": ctx.guild,
            "message": ctx.message,
            "asyncio": asyncio,
            "aiohttp": aiohttp,
            "discord": discord,
            "commands": commands,
            "_": self._last_result,
            "__name__": "__main__",
        }
        for name, value in self.env_extensions.items():
            try:
                env[name] = value(ctx)
            except Exception as e:
                # A failing extension becomes the exception object itself so
                # the user can inspect it rather than breaking the whole env.
                traceback.clear_frames(e.__traceback__)
                env[name] = e
        return env
    @commands.command()
    @checks.is_owner()
    async def debug(self, ctx, *, code):
        """Evaluate a statement of python code.
        The bot will always respond with the return value of the code.
        If the return value of the code is a coroutine, it will be awaited,
        and the result of that will be the bot's response.
        Note: Only one statement may be evaluated. Using certain restricted
        keywords, e.g. yield, will result in a syntax error. For multiple
        lines or asynchronous code, see [p]repl or [p]eval.
        Environment Variables:
            ctx      - command invocation context
            bot      - bot object
            channel  - the current channel object
            author   - command author's member object
            message  - the command's message object
            discord  - discord.py library
            commands - redbot.core.commands
            _        - The result of the last dev command.
        """
        env = self.get_environment(ctx)
        code = self.cleanup_code(code)
        try:
            compiled = self.async_compile(code, "<string>", "eval")
            result = await self.maybe_await(eval(compiled, env))
        except SyntaxError as e:
            await ctx.send_interactive(self.get_syntax_error(e), box_lang="py")
            return
        except Exception as e:
            await ctx.send_interactive(
                self.get_pages("{}: {!s}".format(type(e).__name__, e)), box_lang="py"
            )
            return
        self._last_result = result
        result = self.sanitize_output(ctx, str(result))
        await ctx.tick()
        await ctx.send_interactive(self.get_pages(result), box_lang="py")
    @commands.command(name="eval")
    @checks.is_owner()
    async def _eval(self, ctx, *, body: str):
        """Execute asynchronous code.
        This command wraps code into the body of an async function and then
        calls and awaits it. The bot will respond with anything printed to
        stdout, as well as the return value of the function.
        The code can be within a codeblock, inline code or neither, as long
        as they are not mixed and they are formatted correctly.
        Environment Variables:
            ctx      - command invocation context
            bot      - bot object
            channel  - the current channel object
            author   - command author's member object
            message  - the command's message object
            discord  - discord.py library
            commands - redbot.core.commands
            _        - The result of the last dev command.
        """
        env = self.get_environment(ctx)
        body = self.cleanup_code(body)
        stdout = io.StringIO()
        to_compile = "async def func():\n%s" % textwrap.indent(body, "  ")
        try:
            compiled = self.async_compile(to_compile, "<string>", "exec")
            exec(compiled, env)
        except SyntaxError as e:
            return await ctx.send_interactive(self.get_syntax_error(e), box_lang="py")
        func = env["func"]
        result = None
        try:
            with redirect_stdout(stdout):
                result = await func()
        except:
            # Intentionally bare: any failure (including BaseException) in
            # user code is reported back, not allowed to kill the command.
            printed = "{}{}".format(stdout.getvalue(), traceback.format_exc())
        else:
            printed = stdout.getvalue()
        await ctx.tick()
        if result is not None:
            self._last_result = result
            msg = "{}{}".format(printed, result)
        else:
            msg = printed
        msg = self.sanitize_output(ctx, msg)
        await ctx.send_interactive(self.get_pages(msg), box_lang="py")
    @commands.group(invoke_without_command=True)
    @checks.is_owner()
    async def repl(self, ctx):
        """Open an interactive REPL.
        The REPL will only recognise code as messages which start with a
        backtick. This includes codeblocks, and as such multiple lines can be
        evaluated.
        """
        if ctx.channel.id in self.sessions:
            if self.sessions[ctx.channel.id]:
                await ctx.send(
                    _("Already running a REPL session in this channel. Exit it with `quit`.")
                )
            else:
                await ctx.send(
                    _(
                        "Already running a REPL session in this channel. Resume the REPL with `{}repl resume`."
                    ).format(ctx.prefix)
                )
            return
        env = self.get_environment(ctx)
        env["__builtins__"] = __builtins__
        env["_"] = None
        self.sessions[ctx.channel.id] = True
        await ctx.send(
            _(
                "Enter code to execute or evaluate. `exit()` or `quit` to exit. `{}repl pause` to pause."
            ).format(ctx.prefix)
        )
        while True:
            response = await ctx.bot.wait_for("message", check=MessagePredicate.regex(r"^`", ctx))
            # Paused sessions keep listening but ignore input until resumed.
            if not self.sessions[ctx.channel.id]:
                continue
            cleaned = self.cleanup_code(response.content)
            if cleaned in ("quit", "exit", "exit()"):
                await ctx.send(_("Exiting."))
                del self.sessions[ctx.channel.id]
                return
            executor = None
            if cleaned.count("\n") == 0:
                # single statement, potentially 'eval'
                try:
                    code = self.async_compile(cleaned, "<repl session>", "eval")
                except SyntaxError:
                    pass
                else:
                    executor = eval
            if executor is None:
                try:
                    code = self.async_compile(cleaned, "<repl session>", "exec")
                except SyntaxError as e:
                    await ctx.send_interactive(self.get_syntax_error(e), box_lang="py")
                    continue
            env["message"] = response
            stdout = io.StringIO()
            msg = ""
            try:
                with redirect_stdout(stdout):
                    if executor is None:
                        # exec-compiled code: run it as a zero-arg function
                        # sharing the REPL environment as globals.
                        result = types.FunctionType(code, env)()
                    else:
                        result = executor(code, env)
                    result = await self.maybe_await(result)
            except:
                # Intentionally bare: report any user-code failure.
                value = stdout.getvalue()
                msg = "{}{}".format(value, traceback.format_exc())
            else:
                value = stdout.getvalue()
                if result is not None:
                    msg = "{}{}".format(value, result)
                    env["_"] = result
                elif value:
                    msg = "{}".format(value)
            msg = self.sanitize_output(ctx, msg)
            try:
                await ctx.send_interactive(self.get_pages(msg), box_lang="py")
            except discord.Forbidden:
                pass
            except discord.HTTPException as e:
                await ctx.send(_("Unexpected error: `{}`").format(e))
    @repl.command(aliases=["resume"])
    async def pause(self, ctx, toggle: Optional[bool] = None):
        """Pauses/resumes the REPL running in the current channel"""
        if ctx.channel.id not in self.sessions:
            await ctx.send(_("There is no currently running REPL session in this channel."))
            return
        if toggle is None:
            toggle = not self.sessions[ctx.channel.id]
        self.sessions[ctx.channel.id] = toggle
        if toggle:
            await ctx.send(_("The REPL session in this channel has been resumed."))
        else:
            await ctx.send(_("The REPL session in this channel is now paused."))
    @commands.command()
    @checks.is_owner()
    async def mock(self, ctx, user: discord.Member, *, command):
        """Mock another user invoking a command.
        The prefix must not be entered.
        """
        msg = copy(ctx.message)
        msg.author = user
        msg.content = ctx.prefix + command
        ctx.bot.dispatch("message", msg)
    @commands.command(name="mockmsg")
    @checks.is_owner()
    async def mock_msg(self, ctx, user: discord.Member, *, content: str):
        """Dispatch a message event as if it were sent by a different user.
        Only reads the raw content of the message. Attachments, embeds etc. are
        ignored.
        """
        old_author = ctx.author
        old_content = ctx.message.content
        ctx.message.author = user
        ctx.message.content = content
        ctx.bot.dispatch("message", ctx.message)
        # If we change the author and content back too quickly,
        # the bot won't process the mocked message in time.
        await asyncio.sleep(2)
        ctx.message.author = old_author
        ctx.message.content = old_content
    @commands.command()
    @checks.is_owner()
    async def bypasscooldowns(self, ctx, toggle: Optional[bool] = None):
        """Give bot owners the ability to bypass cooldowns.
        Does not persist through restarts."""
        if toggle is None:
            toggle = not ctx.bot._bypass_cooldowns
        ctx.bot._bypass_cooldowns = toggle
        if toggle:
            await ctx.send(_("Bot owners will now bypass all commands with cooldowns."))
        else:
            await ctx.send(_("Bot owners will no longer bypass all commands with cooldowns."))
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/dev_commands.py
| 0.723407 | 0.385519 |
dev_commands.py
|
pypi
|
import enum
from typing import Optional, Type
from .. import data_manager
from .base import IdentifierData, BaseDriver, ConfigCategory
from .json import JsonDriver
from .postgres import PostgresDriver
# Public API of the drivers package; the dead Mongo drivers are deliberately excluded.
__all__ = [
    "get_driver",
    "ConfigCategory",
    "IdentifierData",
    "BaseDriver",
    "JsonDriver",
    "PostgresDriver",
    "BackendType",
]
class BackendType(enum.Enum):
    """Represents storage backend type."""

    #: JSON storage backend.
    JSON = "JSON"
    #: Postgres storage backend.
    POSTGRES = "Postgres"
    # Dead drivers below retained for error handling: these members let
    # `get_driver` recognize a legacy backend name and raise a helpful error
    # instead of an opaque ValueError.
    MONGOV1 = "MongoDB"
    MONGO = "MongoDBV2"
# Maps each *supported* backend to its driver class; dead Mongo backends are
# intentionally absent so lookups for them fail.
_DRIVER_CLASSES = {BackendType.JSON: JsonDriver, BackendType.POSTGRES: PostgresDriver}
def _get_driver_class_include_old(storage_type: Optional[BackendType] = None) -> Type[BaseDriver]:
    """
    ONLY for use in CLI for moving data away from a no longer supported backend
    """
    if storage_type == BackendType.MONGO:
        # Imported lazily: this driver exists solely to migrate data away
        # from the dead Mongo backend.
        from ._mongo import MongoDriver

        return MongoDriver
    return get_driver_class(storage_type)
def get_driver_class(storage_type: Optional[BackendType] = None) -> Type[BaseDriver]:
    """Get the driver class for the given storage type.

    Parameters
    ----------
    storage_type : Optional[BackendType]
        The backend you want a driver class for. Omit to try to obtain
        the backend from data manager.

    Returns
    -------
    Type[BaseDriver]
        A subclass of `BaseDriver`.

    Raises
    ------
    ValueError
        If there is no driver for the given storage type.
    """
    if storage_type is None:
        # Fall back to whatever backend the data manager was configured with.
        storage_type = BackendType(data_manager.storage_type())
    driver_cls = _DRIVER_CLASSES.get(storage_type)
    if driver_cls is None:
        # Dead backends (Mongo) are not in the mapping and land here.
        raise ValueError(f"No driver found for storage type {storage_type}")
    return driver_cls
def get_driver(
    cog_name: str,
    identifier: str,
    storage_type: Optional[BackendType] = None,
    *,
    allow_old: bool = False,
    **kwargs,
):
    """Get a driver instance.

    Parameters
    ----------
    cog_name : str
        The cog's name.
    identifier : str
        The cog's discriminator.
    storage_type : Optional[BackendType]
        The backend you want a driver for. Omit to try to obtain the
        backend from data manager.
    allow_old : bool
        Set to ``True`` to also allow instantiating drivers for dead
        backends (intended only for CLI data migration).
    **kwargs
        Driver-specific keyword arguments.

    Returns
    -------
    BaseDriver
        A driver instance.

    Raises
    ------
    RuntimeError
        If the storage type is MongoV1, Mongo, or invalid.
    """
    if storage_type is None:
        try:
            storage_type = BackendType(data_manager.storage_type())
        except RuntimeError:
            # Data manager not initialized yet; default to the JSON backend.
            storage_type = BackendType.JSON
    try:
        if not allow_old:
            driver_cls: Type[BaseDriver] = get_driver_class(storage_type)
        else:
            driver_cls: Type[BaseDriver] = _get_driver_class_include_old(storage_type)
    except ValueError:
        if storage_type in (BackendType.MONGOV1, BackendType.MONGO):
            # Bug fix: the two literals previously concatenated without a
            # separating space, rendering as "...the bot.Mongo support...".
            raise RuntimeError(
                "Please convert to JSON first to continue using the bot."
                " Mongo support was removed in 3.2."
            ) from None
        else:
            raise RuntimeError(f"Invalid driver type: '{storage_type}'") from None
    return driver_cls(cog_name, identifier, **kwargs)
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/drivers/__init__.py
| 0.849878 | 0.203272 |
__init__.py
|
pypi
|
import asyncio
import contextlib
import functools
from typing import Iterable, List, Union
import discord
from .. import commands
from .predicates import ReactionPredicate
# Type alias: anything `Message.add_reaction` accepts here (unicode str or custom emoji).
_ReactableEmoji = Union[str, discord.Emoji]
async def menu(
    ctx: commands.Context,
    pages: Union[List[str], List[discord.Embed]],
    controls: dict,
    message: discord.Message = None,
    page: int = 0,
    timeout: float = 30.0,
):
    """
    An emoji-based menu

    .. note:: All pages should be of the same type

    .. note:: All functions for handling what a particular emoji does
        should be coroutines (i.e. :code:`async def`). Additionally,
        they must take all of the parameters of this function, in
        addition to a string representing the emoji reacted with.
        This parameter should be the last one, and none of the
        parameters in the handling functions are optional

    Parameters
    ----------
    ctx: commands.Context
        The command context
    pages: `list` of `str` or `discord.Embed`
        The pages of the menu.
    controls: dict
        A mapping of emoji to the function which handles the action for the
        emoji.
    message: discord.Message
        The message representing the menu. Usually :code:`None` when first opening
        the menu
    page: int
        The current page number of the menu
    timeout: float
        The time (in seconds) to wait for a reaction
    Raises
    ------
    RuntimeError
        If either of the notes above are violated
    """
    # Validate page types: all str or all Embed, never a mixture.
    if not isinstance(pages[0], (discord.Embed, str)):
        raise RuntimeError("Pages must be of type discord.Embed or str")
    if not all(isinstance(x, discord.Embed) for x in pages) and not all(
        isinstance(x, str) for x in pages
    ):
        raise RuntimeError("All pages must be of the same type")
    # Every control handler must be a coroutine function; unwrap partials
    # so the check inspects the underlying callable.
    for key, value in controls.items():
        maybe_coro = value
        if isinstance(value, functools.partial):
            maybe_coro = value.func
        if not asyncio.iscoroutinefunction(maybe_coro):
            raise RuntimeError("Function must be a coroutine")
    current_page = pages[page]
    if not message:
        # First open: send the menu message and schedule the control reactions.
        if isinstance(current_page, discord.Embed):
            message = await ctx.send(embed=current_page)
        else:
            message = await ctx.send(current_page)
        # Don't wait for reactions to be added (GH-1797)
        # noinspection PyAsyncCall
        start_adding_reactions(message, controls.keys())
    else:
        # Subsequent call (page turn): edit the existing menu message in place.
        try:
            if isinstance(current_page, discord.Embed):
                await message.edit(embed=current_page)
            else:
                await message.edit(content=current_page)
        except discord.NotFound:
            # Menu message was deleted out from under us; stop silently.
            return
    try:
        predicates = ReactionPredicate.with_emojis(tuple(controls.keys()), message, ctx.author)
        # Race reaction_add against reaction_remove so toggling a reaction
        # either way turns the page; whichever fires first wins.
        tasks = [
            asyncio.ensure_future(ctx.bot.wait_for("reaction_add", check=predicates)),
            asyncio.ensure_future(ctx.bot.wait_for("reaction_remove", check=predicates)),
        ]
        done, pending = await asyncio.wait(
            tasks, timeout=timeout, return_when=asyncio.FIRST_COMPLETED
        )
        # Cancel the losing waiter so it doesn't leak.
        for task in pending:
            task.cancel()
        if len(done) == 0:
            raise asyncio.TimeoutError()
        react, user = done.pop().result()
    except asyncio.TimeoutError:
        # Timed out: clean the control reactions off the message.
        if not ctx.me:
            return
        try:
            if message.channel.permissions_for(ctx.me).manage_messages:
                await message.clear_reactions()
            else:
                # No manage_messages perm; fall through to per-emoji removal.
                raise RuntimeError
        except (discord.Forbidden, RuntimeError):  # cannot remove all reactions
            for key in controls.keys():
                try:
                    await message.remove_reaction(key, ctx.bot.user)
                except discord.Forbidden:
                    return
                except discord.HTTPException:
                    pass
        except discord.NotFound:
            return
    else:
        # Dispatch to the handler for the reacted emoji; handlers typically
        # recurse back into `menu` to keep the session alive.
        return await controls[react.emoji](
            ctx, pages, controls, message, page, timeout, react.emoji
        )
async def next_page(
    ctx: commands.Context,
    pages: list,
    controls: dict,
    message: discord.Message,
    page: int,
    timeout: float,
    emoji: str,
):
    """Menu control: advance one page, wrapping from the last page to the first."""
    last_index = len(pages) - 1
    page = 0 if page == last_index else page + 1
    return await menu(ctx, pages, controls, message=message, page=page, timeout=timeout)
async def prev_page(
    ctx: commands.Context,
    pages: list,
    controls: dict,
    message: discord.Message,
    page: int,
    timeout: float,
    emoji: str,
):
    """Menu control: go back one page, wrapping from the first page to the last."""
    page = len(pages) - 1 if page == 0 else page - 1
    return await menu(ctx, pages, controls, message=message, page=page, timeout=timeout)
async def close_menu(
    ctx: commands.Context,
    pages: list,
    controls: dict,
    message: discord.Message,
    page: int,
    timeout: float,
    emoji: str,
):
    """Menu control: delete the menu message, ignoring it if it's already gone."""
    try:
        await message.delete()
    except discord.NotFound:
        pass
def start_adding_reactions(
    message: discord.Message, emojis: Iterable[_ReactableEmoji]
) -> asyncio.Task:
    """Start adding reactions to a message.

    This is a non-blocking operation - calling this will schedule the
    reactions being added, but the calling code will continue to
    execute asynchronously. There is no need to await this function.

    This is particularly useful if you wish to start waiting for a
    reaction whilst the reactions are still being added - in fact,
    this is exactly what `menu` uses to do that.

    Parameters
    ----------
    message: discord.Message
        The message to add reactions to.
    emojis : Iterable[Union[str, discord.Emoji]]
        The emojis to react to the message with.

    Returns
    -------
    asyncio.Task
        The task for the coroutine adding the reactions.

    """

    async def _add_all():
        # Exit silently if the message is deleted while we're still reacting.
        with contextlib.suppress(discord.NotFound):
            for emoji in emojis:
                await message.add_reaction(emoji)

    return asyncio.create_task(_add_all())
# Default emoji -> handler mapping for `menu`: previous page, close, next page.
DEFAULT_CONTROLS = {
    "\N{LEFTWARDS BLACK ARROW}\N{VARIATION SELECTOR-16}": prev_page,
    "\N{CROSS MARK}": close_menu,
    "\N{BLACK RIGHTWARDS ARROW}\N{VARIATION SELECTOR-16}": next_page,
}
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/utils/menus.py
| 0.749179 | 0.303125 |
menus.py
|
pypi
|
from __future__ import annotations
import re
from typing import Callable, ClassVar, List, Optional, Pattern, Sequence, Tuple, Union, cast
import discord
from redbot.core import commands
# Patterns for 15-20 digit numeric IDs and the <@...>, <#...>, <@&...> mention
# syntaxes; all are anchored so trailing junk fails the match.
_ID_RE = re.compile(r"([0-9]{15,20})$")
_USER_MENTION_RE = re.compile(r"<@!?([0-9]{15,20})>$")
_CHAN_MENTION_RE = re.compile(r"<#([0-9]{15,20})>$")
_ROLE_MENTION_RE = re.compile(r"<@&([0-9]{15,20})>$")
class MessagePredicate(Callable[[discord.Message], bool]):
    """A simple collection of predicates for message events.

    These predicates intend to help simplify checks in message events
    and reduce boilerplate code.

    This class should be created through the provided classmethods.
    Instances of this class are callable message predicates, i.e. they
    return ``True`` if a message matches the criteria.

    All predicates are combined with :meth:`MessagePredicate.same_context`.

    Examples
    --------
    Waiting for a response in the same channel and from the same
    author::

        await bot.wait_for("message", check=MessagePredicate.same_context(ctx))

    Waiting for a response to a yes or no question::

        pred = MessagePredicate.yes_or_no(ctx)
        await bot.wait_for("message", check=pred)
        if pred.result is True:
            # User responded "yes"
            ...

    Getting a member object from a user's response::

        pred = MessagePredicate.valid_member(ctx)
        await bot.wait_for("message", check=pred)
        member = pred.result

    Attributes
    ----------
    result : Any
        The object which the message content matched with. This is
        dependent on the predicate used - see each predicate's
        documentation for details, not every method will assign this
        attribute. Defaults to ``None``.

    """

    def __init__(self, predicate: Callable[["MessagePredicate", discord.Message], bool]) -> None:
        # The wrapped check receives this instance first so it can store a
        # value in `self.result` as a side effect of matching.
        self._pred: Callable[["MessagePredicate", discord.Message], bool] = predicate
        self.result = None

    def __call__(self, message: discord.Message) -> bool:
        # Delegate to the wrapped predicate, passing ourselves for result storage.
        return self._pred(self, message)
@classmethod
def same_context(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the message fits the described context.

    Parameters
    ----------
    ctx : Optional[Context]
        The current invocation context.
    channel : Optional[discord.TextChannel]
        The channel we expect a message in. If unspecified,
        defaults to ``ctx.channel``. If ``ctx`` is unspecified
        too, the message's channel will be ignored.
    user : Optional[discord.abc.User]
        The user we expect a message from. If unspecified,
        defaults to ``ctx.author``. If ``ctx`` is unspecified
        too, the message's author will be ignored.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    # Fill in channel/user from the context when one is given; explicit
    # arguments take precedence over the context's values.
    if ctx is not None:
        channel = channel or ctx.channel
        user = user or ctx.author
    # An unspecified criterion (still None) matches anything.
    return cls(
        lambda self, m: (user is None or user.id == m.author.id)
        and (channel is None or channel.id == m.channel.id)
    )
@classmethod
def cancelled(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the message is ``[p]cancel``.

    Parameters
    ----------
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)
    # NOTE(review): the lambda reads ``ctx.prefix``, so despite the Optional
    # annotation this predicate will raise AttributeError at match time if
    # ``ctx`` is None — confirm callers always pass a context here.
    return cls(
        lambda self, m: (same_context(m) and m.content.lower() == f"{ctx.prefix}cancel")
    )
@classmethod
def yes_or_no(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the message is "yes"/"y" or "no"/"n".

    This will assign ``True`` for *yes*, or ``False`` for *no* to
    the `result` attribute.

    Parameters
    ----------
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        # Case-insensitive; anything other than the four accepted words fails.
        content = m.content.lower()
        if content in ("yes", "y"):
            self.result = True
        elif content in ("no", "n"):
            self.result = False
        else:
            return False
        return True

    return cls(predicate)
@classmethod
def valid_int(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response is an integer.

    Assigns the response to `result` as an `int`.

    Parameters
    ----------
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        # EAFP: let int() decide what parses; non-numeric content fails.
        try:
            self.result = int(m.content)
        except ValueError:
            return False
        else:
            return True

    return cls(predicate)
@classmethod
def valid_float(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response is a float.

    Assigns the response to `result` as a `float`.

    Parameters
    ----------
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        # EAFP: float() accepts ints, decimals, scientific notation, inf/nan.
        try:
            self.result = float(m.content)
        except ValueError:
            return False
        else:
            return True

    return cls(predicate)
@classmethod
def positive(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response is a positive number.

    Assigns the response to `result` as a `float`.

    Parameters
    ----------
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        try:
            number = float(m.content)
        except ValueError:
            return False
        else:
            # Strictly positive: zero and negatives are rejected.
            if number > 0:
                self.result = number
                return True
            else:
                return False

    return cls(predicate)
@classmethod
def valid_role(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[discord.TextChannel] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response refers to a role in the current guild.

    Assigns the matching `discord.Role` object to `result`.

    This predicate cannot be used in DM.

    Parameters
    ----------
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)
    # Resolve the guild once up front from whichever argument provides it.
    guild = cls._get_guild(ctx, channel, cast(discord.Member, user))

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        # Accepts a role ID, role mention, or exact role name (see _find_role).
        role = self._find_role(guild, m.content)
        if role is None:
            return False
        self.result = role
        return True

    return cls(predicate)
@classmethod
def valid_member(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[discord.TextChannel] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response refers to a member in the current guild.

    Assigns the matching `discord.Member` object to `result`.

    This predicate cannot be used in DM.

    Parameters
    ----------
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)
    guild = cls._get_guild(ctx, channel, cast(discord.Member, user))

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        # Try a raw ID or user mention first; fall back to a name lookup.
        match = _ID_RE.match(m.content) or _USER_MENTION_RE.match(m.content)
        if match:
            result = guild.get_member(int(match.group(1)))
        else:
            result = guild.get_member_named(m.content)
        if result is None:
            return False
        self.result = result
        return True

    return cls(predicate)
@classmethod
def valid_text_channel(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[discord.TextChannel] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response refers to a text channel in the current guild.

    Assigns the matching `discord.TextChannel` object to `result`.

    This predicate cannot be used in DM.

    Parameters
    ----------
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)
    guild = cls._get_guild(ctx, channel, cast(discord.Member, user))

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        # Try a raw ID or channel mention first; fall back to a name lookup.
        match = _ID_RE.match(m.content) or _CHAN_MENTION_RE.match(m.content)
        if match:
            result = guild.get_channel(int(match.group(1)))
        else:
            result = discord.utils.get(guild.text_channels, name=m.content)
        # get_channel may return a non-text channel; only TextChannel matches.
        if not isinstance(result, discord.TextChannel):
            return False
        self.result = result
        return True

    return cls(predicate)
@classmethod
def has_role(
    cls,
    ctx: Optional[commands.Context] = None,
    channel: Optional[discord.TextChannel] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response refers to a role which the author has.

    Assigns the matching `discord.Role` object to `result`.

    One of ``user`` or ``ctx`` must be supplied. This predicate
    cannot be used in DM.

    Parameters
    ----------
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)
    guild = cls._get_guild(ctx, channel, cast(discord.Member, user))
    # The role-membership check below needs a concrete user.
    if user is None:
        if ctx is None:
            raise TypeError(
                "One of `user` or `ctx` must be supplied to `MessagePredicate.has_role`."
            )
        user = ctx.author

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        role = self._find_role(guild, m.content)
        # The role must both exist and be one the user actually has.
        if role is None or role not in user.roles:
            return False
        self.result = role
        return True

    return cls(predicate)
@classmethod
def equal_to(
    cls,
    value: str,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response is equal to the specified value.

    Parameters
    ----------
    value : str
        The value to compare the response with.
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: "MessagePredicate", m: discord.Message) -> bool:
        # Exact, case-sensitive comparison against the given value.
        return same_context(m) and m.content == value

    return cls(predicate)
@classmethod
def lower_equal_to(
    cls,
    value: str,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response *as lowercase* is equal to the specified value.

    Parameters
    ----------
    value : str
        The value to compare the response with.
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: "MessagePredicate", m: discord.Message) -> bool:
        # The response is lowercased; `value` is compared as given.
        return same_context(m) and m.content.lower() == value

    return cls(predicate)
@classmethod
def less(
    cls,
    value: Union[int, float],
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response is less than the specified value.

    Parameters
    ----------
    value : Union[int, float]
        The value to compare the response with.
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    valid_int = cls.valid_int(ctx, channel, user)
    valid_float = cls.valid_float(ctx, channel, user)

    def predicate(self: "MessagePredicate", m: discord.Message) -> bool:
        # Only numeric responses (in the right context) are compared.
        if not (valid_int(m) or valid_float(m)):
            return False
        return float(m.content) < value

    return cls(predicate)
@classmethod
def greater(
    cls,
    value: Union[int, float],
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response is greater than the specified value.

    Parameters
    ----------
    value : Union[int, float]
        The value to compare the response with.
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    valid_int = cls.valid_int(ctx, channel, user)
    valid_float = cls.valid_float(ctx, channel, user)

    def predicate(self: "MessagePredicate", m: discord.Message) -> bool:
        # Only numeric responses (in the right context) are compared.
        if not (valid_int(m) or valid_float(m)):
            return False
        return float(m.content) > value

    return cls(predicate)
@classmethod
def length_less(
    cls,
    length: int,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response's length is less than or equal to the specified length.

    The comparison is inclusive (``<=``); the method name is kept for
    backwards compatibility.

    Parameters
    ----------
    length : int
        The value to compare the response's length with.
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: "MessagePredicate", m: discord.Message) -> bool:
        return same_context(m) and len(m.content) <= length

    return cls(predicate)
@classmethod
def length_greater(
    cls,
    length: int,
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response's length is greater than or equal to the specified length.

    The comparison is inclusive (``>=``); the method name is kept for
    backwards compatibility.

    Parameters
    ----------
    length : int
        The value to compare the response's length with.
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: "MessagePredicate", m: discord.Message) -> bool:
        return same_context(m) and len(m.content) >= length

    return cls(predicate)
@classmethod
def contained_in(
    cls,
    collection: Sequence[str],
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response is contained in the specified collection.

    The index of the response in the ``collection`` sequence is
    assigned to the `result` attribute.

    Parameters
    ----------
    collection : Sequence[str]
        The collection containing valid responses.
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        # `result` becomes the INDEX in the collection, not the string itself;
        # Sequence.index raises ValueError when the content isn't present.
        try:
            self.result = collection.index(m.content)
        except ValueError:
            return False
        else:
            return True

    return cls(predicate)
@classmethod
def lower_contained_in(
    cls,
    collection: Sequence[str],
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Same as :meth:`contained_in`, but the response is set to lowercase before matching.

    Parameters
    ----------
    collection : Sequence[str]
        The collection containing valid lowercase responses.
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        # Only the response is lowercased; collection entries are matched as given.
        try:
            self.result = collection.index(m.content.lower())
        except ValueError:
            return False
        else:
            return True

    return cls(predicate)
@classmethod
def regex(
    cls,
    pattern: Union[Pattern[str], str],
    ctx: Optional[commands.Context] = None,
    channel: Optional[Union[discord.TextChannel, discord.DMChannel]] = None,
    user: Optional[discord.abc.User] = None,
) -> "MessagePredicate":
    """Match if the response matches the specified regex pattern.

    This predicate will use `re.search` to find a match. The
    resulting `match object <match-objects>` will be assigned
    to `result`.

    Parameters
    ----------
    pattern : Union[`pattern object <re-objects>`, str]
        The pattern to search for in the response.
    ctx : Optional[Context]
        Same as ``ctx`` in :meth:`same_context`.
    channel : Optional[discord.TextChannel]
        Same as ``channel`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    MessagePredicate
        The event predicate.

    """
    same_context = cls.same_context(ctx, channel, user)
    # Compile once up front instead of on every message the predicate sees
    # (the original recompiled string patterns inside the predicate).
    if isinstance(pattern, str):
        pattern_obj: Pattern[str] = re.compile(pattern)
    else:
        pattern_obj = pattern

    def predicate(self: MessagePredicate, m: discord.Message) -> bool:
        if not same_context(m):
            return False
        match = pattern_obj.search(m.content)
        if match:
            self.result = match
            return True
        return False

    return cls(predicate)
@staticmethod
def _find_role(guild: discord.Guild, argument: str) -> Optional[discord.Role]:
    # Accept a raw role ID or a role mention first; otherwise fall back to an
    # exact (case-sensitive) role-name lookup. Returns None when nothing matches.
    match = _ID_RE.match(argument) or _ROLE_MENTION_RE.match(argument)
    if match:
        result = guild.get_role(int(match.group(1)))
    else:
        result = discord.utils.get(guild.roles, name=argument)
    return result
@staticmethod
def _get_guild(
    ctx: commands.Context, channel: discord.TextChannel, user: discord.Member
) -> discord.Guild:
    # Resolve the guild from the first available source, in priority order.
    # NOTE(review): implicitly returns None when all three arguments are None —
    # callers appear to rely on at least one being supplied.
    if ctx is not None:
        return ctx.guild
    elif channel is not None:
        return channel.guild
    elif user is not None:
        return user.guild
class ReactionPredicate(Callable[[discord.Reaction, discord.abc.User], bool]):
    """A collection of predicates for reaction events.

    All checks are combined with :meth:`ReactionPredicate.same_context`.

    Examples
    --------
    Confirming a yes/no question with a tick/cross reaction::

        from redbot.core.utils.predicates import ReactionPredicate
        from redbot.core.utils.menus import start_adding_reactions

        msg = await ctx.send("Yes or no?")
        start_adding_reactions(msg, ReactionPredicate.YES_OR_NO_EMOJIS)

        pred = ReactionPredicate.yes_or_no(msg, ctx.author)
        await ctx.bot.wait_for("reaction_add", check=pred)
        if pred.result is True:
            # User responded with tick
            ...
        else:
            # User responded with cross
            ...

    Waiting for the first reaction from any user with one of the first
    5 letters of the alphabet::

        from redbot.core.utils.predicates import ReactionPredicate
        from redbot.core.utils.menus import start_adding_reactions

        msg = await ctx.send("React to me!")
        emojis = ReactionPredicate.ALPHABET_EMOJIS[:5]
        start_adding_reactions(msg, emojis)

        pred = ReactionPredicate.with_emojis(emojis, msg)
        await ctx.bot.wait_for("reaction_add", check=pred)
        # pred.result is now the index of the letter in `emojis`

    Attributes
    ----------
    result : Any
        The object which the reaction matched with. This is
        dependent on the predicate used - see each predicate's
        documentation for details, not every method will assign this
        attribute. Defaults to ``None``.

    """

    YES_OR_NO_EMOJIS: ClassVar[Tuple[str, str]] = (
        "\N{WHITE HEAVY CHECK MARK}",
        "\N{NEGATIVE SQUARED CROSS MARK}",
    )
    """Tuple[str, str] : A tuple containing the tick emoji and cross emoji, in that order."""

    ALPHABET_EMOJIS: ClassVar[List[str]] = [
        chr(code)
        for code in range(
            ord("\N{REGIONAL INDICATOR SYMBOL LETTER A}"),
            ord("\N{REGIONAL INDICATOR SYMBOL LETTER Z}") + 1,
        )
    ]
    """List[str] : A list of all 26 alphabetical letter emojis."""

    NUMBER_EMOJIS: ClassVar[List[str]] = [
        chr(code) + "\N{COMBINING ENCLOSING KEYCAP}" for code in range(ord("0"), ord("9") + 1)
    ]
    """List[str] : A list of all single-digit number emojis, 0 through 9."""

    def __init__(
        self, predicate: Callable[["ReactionPredicate", discord.Reaction, discord.abc.User], bool]
    ) -> None:
        # The wrapped check receives this instance first so it can store a
        # value in `self.result` as a side effect of matching.
        self._pred: Callable[
            ["ReactionPredicate", discord.Reaction, discord.abc.User], bool
        ] = predicate
        self.result = None

    def __call__(self, reaction: discord.Reaction, user: discord.abc.User) -> bool:
        # Delegate to the wrapped predicate, passing ourselves for result storage.
        return self._pred(self, reaction, user)
# noinspection PyUnusedLocal
@classmethod
def same_context(
    cls, message: Optional[discord.Message] = None, user: Optional[discord.abc.User] = None
) -> "ReactionPredicate":
    """Match if a reaction fits the described context.

    This will ignore reactions added by the bot user, regardless
    of whether or not ``user`` is supplied.

    Parameters
    ----------
    message : Optional[discord.Message]
        The message which we expect a reaction to. If unspecified,
        the reaction's message will be ignored.
    user : Optional[discord.abc.User]
        The user we expect to react. If unspecified, the user who
        added the reaction will be ignored.

    Returns
    -------
    ReactionPredicate
        The event predicate.

    """
    # noinspection PyProtectedMember
    # NOTE(review): reads ``message._state`` to learn the bot's own user ID,
    # so despite the Optional annotation this raises AttributeError when
    # ``message`` is None — confirm callers always pass a message.
    me_id = message._state.self_id
    return cls(
        lambda self, r, u: u.id != me_id
        and (message is None or r.message.id == message.id)
        and (user is None or u.id == user.id)
    )
@classmethod
def with_emojis(
    cls,
    emojis: Sequence[Union[str, discord.Emoji, discord.PartialEmoji]],
    message: Optional[discord.Message] = None,
    user: Optional[discord.abc.User] = None,
) -> "ReactionPredicate":
    """Match if the reaction is one of the specified emojis.

    Parameters
    ----------
    emojis : Sequence[Union[str, discord.Emoji, discord.PartialEmoji]]
        The emojis of which one we expect to be reacted.
    message : discord.Message
        Same as ``message`` in :meth:`same_context`.
    user : Optional[discord.abc.User]
        Same as ``user`` in :meth:`same_context`.

    Returns
    -------
    ReactionPredicate
        The event predicate.

    """
    same_context = cls.same_context(message, user)

    def predicate(self: ReactionPredicate, r: discord.Reaction, u: discord.abc.User):
        if not same_context(r, u):
            return False
        # `result` becomes the INDEX of the reacted emoji within `emojis`;
        # Sequence.index raises ValueError when the emoji isn't present.
        try:
            self.result = emojis.index(r.emoji)
        except ValueError:
            return False
        else:
            return True

    return cls(predicate)
@classmethod
def yes_or_no(
cls, message: Optional[discord.Message] = None, user: Optional[discord.abc.User] = None
) -> "ReactionPredicate":
"""Match if the reaction is a tick or cross emoji.
The emojis used are in
`ReactionPredicate.YES_OR_NO_EMOJIS`.
This will assign ``True`` for *yes*, or ``False`` for *no* to
the `result` attribute.
Parameters
----------
message : discord.Message
Same as ``message`` in :meth:`same_context`.
user : Optional[discord.abc.User]
Same as ``user`` in :meth:`same_context`.
Returns
-------
ReactionPredicate
The event predicate.
"""
same_context = cls.same_context(message, user)
def predicate(self: ReactionPredicate, r: discord.Reaction, u: discord.abc.User) -> bool:
if not same_context(r, u):
return False
try:
self.result = not bool(self.YES_OR_NO_EMOJIS.index(r.emoji))
except ValueError:
return False
else:
return True
return cls(predicate)
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/utils/predicates.py
| 0.930411 | 0.292873 |
predicates.py
|
pypi
|
import asyncio
import warnings
from datetime import timedelta
from typing import List, Iterable, Union, TYPE_CHECKING, Dict
import discord
if TYPE_CHECKING:
from .. import Config
from ..bot import Red
from ..commands import Context
async def mass_purge(messages: List[discord.Message], channel: discord.TextChannel):
    """Bulk delete messages from a channel.

    If more than 100 messages are supplied, the bot will delete 100
    messages at a time, sleeping between each action.

    Note
    ----
    Messages must not be older than 14 days, and the bot must not be a
    user account.

    Parameters
    ----------
    messages : `list` of `discord.Message`
        The messages to bulk delete.
    channel : discord.TextChannel
        The channel to delete messages from.

    Raises
    ------
    discord.Forbidden
        You do not have proper permissions to delete the messages or
        you're not using a bot account.
    discord.HTTPException
        Deleting the messages failed.
    """
    remaining = messages
    while remaining:
        batch, remaining = remaining[:100], remaining[100:]
        # discord.NotFound can be raised when a single message in the batch
        # no longer exists; such obscure failures are deliberately swallowed
        # so the purge keeps going.
        try:
            await channel.delete_messages(batch)
        except discord.errors.HTTPException:
            pass
        # Pause between bulk deletions to stay clear of rate limits.
        await asyncio.sleep(1.5)
async def slow_deletion(messages: Iterable[discord.Message]):
    """Delete a list of messages one at a time.

    Any exceptions raised when trying to delete a message are silenced.

    Parameters
    ----------
    messages : `iterable` of `discord.Message`
        The messages to delete.
    """
    for msg in messages:
        # Best-effort: a message may already be gone or be undeletable.
        try:
            await msg.delete()
        except discord.HTTPException:
            pass
def get_audit_reason(author: discord.Member, reason: Union[str, None] = None, *, shorten: bool = False):
    """Construct a reason to appear in the audit log.

    Parameters
    ----------
    author : discord.Member
        The author behind the audit log action.
    reason : Optional[str]
        The reason behind the audit log action.
    shorten : bool
        When set to ``True``, the returned audit reason string will be
        shortened to fit the max length allowed by Discord audit logs.

    Returns
    -------
    str
        The formatted audit log reason.
    """
    # Fix: ``reason`` previously carried the invalid implicit-Optional
    # annotation ``str = None`` (PEP 484 requires the Optional to be spelled
    # out); behavior is unchanged.
    audit_reason = (
        "Action requested by {} (ID {}). Reason: {}".format(author, author.id, reason)
        if reason
        else "Action requested by {} (ID {}).".format(author, author.id)
    )
    # Discord caps audit log reasons at 512 characters; keep room for "...".
    if shorten and len(audit_reason) > 512:
        audit_reason = f"{audit_reason[:509]}..."
    return audit_reason
async def is_allowed_by_hierarchy(
    bot: "Red", settings: "Config", guild: discord.Guild, mod: discord.Member, user: discord.Member
):
    """Check whether ``mod`` may act on ``user`` under the guild's role hierarchy.

    .. deprecated:: 3.4.1

    Returns ``True`` when hierarchy checks are disabled for the guild, when
    ``mod`` owns the guild or the bot, or when ``mod``'s top role outranks
    ``user``'s.
    """
    # Fix: the deprecation message previously referenced "2020-11-31",
    # which is not a valid calendar date (November has 30 days).
    warnings.warn(
        "`is_allowed_by_hierarchy()` is deprecated since Red 3.4.1"
        " and will be removed in the first minor release after 2020-11-30.",
        DeprecationWarning,
        stacklevel=2,
    )
    if not await settings.guild(guild).respect_hierarchy():
        return True
    is_special = mod == guild.owner or await bot.is_owner(mod)
    return mod.top_role > user.top_role or is_special
async def is_mod_or_superior(
    bot: "Red", obj: Union[discord.Message, discord.Member, discord.Role]
):
    """Check if an object has mod or superior permissions.

    If a message is passed, its author's permissions are checked. If a role
    is passed, it simply checks if it is one of either the admin or mod
    roles.

    Parameters
    ----------
    bot : redbot.core.bot.Red
        The bot object.
    obj : `discord.Message` or `discord.Member` or `discord.Role`
        The object to check permissions for.

    Returns
    -------
    bool
        :code:`True` if the object has mod permissions.

    Raises
    ------
    TypeError
        If the wrong type of ``obj`` was passed.
    """
    if isinstance(obj, discord.Message):
        user = obj.author
    elif isinstance(obj, discord.Member):
        user = obj
    elif isinstance(obj, discord.Role):
        gid = obj.guild.id
        # Bug fix: the configured admin/mod lists contain role *IDs*, so the
        # role must be compared by ``obj.id`` — comparing the Role object
        # itself (as before) could never match. This now mirrors
        # `is_admin_or_superior` below.
        if obj.id in await bot.get_admin_role_ids(gid):
            return True
        if obj.id in await bot.get_mod_role_ids(gid):
            return True
        return False
    else:
        raise TypeError("Only messages, members or roles may be passed")
    if await bot.is_owner(user):
        return True
    if await bot.is_mod(user):
        return True
    return False
def strfdelta(delta: timedelta):
    """Format a timedelta object to a message with time units.

    Parameters
    ----------
    delta : datetime.timedelta
        The duration to parse.

    Returns
    -------
    str
        A message representing the timedelta with units, e.g.
        ``"2 days 3 hrs 4 min 5 sec"``; empty for a zero duration.
    """
    parts = []
    days = delta.days
    if days:
        # Only "day" and "hr" are pluralised, matching the historical output.
        parts.append("%i day" % days + ("s" if days > 1 else ""))
    hours, remainder = divmod(delta.seconds, 3600)
    if hours:
        parts.append("%i hr" % hours + ("s" if hours > 1 else ""))
    minutes, seconds = divmod(remainder, 60)
    if minutes:
        parts.append("%i min" % minutes)
    if seconds:
        parts.append("%i sec" % seconds)
    return " ".join(parts)
async def is_admin_or_superior(
    bot: "Red", obj: Union[discord.Message, discord.Member, discord.Role]
):
    """Same as `is_mod_or_superior` except for admin permissions.

    If a message is passed, its author's permissions are checked. If a role
    is passed, it simply checks if it is the admin role.

    Parameters
    ----------
    bot : redbot.core.bot.Red
        The bot object.
    obj : `discord.Message` or `discord.Member` or `discord.Role`
        The object to check permissions for.

    Returns
    -------
    bool
        :code:`True` if the object has admin permissions.

    Raises
    ------
    TypeError
        If the wrong type of ``obj`` was passed.
    """
    # Roles are resolved purely from the configured admin role IDs.
    if isinstance(obj, discord.Role):
        return obj.id in await bot.get_admin_role_ids(obj.guild.id)
    if isinstance(obj, discord.Message):
        user = obj.author
    elif isinstance(obj, discord.Member):
        user = obj
    else:
        raise TypeError("Only messages, members or roles may be passed")
    if await bot.is_owner(user) or await bot.is_admin(user):
        return True
    return False
async def check_permissions(ctx: "Context", perms: Dict[str, bool]) -> bool:
    """Check if the author has the required permissions.

    This will always return ``True`` if the author is a bot owner, or
    has the ``administrator`` permission. If ``perms`` is empty, this
    will only check if the user is a bot owner.

    Parameters
    ----------
    ctx : Context
        The command invocation context to check.
    perms : Dict[str, bool]
        A dictionary mapping permissions to their required states.
        Valid permission names are those listed as properties of
        the `discord.Permissions` class.

    Returns
    -------
    bool
        ``True`` if the author has the required permissions.
    """
    if await ctx.bot.is_owner(ctx.author):
        return True
    if not perms:
        # Non-owners never pass an empty requirement set.
        return False
    resolved = ctx.channel.permissions_for(ctx.author)
    requirements_met = (
        getattr(resolved, name, None) == value for name, value in perms.items()
    )
    return resolved.administrator or all(requirements_met)
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/utils/mod.py
| 0.866217 | 0.313972 |
mod.py
|
pypi
|
from __future__ import annotations
import asyncio
import collections.abc
import contextlib
import json
import logging
import os
import re
import shutil
import tarfile
import warnings
from datetime import datetime
from pathlib import Path
from typing import (
AsyncIterable,
AsyncIterator,
Awaitable,
Callable,
Generator,
Iterable,
Iterator,
List,
Optional,
Union,
TypeVar,
TYPE_CHECKING,
Tuple,
cast,
)
import aiohttp
import discord
import pkg_resources
from fuzzywuzzy import fuzz, process
from rich.progress import ProgressColumn
from rich.progress_bar import ProgressBar
from redbot import VersionInfo
from redbot.core import data_manager
from redbot.core.utils.chat_formatting import box
if TYPE_CHECKING:
from redbot.core.bot import Red
from redbot.core.commands import Command, Context
main_log = logging.getLogger("red")
__all__ = (
"safe_delete",
"fuzzy_command_search",
"format_fuzzy_results",
"create_backup",
"send_to_owners_with_preprocessor",
"send_to_owners_with_prefix_replaced",
"expected_version",
"fetch_latest_red_version_info",
"deprecated_removed",
"RichIndefiniteBarColumn",
)
_T = TypeVar("_T")
def safe_delete(pth: Path):
    """Recursively delete ``pth``, forcing every entry owner-writable first.

    Read-only files would otherwise make the removal fail on some
    platforms; any remaining errors during ``rmtree`` are ignored.
    """
    if not pth.exists():
        return
    # Grant rwx to the owner on every directory and file before deleting.
    for root, dirs, files in os.walk(str(pth)):
        os.chmod(root, 0o700)
        for name in (*dirs, *files):
            os.chmod(os.path.join(root, name), 0o700)
    shutil.rmtree(str(pth), ignore_errors=True)
def _fuzzy_log_filter(record):
    # Drop log records emitted from fuzzywuzzy's ``extractWithoutOrder``
    # helper, which logs a warning for every fuzzy-match attempt.
    return record.funcName != "extractWithoutOrder"
# Installed on the root logger so the filter applies process-wide.
logging.getLogger().addFilter(_fuzzy_log_filter)
async def fuzzy_command_search(
    ctx: Context,
    term: Optional[str] = None,
    *,
    commands: Optional[Union[AsyncIterator[Command], Iterator[Command]]] = None,
    min_score: int = 80,
) -> Optional[List[Command]]:
    """Search for commands which are similar in name to the one invoked.

    Returns a maximum of 5 commands which must all be at least matched
    greater than ``min_score``.

    Parameters
    ----------
    ctx : `commands.Context <redbot.core.commands.Context>`
        The command invocation context.
    term : Optional[str]
        The name of the invoked command. If ``None``,
        `Context.invoked_with` will be used instead.
    commands : Optional[Union[AsyncIterator[commands.Command], Iterator[commands.Command]]]
        The commands available to choose from when doing a fuzzy match.
        When omitted, `Bot.walk_commands` will be used instead.
    min_score : int
        The minimum score for matched commands to reach. Defaults to 80.

    Returns
    -------
    Optional[List[`commands.Command <redbot.core.commands.Command>`]]
        A list of commands which were fuzzily matched with the invoked
        command, or ``None`` when fuzzy search is disabled or suppressed.
    """
    # Fuzzy search can be disabled per guild, or globally for DMs.
    if ctx.guild is not None:
        enabled = await ctx.bot._config.guild(ctx.guild).fuzzy()
    else:
        enabled = await ctx.bot._config.fuzzy()
    if not enabled:
        return None
    if term is None:
        term = ctx.invoked_with
    # If the term is an alias or CC, we don't want to send a supplementary fuzzy search.
    alias_cog = ctx.bot.get_cog("Alias")
    if alias_cog is not None:
        alias = await alias_cog._aliases.get_alias(ctx.guild, term)
        if alias:
            return None
    customcom_cog = ctx.bot.get_cog("CustomCommands")
    if customcom_cog is not None:
        cmd_obj = customcom_cog.commandobj
        # Bug fix: this used a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt; only genuine lookup failures
        # should be treated as "not a custom command".
        try:
            await cmd_obj.get(ctx.message, term)
        except Exception:
            pass
        else:
            return None
    if commands is None:
        choices = set(ctx.bot.walk_commands())
    elif isinstance(commands, collections.abc.AsyncIterator):
        choices = {c async for c in commands}
    else:
        choices = set(commands)
    # Do the scoring. `extracted` is a list of tuples in the form `(command, score)`
    extracted = process.extract(term, choices, limit=5, scorer=fuzz.QRatio)
    if not extracted:
        return None
    # Filter through the fuzzy-matched commands.
    matched_commands = []
    for command, score in extracted:
        if score < min_score:
            # Since the list is in decreasing order of score, we can exit early.
            break
        if await command.can_see(ctx):
            matched_commands.append(command)
    return matched_commands
async def format_fuzzy_results(
    ctx: Context, matched_commands: List[Command], *, embed: Optional[bool] = None
) -> Union[str, discord.Embed]:
    """Format the result of a fuzzy command search.

    Parameters
    ----------
    ctx : `commands.Context <redbot.core.commands.Context>`
        The context in which this result is being displayed.
    matched_commands : List[`commands.Command <redbot.core.commands.Command>`]
        A list of commands which have been matched by the fuzzy search,
        sorted in order of decreasing similarity.
    embed : bool
        Whether or not the result should be an embed. If ``None``, this
        defaults to the result of `ctx.embed_requested`.

    Returns
    -------
    Union[str, discord.Embed]
        The formatted results.
    """
    if embed is not False and (embed is True or await ctx.embed_requested()):
        description = "\n".join(
            f"**{ctx.clean_prefix}{cmd.qualified_name}** {cmd.format_shortdoc_for_context(ctx)}"
            for cmd in matched_commands
        )
        return discord.Embed(
            title="Perhaps you wanted one of these?",
            colour=await ctx.embed_colour(),
            description=description,
        )
    entries = (
        f"{ctx.clean_prefix}{cmd.qualified_name} -- {cmd.format_shortdoc_for_context(ctx)}"
        for cmd in matched_commands
    )
    return "Perhaps you wanted one of these? " + box("\n".join(entries), lang="vhdl")
async def create_backup(dest: Path = Path.home()) -> Optional[Path]:
    """Back up the instance's data directory into a ``.tar.gz`` under ``dest``.

    Writes ``repos.json`` and ``instance.json`` snapshots into the data
    directory first, then archives every file not matching the exclusion
    list. Returns the archive path, or ``None`` if the data path is missing.

    NOTE(review): the ``dest`` default is evaluated once at import time;
    fine for ``Path.home()``, but keep in mind if this signature changes.
    """
    data_path = Path(data_manager.core_data_path().parent)
    if not data_path.exists():
        return None
    dest.mkdir(parents=True, exist_ok=True)
    timestr = datetime.utcnow().strftime("%Y-%m-%dT%H-%M-%S")
    backup_fpath = dest / f"redv3_{data_manager.instance_name}_{timestr}.tar.gz"
    to_backup = []
    # Paths containing any of these substrings are skipped: caches, bundled
    # binaries, and re-downloadable cog/repo content.
    exclusions = [
        "__pycache__",
        "Lavalink.jar",
        os.path.join("Downloader", "lib"),
        os.path.join("CogManager", "cogs"),
        os.path.join("RepoManager", "repos"),
        os.path.join("Audio", "logs"),
    ]
    # Avoiding circular imports
    from ...cogs.downloader.repo_manager import RepoManager
    repo_mgr = RepoManager()
    await repo_mgr.initialize()
    repo_output = []
    # Record each repo's URL/name/branch so excluded repo checkouts can be
    # restored after unpacking the backup.
    for repo in repo_mgr.repos:
        repo_output.append({"url": repo.url, "name": repo.name, "branch": repo.branch})
    repos_file = data_path / "cogs" / "RepoManager" / "repos.json"
    with repos_file.open("w") as fs:
        json.dump(repo_output, fs, indent=4)
    instance_file = data_path / "instance.json"
    with instance_file.open("w") as fs:
        json.dump({data_manager.instance_name: data_manager.basic_config}, fs, indent=4)
    for f in data_path.glob("**/*"):
        if not any(ex in str(f) for ex in exclusions) and f.is_file():
            to_backup.append(f)
    with tarfile.open(str(backup_fpath), "w:gz") as tar:
        for f in to_backup:
            # recursive=False: directories are never added wholesale, only
            # the individually collected files (exclusions already applied).
            tar.add(str(f), arcname=str(f.relative_to(data_path)), recursive=False)
    return backup_fpath
# this might be worth moving to `bot.send_to_owners` at later date
async def send_to_owners_with_preprocessor(
    bot: Red,
    content: str,
    *,
    content_preprocessor: Optional[
        Callable[[Red, discord.abc.Messageable, str], Awaitable[str]]
    ] = None,
    **kwargs,
):
    """
    This sends something to all owners and their configured extra destinations.

    This acts the same as `Red.send_to_owners`, with
    one added keyword argument as detailed below in *Other Parameters*.

    Other Parameters
    ----------------
    content_preprocessor: Optional[Callable[[Red, discord.abc.Messageable, str], Awaitable[str]]]
        Optional async function that takes
        bot object, owner notification destination and message content
        and returns the content that should be sent to given location.
    """
    destinations = await bot.get_owner_notification_destinations()

    async def _deliver(location):
        # Failures are logged rather than raised so one bad destination
        # cannot block notifications to the others.
        try:
            text = content
            if content_preprocessor is not None:
                text = await content_preprocessor(bot, location, text)
            await location.send(text, **kwargs)
        except Exception as exc:
            main_log.error(
                "I could not send an owner notification to %s (%s)",
                location,
                location.id,
                exc_info=exc,
            )

    await asyncio.gather(*(_deliver(loc) for loc in destinations))
async def send_to_owners_with_prefix_replaced(bot: Red, content: str, **kwargs):
    """
    This sends something to all owners and their configured extra destinations.

    This acts the same as `Red.send_to_owners`, with one addition - `[p]` in ``content`` argument
    is replaced with a clean prefix for each specific destination. Any extra
    keyword arguments are forwarded to the destination's ``send``.
    """

    async def preprocessor(bot: Red, destination: discord.abc.Messageable, content: str) -> str:
        prefixes = await bot.get_valid_prefixes(getattr(destination, "guild", None))
        # A raw bot mention prefix would render poorly; show "@BotName" instead.
        prefix = re.sub(
            rf"<@!?{bot.user.id}>", f"@{bot.user.name}".replace("\\", r"\\"), prefixes[0]
        )
        return content.replace("[p]", prefix)

    # Bug fix: ``**kwargs`` was accepted but silently dropped, making the
    # parameter dead; it is now forwarded to the underlying sender.
    await send_to_owners_with_preprocessor(
        bot, content, content_preprocessor=preprocessor, **kwargs
    )
def expected_version(current: str, expected: str) -> bool:
    """Return ``True`` if version ``current`` satisfies the specifier ``expected``."""
    # `pkg_resources` needs a full requirement string, so the dummy
    # distribution name "x" is prepended to the bare specifier.
    requirement = pkg_resources.Requirement.parse(f"x{expected}")
    return current in requirement
async def fetch_latest_red_version_info() -> Tuple[Optional[VersionInfo], Optional[str]]:
    """Query PyPI for the latest Red release and its Python requirement.

    Returns ``(None, None)`` when the request fails.
    """
    url = "https://pypi.org/pypi/Red-DiscordBot/json"
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as r:
                data = await r.json()
    except (aiohttp.ClientError, asyncio.TimeoutError):
        return None, None
    release = VersionInfo.from_str(data["info"]["version"])
    required_python = data["info"]["requires_python"]
    return release, required_python
def deprecated_removed(
    deprecation_target: str,
    deprecation_version: str,
    minimum_days: int,
    message: str = "",
    stacklevel: int = 1,
) -> None:
    """Emit a `DeprecationWarning` for ``deprecation_target``.

    The warning states the deprecating version and that removal happens in
    the first minor release at least ``minimum_days`` days later; ``message``
    is appended verbatim.
    """
    text = (
        f"{deprecation_target} is deprecated since version {deprecation_version}"
        " and will be removed in the first minor version that gets released"
        f" after {minimum_days} days since deprecation. {message}"
    )
    # +1 skips this helper's own frame so the warning points at the caller.
    warnings.warn(text, DeprecationWarning, stacklevel=stacklevel + 1)
class RichIndefiniteBarColumn(ProgressColumn):
    """Rich progress column that pulses while a task has not reached its total."""

    def render(self, task):
        """Return a `ProgressBar` renderable for ``task``.

        The bar pulses (indeterminate animation) while ``task.completed``
        is below ``task.total``; otherwise it renders as a normal bar.
        """
        return ProgressBar(
            pulse=task.completed < task.total,
            animation_time=task.get_time(),
            width=40,
            total=task.total,
            completed=task.completed,
        )
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/utils/_internal_utils.py
| 0.774839 | 0.152221 |
_internal_utils.py
|
pypi
|
import re
__all__ = [
    "URL_RE",
    "INVITE_URL_RE",
    "MASS_MENTION_RE",
    "filter_urls",
    "filter_invites",
    "filter_mass_mentions",
    "filter_various_mentions",
    "normalize_smartquotes",
    "escape_spoilers",
    "escape_spoilers_and_mass_mentions",
]
# regexes
# http(s)/ftp/sftp scheme followed by any non-whitespace run.
URL_RE = re.compile(r"(https?|s?ftp)://(\S+)", re.I)
# discord.gg/io/me/li and discord(app).com/invite links.
INVITE_URL_RE = re.compile(r"(discord\.(?:gg|io|me|li)|discord(?:app)?\.com\/invite)\/(\S+)", re.I)
MASS_MENTION_RE = re.compile(r"(@)(?=everyone|here)")  # This only matches the @ for sanitizing
# Raw user (<@id>/<@!id>), role (<@&id>) and channel (<#id>) mentions.
OTHER_MENTION_RE = re.compile(r"(<)(@[!&]?|#)(\d+>)")
SMART_QUOTE_REPLACEMENT_DICT = {
    "\u2018": "'",  # Left single quote
    "\u2019": "'",  # Right single quote
    "\u201C": '"',  # Left double quote
    "\u201D": '"',  # Right double quote
}
# Alternation over the smart-quote characters above.
SMART_QUOTE_REPLACE_RE = re.compile("|".join(SMART_QUOTE_REPLACEMENT_DICT.keys()))
# Non-greedy ||spoiler|| spans whose delimiters are not backslash-escaped;
# (?s) lets spoilers span newlines.
SPOILER_CONTENT_RE = re.compile(
    r"(?s)(?<!\\)(?P<OPEN>\|{2})(?P<SPOILERED>.*?)(?<!\\)(?P<CLOSE>\|{2})"
)
# convenience wrappers
def filter_urls(to_filter: str) -> str:
    """Get a string with URLs sanitized.

    This will match any URLs starting with these protocols:

    - ``http://``
    - ``https://``
    - ``ftp://``
    - ``sftp://``

    Parameters
    ----------
    to_filter : str
        The string to filter.

    Returns
    -------
    str
        The sanitized string.
    """
    sanitized = URL_RE.sub("[SANITIZED URL]", to_filter)
    return sanitized
def filter_invites(to_filter: str) -> str:
    """Get a string with Discord invites sanitized.

    Matches any discord.gg, discordapp.com/invite, discord.com/invite,
    discord.me, discord.io or discord.li invite URL.

    Parameters
    ----------
    to_filter : str
        The string to filter.

    Returns
    -------
    str
        The sanitized string.
    """
    sanitized = INVITE_URL_RE.sub("[SANITIZED INVITE]", to_filter)
    return sanitized
def filter_mass_mentions(to_filter: str) -> str:
    """Get a string with mass mentions sanitized.

    Matches any ``@here`` and/or ``@everyone`` mentions.

    Parameters
    ----------
    to_filter : str
        The string to filter.

    Returns
    -------
    str
        The sanitized string.
    """
    # A zero-width space after the @ stops Discord from resolving the mention.
    return MASS_MENTION_RE.sub("@\u200b", to_filter)
def filter_various_mentions(to_filter: str) -> str:
    """Get a string with role, user, and channel mentions sanitized.

    This is mainly for use on user display names, not message content,
    and should be applied sparingly.

    Parameters
    ----------
    to_filter : str
        The string to filter.

    Returns
    -------
    str
        The sanitized string.
    """
    # Escape the sigil (@/@!/@&/#) with a backslash so the mention is inert.
    return OTHER_MENTION_RE.sub(r"\1\\\2\3", to_filter)
def normalize_smartquotes(to_normalize: str) -> str:
    """Get a string with smart quotes replaced with normal ones.

    Parameters
    ----------
    to_normalize : str
        The string to normalize.

    Returns
    -------
    str
        The normalized string.
    """

    def replacement_for(obj):
        # Look up the plain-ASCII equivalent for the matched smart quote.
        return SMART_QUOTE_REPLACEMENT_DICT.get(obj.group(0), "")

    return SMART_QUOTE_REPLACE_RE.sub(replacement_for, to_normalize)
def escape_spoilers(content: str) -> str:
    """Get a string with spoiler syntax escaped.

    Parameters
    ----------
    content : str
        The string to escape.

    Returns
    -------
    str
        The escaped string.
    """
    # Backslash-escape both || delimiters, leaving the spoilered text intact.
    return SPOILER_CONTENT_RE.sub(r"\\\g<OPEN>\g<SPOILERED>\\\g<CLOSE>", content)
def escape_spoilers_and_mass_mentions(content: str) -> str:
    """Get a string with spoiler syntax and mass mentions escaped.

    Parameters
    ----------
    content : str
        The string to escape.

    Returns
    -------
    str
        The escaped string.
    """
    without_mass_mentions = filter_mass_mentions(content)
    return escape_spoilers(without_mass_mentions)
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/utils/common_filters.py
| 0.765856 | 0.329891 |
common_filters.py
|
pypi
|
from __future__ import annotations
import asyncio
import json
import logging
from asyncio import as_completed, Semaphore
from asyncio.futures import isfuture
from itertools import chain
from pathlib import Path
from typing import (
Any,
AsyncIterator,
AsyncIterable,
Awaitable,
Callable,
Iterable,
Iterator,
List,
Optional,
Tuple,
TypeVar,
Union,
Generator,
Coroutine,
)
from discord.utils import maybe_coroutine
__all__ = (
"bounded_gather",
"bounded_gather_iter",
"deduplicate_iterables",
"AsyncIter",
"get_end_user_data_statement",
"get_end_user_data_statement_or_raise",
)
log = logging.getLogger("red.core.utils")
_T = TypeVar("_T")
_S = TypeVar("_S")
# Benchmarked to be the fastest method.
def deduplicate_iterables(*iterables):
    """
    Returns a list of all unique items in ``iterables``, in the order they
    were first encountered.
    """
    # Relies on dict preserving insertion order (guaranteed in 3.7+,
    # implementation detail in 3.6).
    seen = dict.fromkeys(chain.from_iterable(iterables))
    return list(seen)
# https://github.com/PyCQA/pylint/issues/2717
class AsyncFilter(AsyncIterator[_T], Awaitable[List[_T]]):  # pylint: disable=duplicate-bases
    """Class returned by `async_filter`. See that function for details.

    We don't recommend instantiating this class directly.
    """

    def __init__(
        self,
        func: Callable[[_T], Union[bool, Awaitable[bool]]],
        iterable: Union[AsyncIterable[_T], Iterable[_T]],
    ) -> None:
        self.__func: Callable[[_T], Union[bool, Awaitable[bool]]] = func
        self.__iterable: Union[AsyncIterable[_T], Iterable[_T]] = iterable
        # We assign the generator strategy based on the arguments' types:
        # one dedicated async generator per (iterable kind, predicate kind)
        # combination, chosen once here so __anext__ stays branch-free.
        if isinstance(iterable, AsyncIterable):
            if asyncio.iscoroutinefunction(func):
                self.__generator_instance = self.__async_generator_async_pred()
            else:
                self.__generator_instance = self.__async_generator_sync_pred()
        elif asyncio.iscoroutinefunction(func):
            self.__generator_instance = self.__sync_generator_async_pred()
        else:
            # Both sync would make this class pointless — use builtin filter().
            raise TypeError("Must be either an async predicate, an async iterable, or both.")

    async def __sync_generator_async_pred(self) -> AsyncIterator[_T]:
        # Sync iterable, async predicate.
        for item in self.__iterable:
            if await self.__func(item):
                yield item

    async def __async_generator_sync_pred(self) -> AsyncIterator[_T]:
        # Async iterable, sync predicate.
        async for item in self.__iterable:
            if self.__func(item):
                yield item

    async def __async_generator_async_pred(self) -> AsyncIterator[_T]:
        # Async iterable, async predicate.
        async for item in self.__iterable:
            if await self.__func(item):
                yield item

    async def __flatten(self) -> List[_T]:
        # Exhaust the chosen generator into a list (used by __await__).
        return [item async for item in self]

    def __aiter__(self):
        return self

    def __await__(self):
        # Simply return the generator filled into a list
        return self.__flatten().__await__()

    def __anext__(self) -> Awaitable[_T]:
        # This will use the generator strategy set in __init__
        return self.__generator_instance.__anext__()
def async_filter(
    func: Callable[[_T], Union[bool, Awaitable[bool]]],
    iterable: Union[AsyncIterable[_T], Iterable[_T]],
) -> AsyncFilter[_T]:
    """Filter an (optionally async) iterable with an (optionally async) predicate.

    At least one of the arguments must be async.

    Parameters
    ----------
    func : Callable[[T], Union[bool, Awaitable[bool]]]
        A function or coroutine function which takes one item of ``iterable``
        as an argument, and returns ``True`` or ``False``.
    iterable : Union[AsyncIterable[_T], Iterable[_T]]
        An iterable or async iterable which is to be filtered.

    Raises
    ------
    TypeError
        If neither of the arguments are async.

    Returns
    -------
    AsyncFilter[T]
        An object which can either be awaited to yield a list of the filtered
        items, or can also act as an async iterator to yield items one by one.
    """
    # All of the strategy selection happens in AsyncFilter.__init__.
    filtered = AsyncFilter(func, iterable)
    return filtered
async def async_enumerate(
    async_iterable: AsyncIterable[_T], start: int = 0
) -> AsyncIterator[Tuple[int, _T]]:
    """Async iterable version of `enumerate`.

    Parameters
    ----------
    async_iterable : AsyncIterable[T]
        The iterable to enumerate.
    start : int
        The index to start from. Defaults to 0.

    Returns
    -------
    AsyncIterator[Tuple[int, T]]
        An async iterator of tuples in the form of ``(index, item)``.
    """
    index = start
    async for item in async_iterable:
        yield index, item
        index += 1
async def _sem_wrapper(sem, task):
async with sem:
return await task
def bounded_gather_iter(
    *coros_or_futures, limit: int = 4, semaphore: Optional[Semaphore] = None
) -> Iterator[Awaitable[Any]]:
    """
    An iterator that returns tasks as they are ready, but limits the
    number of tasks running at a time.

    Parameters
    ----------
    *coros_or_futures
        The awaitables to run in a bounded concurrent fashion.
    limit : Optional[`int`]
        The maximum number of concurrent tasks. Used when no ``semaphore``
        is passed.
    semaphore : Optional[:class:`asyncio.Semaphore`]
        The semaphore to use for bounding tasks. If `None`, create one
        using ``loop`` and ``limit``.

    Raises
    ------
    TypeError
        When invalid parameters are passed
    """
    loop = asyncio.get_running_loop()
    if semaphore is None:
        if not isinstance(limit, int) or limit <= 0:
            raise TypeError("limit must be an int > 0")
        semaphore = Semaphore(limit)
    wrapped = []
    for awaitable in coros_or_futures:
        # Futures bound to a foreign loop would dead-lock; reject them early.
        if isfuture(awaitable) and awaitable._loop is not loop:
            raise ValueError("futures are tied to different event loops")
        wrapped.append(_sem_wrapper(semaphore, awaitable))
    return as_completed(wrapped)
def bounded_gather(
    *coros_or_futures,
    return_exceptions: bool = False,
    limit: int = 4,
    semaphore: Optional[Semaphore] = None,
) -> Awaitable[List[Any]]:
    """
    A semaphore-bounded wrapper to :meth:`asyncio.gather`.

    Parameters
    ----------
    *coros_or_futures
        The awaitables to run in a bounded concurrent fashion.
    return_exceptions : bool
        If true, gather exceptions in the result list instead of raising.
    limit : Optional[`int`]
        The maximum number of concurrent tasks. Used when no ``semaphore``
        is passed.
    semaphore : Optional[:class:`asyncio.Semaphore`]
        The semaphore to use for bounding tasks. If `None`, create one
        using ``loop`` and ``limit``.

    Raises
    ------
    TypeError
        When invalid parameters are passed
    """
    # Raises RuntimeError when called outside a running event loop.
    loop = asyncio.get_running_loop()
    if semaphore is None:
        if not isinstance(limit, int) or limit <= 0:
            raise TypeError("limit must be an int > 0")
        semaphore = Semaphore(limit)
    wrapped = (_sem_wrapper(semaphore, aw) for aw in coros_or_futures)
    return asyncio.gather(*wrapped, return_exceptions=return_exceptions)
class AsyncIter(AsyncIterator[_T], Awaitable[List[_T]]): # pylint: disable=duplicate-bases
"""Asynchronous iterator yielding items from ``iterable``
that sleeps for ``delay`` seconds every ``steps`` items.
Parameters
----------
iterable: Iterable
The iterable to make async.
delay: Union[float, int]
The amount of time in seconds to sleep.
steps: int
The number of iterations between sleeps.
Raises
------
ValueError
When ``steps`` is lower than 1.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> async for value in AsyncIter(range(3)):
... print(value)
0
1
2
"""
def __init__(
self, iterable: Iterable[_T], delay: Union[float, int] = 0, steps: int = 1
) -> None:
if steps < 1:
raise ValueError("Steps must be higher than or equals to 1")
self._delay = delay
self._iterator = iter(iterable)
self._i = 0
self._steps = steps
self._map = None
def __aiter__(self) -> AsyncIter[_T]:
return self
async def __anext__(self) -> _T:
try:
item = next(self._iterator)
except StopIteration:
raise StopAsyncIteration
if self._i == self._steps:
self._i = 0
await asyncio.sleep(self._delay)
self._i += 1
return await maybe_coroutine(self._map, item) if self._map is not None else item
def __await__(self) -> Generator[Any, None, List[_T]]:
"""Returns a list of the iterable.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter(range(5))
>>> await iterator
[0, 1, 2, 3, 4]
"""
return self.flatten().__await__()
async def next(self, default: Any = ...) -> _T:
"""Returns a next entry of the iterable.
Parameters
----------
default: Optional[Any]
The value to return if the iterator is exhausted.
Raises
------
StopAsyncIteration
When ``default`` is not specified and the iterator has been exhausted.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter(range(5))
>>> await iterator.next()
0
>>> await iterator.next()
1
"""
try:
value = await self.__anext__()
except StopAsyncIteration:
if default is ...:
raise
value = default
return value
async def flatten(self) -> List[_T]:
"""Returns a list of the iterable.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter(range(5))
>>> await iterator.flatten()
[0, 1, 2, 3, 4]
"""
return [item async for item in self]
def filter(self, function: Callable[[_T], Union[bool, Awaitable[bool]]]) -> AsyncFilter[_T]:
"""Filter the iterable with an (optionally async) predicate.
Parameters
----------
function: Callable[[T], Union[bool, Awaitable[bool]]]
A function or coroutine function which takes one item of ``iterable``
as an argument, and returns ``True`` or ``False``.
Returns
-------
AsyncFilter[T]
An object which can either be awaited to yield a list of the filtered
items, or can also act as an async iterator to yield items one by one.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> def predicate(value):
... return value <= 5
>>> iterator = AsyncIter([1, 10, 5, 100])
>>> async for i in iterator.filter(predicate):
... print(i)
1
5
>>> from redbot.core.utils import AsyncIter
>>> def predicate(value):
... return value <= 5
>>> iterator = AsyncIter([1, 10, 5, 100])
>>> await iterator.filter(predicate)
[1, 5]
"""
return async_filter(function, self)
def enumerate(self, start: int = 0) -> AsyncIterator[Tuple[int, _T]]:
"""Async iterable version of `enumerate`.
Parameters
----------
start: int
The index to start from. Defaults to 0.
Returns
-------
AsyncIterator[Tuple[int, T]]
An async iterator of tuples in the form of ``(index, item)``.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter(['one', 'two', 'three'])
>>> async for i in iterator.enumerate(start=10):
... print(i)
(10, 'one')
(11, 'two')
(12, 'three')
"""
return async_enumerate(self, start)
async def without_duplicates(self) -> AsyncIterator[_T]:
"""
Iterates while omitting duplicated entries.
Examples
--------
>>> from redbot.core.utils import AsyncIter
>>> iterator = AsyncIter([1,2,3,3,4,4,5])
>>> async for i in iterator.without_duplicates():
... print(i)
1
2
3
4
5
"""
_temp = set()
async for item in self:
if item not in _temp:
yield item
_temp.add(item)
del _temp
async def find(
    self,
    predicate: Callable[[_T], Union[bool, Awaitable[bool]]],
    default: Optional[Any] = None,
) -> AsyncIterator[_T]:
    """Return the first item for which ``predicate`` is truthy.

    Parameters
    ----------
    predicate: Union[Callable, Coroutine]
        A function returning a boolean-like result for each item.
        The predicate provided can be a coroutine.
    default: Optional[Any]
        The value to return when no item matches.

    Raises
    ------
    TypeError
        When ``predicate`` is not a callable.

    Examples
    --------
    >>> from redbot.core.utils import AsyncIter
    >>> await AsyncIter(range(3)).find(lambda x: x == 1)
    1
    """
    while True:
        try:
            item = await self.__anext__()
        except StopAsyncIteration:
            break
        if await maybe_coroutine(predicate, item):
            return item
    return default
def map(self, func: Callable[[_T], Union[_S, Awaitable[_S]]]) -> AsyncIter[_S]:
    """Attach a mapping callable to this instance of `AsyncIter`.

    .. important::
        Call this right after AsyncIter initialization, before any
        of its other methods.

    Parameters
    ----------
    func: Union[Callable, Coroutine]
        The function applied to each value. The function provided can
        be a coroutine.

    Raises
    ------
    TypeError
        When ``func`` is not a callable.

    Examples
    --------
    >>> from redbot.core.utils import AsyncIter
    >>> async for value in AsyncIter(range(3)).map(bool):
    ...     print(value)
    False
    True
    True
    """
    if callable(func):
        self._map = func
        return self
    raise TypeError("Mapping must be a callable.")
def get_end_user_data_statement(file: Union[Path, str]) -> Optional[str]:
    """
    This function attempts to get the ``end_user_data_statement`` key from cog's ``info.json``.

    This will log the reason if ``None`` is returned.

    Parameters
    ----------
    file: Union[pathlib.Path, str]
        The ``__file__`` variable for the cog's ``__init__.py`` file.

    Returns
    -------
    Optional[str]
        The end user data statement found in the info.json
        or ``None`` if there was an issue finding one.

    Examples
    --------
    >>> # In cog's `__init__.py`
    >>> from redbot.core.utils import get_end_user_data_statement
    >>> __red_end_user_data_statement__ = get_end_user_data_statement(__file__)
    >>> def setup(bot):
    ...     ...
    """
    # Resolve the path *before* the try block so every except handler below can
    # safely reference ``info_json``. Previously ``info_json`` was assigned
    # inside the try, so a failure during path resolution made the broad
    # ``except Exception`` handler raise NameError while logging.
    info_json = Path(file).parent.absolute() / "info.json"
    try:
        statement = get_end_user_data_statement_or_raise(info_json)
    except FileNotFoundError:
        log.critical("'%s' does not exist.", str(info_json))
    except KeyError:
        log.critical("'%s' is missing an entry for 'end_user_data_statement'", str(info_json))
    except json.JSONDecodeError as exc:
        log.critical("'%s' is not a valid JSON file.", str(info_json), exc_info=exc)
    except UnicodeError as exc:
        log.critical("'%s' has a bad encoding.", str(info_json), exc_info=exc)
    except Exception as exc:
        log.critical(
            "There was an error when trying to load the end user data statement from '%s'.",
            str(info_json),
            exc_info=exc,
        )
    else:
        return statement
    return None
def get_end_user_data_statement_or_raise(file: Union[Path, str]) -> str:
    """
    This function attempts to get the ``end_user_data_statement`` key from cog's ``info.json``.

    Parameters
    ----------
    file: Union[pathlib.Path, str]
        The ``__file__`` variable for the cog's ``__init__.py`` file.

    Returns
    -------
    str
        The end user data statement found in the info.json.

    Raises
    ------
    FileNotFoundError
        When ``info.json`` does not exist.
    KeyError
        When ``info.json`` does not have the ``end_user_data_statement`` key.
    json.JSONDecodeError
        When ``info.json`` can't be decoded with ``json.load()``
    UnicodeError
        When ``info.json`` can't be decoded due to bad encoding.
    Exception
        Any other exception raised from ``pathlib`` and ``json`` modules
        when attempting to parse the ``info.json`` for the ``end_user_data_statement`` key.
    """
    cog_dir = Path(file).parent.absolute()
    with (cog_dir / "info.json").open(encoding="utf-8") as fp:
        info = json.load(fp)
    return info["end_user_data_statement"]
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/utils/__init__.py
| 0.905111 | 0.296069 |
__init__.py
|
pypi
|
import datetime
import itertools
import textwrap
from io import BytesIO
from typing import Iterator, List, Optional, Sequence, SupportsInt, Union
import discord
from babel.lists import format_list as babel_list
from babel.numbers import format_decimal
from redbot.core.i18n import Translator, get_babel_locale, get_babel_regional_format
_ = Translator("UtilsChatFormatting", __file__)
def error(text: str) -> str:
    """Prefix ``text`` with an error emoji.

    Parameters
    ----------
    text : str
        The text to be prefixed.

    Returns
    -------
    str
        The new message.
    """
    return f"\N{NO ENTRY SIGN} {text}"
def warning(text: str) -> str:
    """Prefix ``text`` with a warning emoji.

    Parameters
    ----------
    text : str
        The text to be prefixed.

    Returns
    -------
    str
        The new message.
    """
    return f"\N{WARNING SIGN}\N{VARIATION SELECTOR-16} {text}"
def info(text: str) -> str:
    """Prefix ``text`` with an info emoji.

    Parameters
    ----------
    text : str
        The text to be prefixed.

    Returns
    -------
    str
        The new message.
    """
    return f"\N{INFORMATION SOURCE}\N{VARIATION SELECTOR-16} {text}"
def question(text: str) -> str:
    """Prefix ``text`` with a question emoji.

    Parameters
    ----------
    text : str
        The text to be prefixed.

    Returns
    -------
    str
        The new message.
    """
    return f"\N{BLACK QUESTION MARK ORNAMENT}\N{VARIATION SELECTOR-16} {text}"
def bold(text: str, escape_formatting: bool = True) -> str:
    """Wrap ``text`` in Discord bold markdown.

    Note: By default, this function will escape ``text`` prior to emboldening.

    Parameters
    ----------
    text : str
        The text to be marked up.
    escape_formatting : `bool`, optional
        Set to :code:`False` to not escape markdown formatting in the text.

    Returns
    -------
    str
        The marked up text.
    """
    return "**{}**".format(escape(text, formatting=escape_formatting))
def box(text: str, lang: str = "") -> str:
    """Wrap ``text`` in a Discord code block.

    Parameters
    ----------
    text : str
        The text to be marked up.
    lang : `str`, optional
        The syntax highlighting language for the codeblock.

    Returns
    -------
    str
        The marked up text.
    """
    return f"```{lang}\n{text}\n```"
def inline(text: str) -> str:
    """Format ``text`` as inline code.

    A double-backtick wrapper is used when the text itself contains a
    backtick, so the markup still renders.

    Parameters
    ----------
    text : str
        The text to be marked up.

    Returns
    -------
    str
        The marked up text.
    """
    ticks = "``" if "`" in text else "`"
    return f"{ticks}{text}{ticks}"
def italics(text: str, escape_formatting: bool = True) -> str:
    """Wrap ``text`` in Discord italics markdown.

    Note: By default, this function will escape ``text`` prior to italicising.

    Parameters
    ----------
    text : str
        The text to be marked up.
    escape_formatting : `bool`, optional
        Set to :code:`False` to not escape markdown formatting in the text.

    Returns
    -------
    str
        The marked up text.
    """
    return "*{}*".format(escape(text, formatting=escape_formatting))
def spoiler(text: str, escape_formatting: bool = True) -> str:
    """Wrap ``text`` in Discord spoiler markdown.

    Note: By default, this function will escape ``text`` prior to making the text a spoiler.

    Parameters
    ----------
    text : str
        The text to be marked up.
    escape_formatting : `bool`, optional
        Set to :code:`False` to not escape markdown formatting in the text.

    Returns
    -------
    str
        The marked up text.
    """
    return "||{}||".format(escape(text, formatting=escape_formatting))
def bordered(*columns: Sequence[str], ascii_border: bool = False) -> str:
    """Get two blocks of text inside borders.

    Note
    ----
    This will only work with a monospaced font.

    Parameters
    ----------
    *columns : `sequence` of `str`
        The columns of text, each being a list of lines in that column.
    ascii_border : bool
        Whether or not the border should be pure ASCII.

    Returns
    -------
    str
        The bordered text.
    """
    # Border glyphs; the {placeholders} embedded in the layout below are
    # substituted with these via str.format at the very end.
    borders = {
        "TL": "+" if ascii_border else "┌",  # Top-left
        "TR": "+" if ascii_border else "┐",  # Top-right
        "BL": "+" if ascii_border else "└",  # Bottom-left
        "BR": "+" if ascii_border else "┘",  # Bottom-right
        "HZ": "-" if ascii_border else "─",  # Horizontal
        "VT": "|" if ascii_border else "│",  # Vertical
    }
    sep = " " * 4  # Separator between boxes
    # Each column is as wide as its longest row plus extra padding.
    # NOTE(review): the `+ 9` padding constant is unexplained here — confirm
    # its purpose (possibly room for wide/emoji glyphs) before changing.
    widths = tuple(max(len(row) for row in column) + 9 for column in columns)  # width of each col
    colsdone = [False] * len(columns)  # whether or not each column is done
    # Top border row for every column.
    lines = [sep.join("{TL}" + "{HZ}" * width + "{TR}" for width in widths)]
    # zip_longest pads shorter columns with None so all columns advance in lockstep.
    for line in itertools.zip_longest(*columns):
        row = []
        for colidx, column in enumerate(line):
            width = widths[colidx]
            done = colsdone[colidx]
            if column is None:
                if not done:
                    # bottom border of column (drawn once, when the column runs out)
                    column = "{HZ}" * width
                    row.append("{BL}" + column + "{BR}")
                    colsdone[colidx] = True  # mark column as done
                else:
                    # column already closed: leave empty space of matching width
                    row.append(" " * (width + 2))
            else:
                column += " " * (width - len(column))  # append padded spaces
                row.append("{VT}" + column + "{VT}")
        lines.append(sep.join(row))
    # Close any columns that were exactly as long as the longest one.
    final_row = []
    for width, done in zip(widths, colsdone):
        if not done:
            final_row.append("{BL}" + "{HZ}" * width + "{BR}")
        else:
            final_row.append(" " * (width + 2))
    lines.append(sep.join(final_row))
    return "\n".join(lines).format(**borders)
def pagify(
    text: str,
    delims: Sequence[str] = ("\n",),
    *,
    priority: bool = False,
    escape_mass_mentions: bool = True,
    shorten_by: int = 8,
    page_length: int = 2000,
) -> Iterator[str]:
    """Generate multiple pages from the given text.

    Note
    ----
    This does not respect code blocks or inline code.

    Parameters
    ----------
    text : str
        The content to pagify and send.
    delims : `sequence` of `str`, optional
        Characters where page breaks will occur. If no delimiters are found
        in a page, the page will break after ``page_length`` characters.
        By default this only contains the newline.
        (The default is a tuple rather than a list to avoid the
        mutable-default-argument pitfall; any sequence is accepted.)

    Other Parameters
    ----------------
    priority : `bool`
        Set to :code:`True` to choose the page break delimiter based on the
        order of ``delims``. Otherwise, the page will always break at the
        last possible delimiter.
    escape_mass_mentions : `bool`
        If :code:`True`, any mass mentions (here or everyone) will be
        silenced.
    shorten_by : `int`
        How much to shorten each page by. Defaults to 8.
    page_length : `int`
        The maximum length of each page. Defaults to 2000.

    Yields
    ------
    `str`
        Pages of the given text.
    """
    in_text = text
    page_length -= shorten_by
    while len(in_text) > page_length:
        this_page_len = page_length
        if escape_mass_mentions:
            # Leave room for the zero-width spaces that escape() will insert.
            this_page_len -= in_text.count("@here", 0, page_length) + in_text.count(
                "@everyone", 0, page_length
            )
        closest_delim = (in_text.rfind(d, 1, this_page_len) for d in delims)
        if priority:
            # First delimiter (in delims order) that was found wins.
            closest_delim = next((x for x in closest_delim if x > 0), -1)
        else:
            # Latest delimiter position in the page wins.
            closest_delim = max(closest_delim)
        # No delimiter found: hard-break at the page boundary.
        closest_delim = closest_delim if closest_delim != -1 else this_page_len
        if escape_mass_mentions:
            to_send = escape(in_text[:closest_delim], mass_mentions=True)
        else:
            to_send = in_text[:closest_delim]
        if len(to_send.strip()) > 0:
            yield to_send
        in_text = in_text[closest_delim:]
    if len(in_text.strip()) > 0:
        if escape_mass_mentions:
            yield escape(in_text, mass_mentions=True)
        else:
            yield in_text
def strikethrough(text: str, escape_formatting: bool = True) -> str:
    """Wrap ``text`` in Discord strikethrough markdown.

    Note: By default, this function will escape ``text`` prior to applying a strikethrough.

    Parameters
    ----------
    text : str
        The text to be marked up.
    escape_formatting : `bool`, optional
        Set to :code:`False` to not escape markdown formatting in the text.

    Returns
    -------
    str
        The marked up text.
    """
    return "~~{}~~".format(escape(text, formatting=escape_formatting))
def underline(text: str, escape_formatting: bool = True) -> str:
    """Wrap ``text`` in Discord underline markdown.

    Note: By default, this function will escape ``text`` prior to underlining.

    Parameters
    ----------
    text : str
        The text to be marked up.
    escape_formatting : `bool`, optional
        Set to :code:`False` to not escape markdown formatting in the text.

    Returns
    -------
    str
        The marked up text.
    """
    return "__{}__".format(escape(text, formatting=escape_formatting))
def quote(text: str) -> str:
    """Mark up ``text`` as a Discord block quote.

    Every line — including empty ones — is prefixed with ``> ``.

    Parameters
    ----------
    text : str
        The text to be marked up.

    Returns
    -------
    str
        The marked up text.
    """
    # The always-True predicate makes textwrap.indent prefix blank lines too.
    return textwrap.indent(text, "> ", lambda line: True)
def escape(text: str, *, mass_mentions: bool = False, formatting: bool = False) -> str:
    """Escape mass mentions and/or markdown formatting in ``text``.

    Parameters
    ----------
    text : str
        The text to be escaped.
    mass_mentions : `bool`, optional
        Set to :code:`True` to escape mass mentions in the text.
    formatting : `bool`, optional
        Set to :code:`True` to escape any markdown formatting in the text.

    Returns
    -------
    str
        The escaped text.
    """
    if mass_mentions:
        # A zero-width space after the "@" neutralizes the mention while
        # leaving the text visually unchanged.
        text = text.replace("@everyone", "@\u200beveryone").replace("@here", "@\u200bhere")
    if formatting:
        text = discord.utils.escape_markdown(text)
    return text
def humanize_list(
    items: Sequence[str], *, locale: Optional[str] = None, style: str = "standard"
) -> str:
    """Get comma-separated list, with the last element joined with *and*.

    Parameters
    ----------
    items : Sequence[str]
        The items of the list to join together.
    locale : Optional[str]
        The locale to convert, if not specified it defaults to the bot's locale.
    style : str
        The style to format the list with.

        Note: Not all styles are necessarily available in all locales,
        see documentation of `babel.lists.format_list` for more details.

        standard
            A typical 'and' list for arbitrary placeholders.
            eg. "January, February, and March"
        standard-short
            A short version of a 'and' list, suitable for use with short or
            abbreviated placeholder values.
            eg. "Jan., Feb., and Mar."
        or
            A typical 'or' list for arbitrary placeholders.
            eg. "January, February, or March"
        or-short
            A short version of an 'or' list.
            eg. "Jan., Feb., or Mar."
        unit
            A list suitable for wide units.
            eg. "3 feet, 7 inches"
        unit-short
            A list suitable for short units
            eg. "3 ft, 7 in"
        unit-narrow
            A list suitable for narrow units, where space on the screen is very limited.
            eg. "3′ 7″"

    Raises
    ------
    ValueError
        The locale does not support the specified style.

    Examples
    --------
    .. testsetup::

        from redbot.core.utils.chat_formatting import humanize_list

    .. doctest::

        >>> humanize_list(['One', 'Two', 'Three'])
        'One, Two, and Three'
        >>> humanize_list(['One'])
        'One'
        >>> humanize_list(['omena', 'peruna', 'aplari'], style='or', locale='fi')
        'omena, peruna tai aplari'
    """
    babel_locale = get_babel_locale(locale)
    return babel_list(items, style=style, locale=babel_locale)
def format_perms_list(perms: discord.Permissions) -> str:
    """Format a list of permission names.

    This will return a humanized list of the names of all enabled
    permissions in the provided `discord.Permissions` object.

    Parameters
    ----------
    perms : discord.Permissions
        The permissions object with the requested permissions to list
        enabled.

    Returns
    -------
    str
        The humanized list.
    """
    # Permissions iterate as (name, enabled) pairs; quote the title-cased
    # names of the enabled ones.
    perm_names: List[str] = [
        '"' + perm.replace("_", " ").title() + '"' for perm, value in perms if value is True
    ]
    return humanize_list(perm_names).replace("Guild", "Server")
def humanize_timedelta(
    *, timedelta: Optional[datetime.timedelta] = None, seconds: Optional[SupportsInt] = None
) -> str:
    """
    Get a locale aware human timedelta representation.

    This works with either a timedelta object or a number of seconds.
    If both are provided, ``seconds`` takes precedence.

    Fractional values will be omitted, and values less than 1 second
    an empty string.

    Parameters
    ----------
    timedelta: Optional[datetime.timedelta]
        A timedelta object
    seconds: Optional[SupportsInt]
        A number of seconds

    Returns
    -------
    str
        A locale aware representation of the timedelta or seconds.

    Raises
    ------
    ValueError
        The function was called with neither a number of seconds nor a timedelta object
    """
    try:
        obj = seconds if seconds is not None else timedelta.total_seconds()
    except AttributeError:
        # timedelta was None (and seconds was None too).
        raise ValueError("You must provide either a timedelta or a number of seconds")

    seconds = int(obj)
    periods = [
        (_("year"), _("years"), 60 * 60 * 24 * 365),
        (_("month"), _("months"), 60 * 60 * 24 * 30),
        (_("day"), _("days"), 60 * 60 * 24),
        (_("hour"), _("hours"), 60 * 60),
        (_("minute"), _("minutes"), 60),
        (_("second"), _("seconds"), 1),
    ]

    strings = []
    for period_name, plural_period_name, period_seconds in periods:
        if seconds >= period_seconds:
            period_value, seconds = divmod(seconds, period_seconds)
            # period_value >= 1 is guaranteed by the check above, so the old
            # `if period_value == 0: continue` branch was unreachable and has
            # been removed.
            unit = plural_period_name if period_value > 1 else period_name
            strings.append(f"{period_value} {unit}")

    return ", ".join(strings)
def humanize_number(val: Union[int, float], override_locale=None) -> str:
    """
    Convert an int or float to a str with digit separators based on bot locale.

    Parameters
    ----------
    val : Union[int, float]
        The int/float to be formatted.
    override_locale: Optional[str]
        A value to override bot's regional format.

    Returns
    -------
    str
        locale aware formatted number.
    """
    regional_format = get_babel_regional_format(override_locale)
    return format_decimal(val, locale=regional_format)
def text_to_file(
    text: str, filename: str = "file.txt", *, spoiler: bool = False, encoding: str = "utf-8"
):
    """Prepares text to be sent as a file on Discord, without character limit.

    This writes text into a bytes object that can be used for the ``file`` or ``files`` parameters
    of :meth:`discord.abc.Messageable.send`.

    Parameters
    ----------
    text: str
        The text to put in your file.
    filename: str
        The name of the file sent. Defaults to ``file.txt``.
    spoiler: bool
        Whether the attachment is a spoiler. Defaults to ``False``.
    encoding: str
        The codec used to encode ``text`` to bytes. Defaults to ``utf-8``.

    Returns
    -------
    discord.File
        The file containing your text.
    """
    buffer = BytesIO(text.encode(encoding))
    return discord.File(buffer, filename, spoiler=spoiler)
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/utils/chat_formatting.py
| 0.886181 | 0.537345 |
chat_formatting.py
|
pypi
|
import functools
import re
from datetime import timedelta
from typing import (
TYPE_CHECKING,
Generic,
Optional,
Optional as NoParseOptional,
Tuple,
List,
Dict,
Type,
TypeVar,
Literal as Literal,
Any,
Union as UserInputOptional,
)
import discord
from discord.ext import commands as dpy_commands
from discord.ext.commands import BadArgument
from ..i18n import Translator
from ..utils.chat_formatting import humanize_timedelta, humanize_list
if TYPE_CHECKING:
from .context import Context
__all__ = [
"DictConverter",
"UserInputOptional",
"NoParseOptional",
"TimedeltaConverter",
"get_dict_converter",
"get_timedelta_converter",
"parse_timedelta",
"Literal",
]
_ = Translator("commands.converter", __file__)
ID_REGEX = re.compile(r"([0-9]{15,20})")
# Taken with permission from
# https://github.com/mikeshardmind/SinbadCogs/blob/816f3bc2ba860243f75112904b82009a8a9e1f99/scheduler/time_utils.py#L9-L19
TIME_RE_STRING = r"\s?".join(
[
r"((?P<weeks>\d+?)\s?(weeks?|w))?",
r"((?P<days>\d+?)\s?(days?|d))?",
r"((?P<hours>\d+?)\s?(hours?|hrs|hr?))?",
r"((?P<minutes>\d+?)\s?(minutes?|mins?|m(?!o)))?", # prevent matching "months"
r"((?P<seconds>\d+?)\s?(seconds?|secs?|s))?",
]
)
TIME_RE = re.compile(TIME_RE_STRING, re.I)
def parse_timedelta(
    argument: str,
    *,
    maximum: Optional[timedelta] = None,
    minimum: Optional[timedelta] = None,
    allowed_units: Optional[List[str]] = None,
) -> Optional[timedelta]:
    """
    Convert a user-provided string into a timedelta.

    The units should be in order from largest to smallest.
    This works with or without whitespace.

    Parameters
    ----------
    argument : str
        The user provided input
    maximum : Optional[timedelta]
        If provided, any parsed value higher than this will raise an exception
    minimum : Optional[timedelta]
        If provided, any parsed value lower than this will raise an exception
    allowed_units : Optional[List[str]]
        If provided, you can constrain a user to expressing the amount of time
        in specific units. The units you can chose to provide are the same as the
        parser understands. (``weeks``, ``days``, ``hours``, ``minutes``, ``seconds``)

    Returns
    -------
    Optional[timedelta]
        If matched, the timedelta which was parsed. This can return `None`

    Raises
    ------
    BadArgument
        If the argument passed uses a unit not allowed, but understood
        or if the value is out of bounds.
    """
    match = TIME_RE.match(argument)
    if not match:
        return None
    units = allowed_units or ["weeks", "days", "hours", "minutes", "seconds"]
    # Keep only the unit groups that actually matched.
    params = {unit: int(amount) for unit, amount in match.groupdict().items() if amount is not None}
    for unit in params:
        if unit not in units:
            raise BadArgument(
                _("`{unit}` is not a valid unit of time for this command").format(unit=unit)
            )
    if not params:
        return None
    try:
        delta = timedelta(**params)
    except OverflowError:
        raise BadArgument(
            _("The time set is way too high, consider setting something reasonable.")
        )
    if maximum and maximum < delta:
        raise BadArgument(
            _(
                "This amount of time is too large for this command. (Maximum: {maximum})"
            ).format(maximum=humanize_timedelta(timedelta=maximum))
        )
    if minimum and delta < minimum:
        raise BadArgument(
            _(
                "This amount of time is too small for this command. (Minimum: {minimum})"
            ).format(minimum=humanize_timedelta(timedelta=minimum))
        )
    return delta
class _GuildConverter(discord.Guild):
    """Converts to a `discord.Guild` object.

    The lookup strategy is as follows (in order):

    1. Lookup by ID.
    2. Lookup by name.

    .. deprecated-removed:: 3.4.8 60
        ``GuildConverter`` is now only provided within ``redbot.core.commands`` namespace.
    """

    @classmethod
    async def convert(cls, ctx: "Context", argument: str) -> discord.Guild:
        # Delegates entirely to discord.py's own converter; this class exists
        # only so the old import location keeps working (it is served with a
        # deprecation warning via the module-level __getattr__ below).
        return await dpy_commands.GuildConverter().convert(ctx, argument)


# Present the class under its public (deprecated) name in reprs and docs.
_GuildConverter.__name__ = "GuildConverter"
def __getattr__(name: str, *, stacklevel: int = 2) -> Any:
# Let me just say it one more time... This is awesome! (PEP-562)
if name == "GuildConverter":
# let's not waste time on importing this when we don't need it
# (and let's not put in the public API)
from redbot.core.utils._internal_utils import deprecated_removed
deprecated_removed(
"`GuildConverter` from `redbot.core.commands.converter` namespace",
"3.4.8",
60,
"Use `GuildConverter` from `redbot.core.commands` namespace instead.",
stacklevel=2,
)
return globals()["_GuildConverter"]
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
def __dir__() -> List[str]:
return [*globals().keys(), "GuildConverter"]
# Below this line are a lot of lies for mypy about things that *end up* correct when
# These are used for command conversion purposes. Please refer to the portion
# which is *not* for type checking for the actual implementation
# and ensure the lies stay correct for how the object should look as a typehint
# To type checkers DictConverter is just Dict[str, str]; at runtime it is a
# discord.py Converter that parses "key value key value ..." input.
if TYPE_CHECKING:
    DictConverter = Dict[str, str]
else:

    class DictConverter(dpy_commands.Converter):
        """
        Converts pairs of space separated values to a dict
        """

        def __init__(self, *expected_keys: str, delims: Optional[List[str]] = None):
            # No expected_keys means any key is accepted; delims defaults to a
            # single space, and the pattern matches any of the delimiters.
            self.expected_keys = expected_keys
            self.delims = delims or [" "]
            self.pattern = re.compile(r"|".join(re.escape(d) for d in self.delims))

        async def convert(self, ctx: "Context", argument: str) -> Dict[str, str]:
            ret: Dict[str, str] = {}
            args = self.pattern.split(argument)
            # Tokens must pair up as key/value; an odd count is malformed input.
            if len(args) % 2 != 0:
                raise BadArgument()
            iterator = iter(args)
            for key in iterator:
                if self.expected_keys and key not in self.expected_keys:
                    raise BadArgument(_("Unexpected key {key}").format(key=key))
                # Consume the value token that follows the key.
                ret[key] = next(iterator)
            return ret
if TYPE_CHECKING:

    def get_dict_converter(*expected_keys: str, delims: Optional[List[str]] = None) -> Type[dict]:
        ...

else:

    def get_dict_converter(*expected_keys: str, delims: Optional[List[str]] = None) -> Type[dict]:
        """
        Returns a typechecking safe `DictConverter` suitable for use with discord.py
        """

        class PartialMeta(type):
            # Pre-bind the constructor arguments on the metaclass so that
            # discord.py can instantiate the converter class with no arguments.
            __call__ = functools.partialmethod(
                type(DictConverter).__call__, *expected_keys, delims=delims
            )

        class ValidatedConverter(DictConverter, metaclass=PartialMeta):
            pass

        return ValidatedConverter
# To type checkers TimedeltaConverter is just timedelta; at runtime it is a
# discord.py Converter wrapping parse_timedelta.
if TYPE_CHECKING:
    TimedeltaConverter = timedelta
else:

    class TimedeltaConverter(dpy_commands.Converter):
        """
        This is a converter for timedeltas.

        The units should be in order from largest to smallest.
        This works with or without whitespace.

        See `parse_timedelta` for more information about how this functions.

        Attributes
        ----------
        maximum : Optional[timedelta]
            If provided, any parsed value higher than this will raise an exception
        minimum : Optional[timedelta]
            If provided, any parsed value lower than this will raise an exception
        allowed_units : Optional[List[str]]
            If provided, you can constrain a user to expressing the amount of time
            in specific units. The units you can choose to provide are the same as the
            parser understands: (``weeks``, ``days``, ``hours``, ``minutes``, ``seconds``)
        default_unit : Optional[str]
            If provided, it will additionally try to match integer-only input into
            a timedelta, using the unit specified. Same units as in ``allowed_units``
            apply.
        """

        def __init__(self, *, minimum=None, maximum=None, allowed_units=None, default_unit=None):
            self.allowed_units = allowed_units
            self.default_unit = default_unit
            self.minimum = minimum
            self.maximum = maximum

        async def convert(self, ctx: "Context", argument: str) -> timedelta:
            # Bare integer input gets the default unit appended (e.g. "5" -> "5m").
            if self.default_unit and argument.isdecimal():
                argument = argument + self.default_unit
            delta = parse_timedelta(
                argument,
                minimum=self.minimum,
                maximum=self.maximum,
                allowed_units=self.allowed_units,
            )
            if delta is not None:
                return delta
            raise BadArgument()  # This allows this to be a required argument.
if TYPE_CHECKING:

    def get_timedelta_converter(
        *,
        default_unit: Optional[str] = None,
        maximum: Optional[timedelta] = None,
        minimum: Optional[timedelta] = None,
        allowed_units: Optional[List[str]] = None,
    ) -> Type[timedelta]:
        ...

else:

    def get_timedelta_converter(
        *,
        default_unit: Optional[str] = None,
        maximum: Optional[timedelta] = None,
        minimum: Optional[timedelta] = None,
        allowed_units: Optional[List[str]] = None,
    ) -> Type[timedelta]:
        """
        This creates a type suitable for typechecking which works with discord.py's
        commands.

        See `parse_timedelta` for more information about how this functions.

        Parameters
        ----------
        maximum : Optional[timedelta]
            If provided, any parsed value higher than this will raise an exception
        minimum : Optional[timedelta]
            If provided, any parsed value lower than this will raise an exception
        allowed_units : Optional[List[str]]
            If provided, you can constrain a user to expressing the amount of time
            in specific units. The units you can choose to provide are the same as the
            parser understands: (``weeks``, ``days``, ``hours``, ``minutes``, ``seconds``)
        default_unit : Optional[str]
            If provided, it will additionally try to match integer-only input into
            a timedelta, using the unit specified. Same units as in ``allowed_units``
            apply.

        Returns
        -------
        type
            The converter class, which will be a subclass of `TimedeltaConverter`
        """

        class PartialMeta(type):
            # NOTE(review): ``type(DictConverter).__call__`` looks like a
            # copy-paste from get_dict_converter; it is harmless because both
            # resolve to ``type.__call__``, but confirm before "fixing" it.
            __call__ = functools.partialmethod(
                type(DictConverter).__call__,
                allowed_units=allowed_units,
                default_unit=default_unit,
                minimum=minimum,
                maximum=maximum,
            )

        class ValidatedConverter(TimedeltaConverter, metaclass=PartialMeta):
            pass

        return ValidatedConverter
if not TYPE_CHECKING:

    class NoParseOptional:
        """
        This can be used instead of `typing.Optional`
        to avoid discord.py special casing the conversion behavior.

        .. seealso::
            The `ignore_optional_for_conversion` option of commands.
        """

        def __class_getitem__(cls, key):
            # At runtime the subscription unwraps to the inner type directly,
            # so discord.py never sees a Union and applies no special casing.
            if isinstance(key, tuple):
                raise TypeError("Must only provide a single type to Optional")
            return key


_T = TypeVar("_T")

if not TYPE_CHECKING:
    #: This can be used when user input should be converted as discord.py
    #: treats `typing.Optional`, but the type should not be equivalent to
    #: ``typing.Union[DesiredType, None]`` for type checking.
    #:
    #: Note: In type checking context, this type hint can be passed
    #: multiple types, but such usage is not supported and will fail at runtime
    #:
    #: .. warning::
    #:     This converter class is still provisional.
    UserInputOptional = Optional
if not TYPE_CHECKING:

    class Literal(dpy_commands.Converter):
        """
        This can be used as a converter for `typing.Literal`.

        In a type checking context it is `typing.Literal`.
        In a runtime context, it's a converter which only matches the literals it was given.

        .. warning::
            This converter class is still provisional.
        """

        def __init__(self, valid_names: Tuple[str, ...]):
            self.valid_names = valid_names

        def __call__(self, ctx, arg):
            # Callable's are treated as valid types:
            # https://github.com/python/cpython/blob/3.8/Lib/typing.py#L148
            # Without this, ``typing.Union[Literal["clear"], bool]`` would fail
            return self.convert(ctx, arg)

        async def convert(self, ctx, arg):
            if arg in self.valid_names:
                return arg
            raise BadArgument(_("Expected one of: {}").format(humanize_list(self.valid_names)))

        def __class_getitem__(cls, k):
            # Only an *empty* subscription is an error. The previous
            # ``if not k`` check wrongly rejected falsy single literals
            # such as ``Literal[0]`` or ``Literal[""]``.
            if isinstance(k, tuple):
                if not k:
                    raise ValueError("Need at least one value for Literal")
                return cls(k)
            return cls((k,))
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/commands/converter.py
| 0.822403 | 0.341994 |
converter.py
|
pypi
|
from __future__ import annotations
import inspect
import io
import re
import functools
import weakref
from typing import (
Any,
Awaitable,
Callable,
Dict,
List,
Literal,
Optional,
Tuple,
Union,
MutableMapping,
TYPE_CHECKING,
cast,
)
import discord
from discord.ext.commands import (
BadArgument,
CommandError,
CheckFailure,
DisabledCommand,
command as dpy_command_deco,
Command as DPYCommand,
Cog as DPYCog,
CogMeta as DPYCogMeta,
Group as DPYGroup,
Greedy,
)
from .errors import ConversionFailure
from .requires import PermState, PrivilegeLevel, Requires, PermStateAllowedStates
from ..i18n import Translator
if TYPE_CHECKING:
# circular import avoidance
from .context import Context
__all__ = [
"Cog",
"CogMixin",
"CogCommandMixin",
"CogGroupMixin",
"Command",
"Group",
"GroupMixin",
"command",
"group",
"RESERVED_COMMAND_NAMES",
"RedUnhandledAPI",
]
#: The following names are reserved for various reasons
RESERVED_COMMAND_NAMES = (
"cancel", # reserved due to use in ``redbot.core.utils.MessagePredicate``
)
_ = Translator("commands.commands", __file__)
DisablerDictType = MutableMapping[discord.Guild, Callable[["Context"], Awaitable[bool]]]
class RedUnhandledAPI(Exception):
    """An exception which can be raised to signal a lack of handling specific APIs."""

    pass
class CogCommandMixin:
"""A mixin for cogs and commands."""
@property
def help(self) -> str:
    """Help text for this object; to be defined by subclasses.

    Declared here so methods such as `format_help_for_context` can rely
    on the attribute existing on every cog/command mixin user.
    """
    ...
def __init__(self, *args, **kwargs):
    """Build the ``requires`` object from attributes left by permission decorators."""
    super().__init__(*args, **kwargs)
    # Permission decorators store their data on the callback for commands,
    # but directly on the object itself for cogs.
    target = self.callback if isinstance(self, Command) else self
    self.requires: Requires = Requires(
        privilege_level=getattr(
            target, "__requires_privilege_level__", PrivilegeLevel.NONE
        ),
        user_perms=getattr(target, "__requires_user_perms__", {}),
        bot_perms=getattr(target, "__requires_bot_perms__", {}),
        checks=getattr(target, "__requires_checks__", []),
    )
def format_text_for_context(self, ctx: "Context", text: str) -> str:
    """
    Substitute context-dependent placeholders into ``text``.

    The steps are (currently, roughly) the following:

    - substitute ``[p]`` with ``ctx.clean_prefix``
    - substitute ``[botname]`` with ``ctx.me.display_name``

    More steps may be added at a later time.

    Cog creators should only override this if they want
    help text to be modified, and may also want to
    look at `format_help_for_context` and (for commands only)
    ``format_shortdoc_for_context``

    Parameters
    ----------
    ctx: Context
    text: str

    Returns
    -------
    str
        text which has had some portions replaced based on context
    """
    token_pattern = re.compile(r"\[p\]|\[botname\]")

    def _substitute(match: re.Match) -> str:
        token = match.group(0)
        if token == "[p]":
            return ctx.clean_prefix
        if token == "[botname]":
            return ctx.me.display_name
        # Unreachable with the current pattern; returned unchanged for safety.
        return token

    return token_pattern.sub(_substitute, text)
def format_help_for_context(self, ctx: "Context") -> str:
    """
    This formats the help string based on values in context

    The steps are (currently, roughly) the following:

    - get the localized help
    - substitute ``[p]`` with ``ctx.clean_prefix``
    - substitute ``[botname]`` with ``ctx.me.display_name``

    More steps may be added at a later time.

    Cog creators may override this in their own command classes
    as long as the method signature stays the same.

    Parameters
    ----------
    ctx: Context

    Returns
    -------
    str
        Localized help with some formatting
    """
    help_str = self.help
    # An empty help string short-circuits: there is nothing to format.
    return self.format_text_for_context(ctx, help_str) if help_str else help_str
def allow_for(self, model_id: Union[int, str], guild_id: int) -> None:
"""Actively allow this command for the given model.
Parameters
----------
model_id : Union[int, str]
Must be an `int` if supplying an ID. `str` is only valid
for "default".
guild_id : int
The guild ID to allow this cog or command in. For global
rules, use ``0``.
"""
self.requires.set_rule(model_id, PermState.ACTIVE_ALLOW, guild_id=guild_id)
def deny_to(self, model_id: Union[int, str], guild_id: int) -> None:
"""Actively deny this command to the given model.
Parameters
----------
model_id : Union[int, str]
Must be an `int` if supplying an ID. `str` is only valid
for "default".
guild_id : int
The guild ID to deny this cog or command in. For global
rules, use ``0``.
"""
cur_rule = self.requires.get_rule(model_id, guild_id=guild_id)
if cur_rule is PermState.PASSIVE_ALLOW:
self.requires.set_rule(model_id, PermState.CAUTIOUS_ALLOW, guild_id=guild_id)
else:
self.requires.set_rule(model_id, PermState.ACTIVE_DENY, guild_id=guild_id)
def clear_rule_for(
self, model_id: Union[int, str], guild_id: int
) -> Tuple[PermState, PermState]:
"""Clear the rule which is currently set for this model.
Parameters
----------
model_id : Union[int, str]
Must be an `int` if supplying an ID. `str` is only valid
for "default".
guild_id : int
The guild ID. For global rules, use ``0``.
"""
cur_rule = self.requires.get_rule(model_id, guild_id=guild_id)
if cur_rule is PermState.ACTIVE_ALLOW:
new_rule = PermState.NORMAL
elif cur_rule is PermState.ACTIVE_DENY:
new_rule = PermState.NORMAL
elif cur_rule is PermState.CAUTIOUS_ALLOW:
new_rule = PermState.PASSIVE_ALLOW
else:
return cur_rule, cur_rule
self.requires.set_rule(model_id, new_rule, guild_id=guild_id)
return cur_rule, new_rule
def set_default_rule(self, rule: Optional[bool], guild_id: int) -> None:
"""Set the default rule for this cog or command.
Parameters
----------
rule : Optional[bool]
The rule to set as default. If ``True`` for allow,
``False`` for deny and ``None`` for normal.
guild_id : int
The guild to set the default rule in. When ``0``, this will
set the global default rule.
"""
if rule is None:
self.clear_rule_for(Requires.DEFAULT, guild_id=guild_id)
elif rule is True:
self.allow_for(Requires.DEFAULT, guild_id=guild_id)
elif rule is False:
self.deny_to(Requires.DEFAULT, guild_id=guild_id)
class Command(CogCommandMixin, DPYCommand):
    """Command class for Red.
    This should not be created directly, and instead via the decorator.
    This class inherits from `discord.ext.commands.Command`. The
    attributes listed below are simply additions to the ones listed
    with that class.
    .. warning::
        If you subclass this command, attributes and methods
        must remain compatible.
        None of your methods should start with ``red_`` or
        be dunder names which start with red (eg. ``__red_test_thing__``)
        unless to override behavior in a method designed to be overridden,
        as this prefix is reserved for future methods in order to be
        able to add features non-breakingly.
    Attributes
    ----------
    checks : List[`coroutine function`]
        A list of check predicates which cannot be overridden, unlike
        `Requires.checks`.
    translator : Translator
        A translator for this command's help docstring.
    ignore_optional_for_conversion : bool
        A value which can be set to not have discord.py's
        argument parsing behavior for ``typing.Optional``
        (type used will be of the inner type instead)
    """
    def __call__(self, *args, **kwargs):
        # Calling the command object directly calls the raw callback,
        # injecting the cog as ``self`` when one is attached.
        if self.cog:
            # We need to inject cog as self here
            return self.callback(self.cog, *args, **kwargs)
        else:
            return self.callback(*args, **kwargs)
    def __init__(self, *args, **kwargs):
        # Must be popped *before* super().__init__: the parent constructor
        # assigns the callback, and our callback setter reads this flag.
        self.ignore_optional_for_conversion = kwargs.pop("ignore_optional_for_conversion", False)
        super().__init__(*args, **kwargs)
        # NOTE(review): these pops happen after super().__init__ has already
        # received kwargs, so the parent also saw ``help_override``/``i18n``;
        # this relies on the parent tolerating unknown kwargs — confirm
        # against the discord.py version in use.
        self._help_override = kwargs.pop("help_override", None)
        self.translator = kwargs.pop("i18n", None)
        if self.parent is None:
            # Top-level commands only: reject reserved names and overly
            # long qualified names up front.
            for name in (self.name, *self.aliases):
                if name in RESERVED_COMMAND_NAMES:
                    raise RuntimeError(
                        f"The name `{name}` cannot be set as a command name. It is reserved for internal use."
                    )
        if len(self.qualified_name) > 60:
            raise RuntimeError(
                f"This command ({self.qualified_name}) has an excessively long qualified name, "
                "and will not be added to the bot to prevent breaking tools and menus. (limit 60)"
            )
    def _ensure_assignment_on_copy(self, other):
        # Keep Red-specific attributes when discord.py copies the command.
        super()._ensure_assignment_on_copy(other)
        # Red specific
        other.requires = self.requires
        other.ignore_optional_for_conversion = self.ignore_optional_for_conversion
        return other
    @property
    def callback(self):
        return self._callback
    @callback.setter
    def callback(self, function):
        """
        Below should be mostly the same as discord.py
        Currently, we modify behavior for
        - functools.partial support
        - typing.Optional behavior change as an option
        """
        self._callback = function
        if isinstance(function, functools.partial):
            self.module = function.func.__module__
            globals_ = function.func.__globals__
        else:
            self.module = function.__module__
            globals_ = function.__globals__
        signature = inspect.signature(function)
        self.params = signature.parameters.copy()
        # PEP-563 allows postponing evaluation of annotations with a __future__
        # import. When postponed, Parameter.annotation will be a string and must
        # be replaced with the real value for the converters to work later on
        for key, value in self.params.items():
            if isinstance(value.annotation, str):
                self.params[key] = value = value.replace(
                    annotation=eval(value.annotation, globals_)
                )
            # fail early for when someone passes an unparameterized Greedy type
            if value.annotation is Greedy:
                raise TypeError("Unparameterized Greedy[...] is disallowed in signature.")
            if not self.ignore_optional_for_conversion:
                continue  # reduces indentation compared to alternative
            try:
                vtype = value.annotation.__origin__
                if vtype is Union:
                    _NoneType = type if TYPE_CHECKING else type(None)
                    args = value.annotation.__args__
                    if _NoneType in args:
                        # Strip NoneType out of the Union so the converter
                        # uses the inner type(s) directly.
                        args = tuple(a for a in args if a is not _NoneType)
                        if len(args) == 1:
                            # can't have a union of 1 or 0 items
                            # 1 prevents this from becoming 0
                            # we need to prevent 2 become 1
                            # (Don't change that to becoming, it's intentional :musical_note:)
                            self.params[key] = value = value.replace(annotation=args[0])
                        else:
                            # and mypy wretches at the correct Union[args]
                            temp_type = type if TYPE_CHECKING else Union[args]
                            self.params[key] = value = value.replace(annotation=temp_type)
            except AttributeError:
                # Annotation has no __origin__; nothing to unwrap.
                continue
    @property
    def help(self):
        """Help string for this command.
        If the :code:`help` kwarg was passed into the decorator, it will
        default to that. If not, it will attempt to translate the docstring
        of the command's callback function.
        """
        if self._help_override is not None:
            return self._help_override
        if self.translator is None:
            translator = getattr(self.cog, "__translator__", lambda s: s)
        else:
            translator = self.translator
        command_doc = self.callback.__doc__
        if command_doc is None:
            return ""
        return inspect.cleandoc(translator(command_doc))
    @help.setter
    def help(self, value):
        # We don't want our help property to be overwritten, namely by super()
        pass
    @property
    def parents(self) -> List["Group"]:
        """List[commands.Group] : Returns all parent commands of this command.
        This is sorted by the length of :attr:`.qualified_name` from highest to lowest.
        If the command has no parents, this will be an empty list.
        """
        cmd = self.parent
        entries = []
        while cmd is not None:
            entries.append(cmd)
            cmd = cmd.parent
        return sorted(entries, key=lambda x: len(x.qualified_name), reverse=True)
    # noinspection PyMethodOverriding
    async def can_run(
        self,
        ctx: "Context",
        *,
        check_all_parents: bool = False,
        change_permission_state: bool = False,
    ) -> bool:
        """Check if this command can be run in the given context.
        This function first checks if the command can be run using
        discord.py's method `discord.ext.commands.Command.can_run`,
        then will return the result of `Requires.verify`.
        Keyword Arguments
        -----------------
        check_all_parents : bool
            If ``True``, this will check permissions for all of this
            command's parents and its cog as well as the command
            itself. Defaults to ``False``.
        change_permission_state : bool
            Whether or not the permission state should be changed as
            a result of this call. For most cases this should be
            ``False``. Defaults to ``False``.
        """
        ret = await super().can_run(ctx)
        if ret is False:
            return False
        # This is so contexts invoking other commands can be checked with
        # this command as well
        original_command = ctx.command
        original_state = ctx.permission_state
        ctx.command = self
        if check_all_parents is True:
            # Since we're starting from the beginning, we should reset the state to normal
            ctx.permission_state = PermState.NORMAL
            for parent in reversed(self.parents):
                try:
                    result = await parent.can_run(ctx, change_permission_state=True)
                except CommandError:
                    result = False
                if result is False:
                    # NOTE(review): returning here skips the ``finally`` below,
                    # so ctx.command/ctx.permission_state are not restored —
                    # confirm this is intended.
                    return False
        if self.parent is None and self.cog is not None:
            # For top-level commands, we need to check the cog's requires too
            ret = await self.cog.requires.verify(ctx)
            if ret is False:
                # NOTE(review): same restoration caveat as above.
                return False
        try:
            return await self.requires.verify(ctx)
        finally:
            # Restore the context for the originally-invoked command.
            ctx.command = original_command
            if not change_permission_state:
                ctx.permission_state = original_state
    async def prepare(self, ctx):
        ctx.command = self
        if not self.enabled:
            raise DisabledCommand(f"{self.name} command is disabled")
        if not await self.can_run(ctx, change_permission_state=True):
            raise CheckFailure(f"The check functions for command {self.qualified_name} failed.")
        if self._max_concurrency is not None:
            await self._max_concurrency.acquire(ctx)
        try:
            if self.cooldown_after_parsing:
                await self._parse_arguments(ctx)
                self._prepare_cooldowns(ctx)
            else:
                self._prepare_cooldowns(ctx)
                await self._parse_arguments(ctx)
            await self.call_before_hooks(ctx)
        except:
            # Release the concurrency slot acquired above on any failure,
            # then propagate.
            if self._max_concurrency is not None:
                await self._max_concurrency.release(ctx)
            raise
    async def do_conversion(
        self, ctx: "Context", converter, argument: str, param: inspect.Parameter
    ):
        """Convert an argument according to its type annotation.
        Raises
        ------
        ConversionFailure
            If doing the conversion failed.
        Returns
        -------
        Any
            The converted argument.
        """
        # Let's not worry about all of this junk if it's just a str converter
        if converter is str:
            return argument
        try:
            return await super().do_conversion(ctx, converter, argument, param)
        except BadArgument as exc:
            raise ConversionFailure(converter, argument, param, *exc.args) from exc
        except ValueError as exc:
            # Some common converters need special treatment...
            if converter in (int, float):
                message = _('"{argument}" is not a number.').format(argument=argument)
                raise ConversionFailure(converter, argument, param, message) from exc
            # We should expose anything which might be a bug in the converter
            raise exc
    async def can_see(self, ctx: "Context"):
        """Check if this command is visible in the given context.
        In short, this will verify whether the user can run the
        command, and also whether the command is hidden or not.
        Parameters
        ----------
        ctx : `Context`
            The invocation context to check with.
        Returns
        -------
        bool
            ``True`` if this command is visible in the given context.
        """
        # A hidden command, or any hidden ancestor, hides this command.
        for cmd in (self, *self.parents):
            if cmd.hidden:
                return False
            try:
                can_run = await self.can_run(
                    ctx, check_all_parents=True, change_permission_state=False
                )
            except (CheckFailure, DisabledCommand):
                return False
            else:
                if can_run is False:
                    return False
        return True
    def disable_in(self, guild: discord.Guild) -> bool:
        """Disable this command in the given guild.
        Parameters
        ----------
        guild : discord.Guild
            The guild to disable the command in.
        Returns
        -------
        bool
            ``True`` if the command wasn't already disabled.
        """
        disabler = get_command_disabler(guild)
        if disabler in self.checks:
            return False
        else:
            self.checks.append(disabler)
            return True
    def enable_in(self, guild: discord.Guild) -> bool:
        """Enable this command in the given guild.
        Parameters
        ----------
        guild : discord.Guild
            The guild to enable the command in.
        Returns
        -------
        bool
            ``True`` if the command wasn't already enabled.
        """
        disabler = get_command_disabler(guild)
        try:
            self.checks.remove(disabler)
        except ValueError:
            return False
        else:
            return True
    def allow_for(self, model_id: Union[int, str], guild_id: int) -> None:
        # Beyond setting our own rule, propagate allowances up the parent
        # chain (and to the cog) so invocation can reach this command.
        super().allow_for(model_id, guild_id=guild_id)
        parents = self.parents
        if self.cog is not None:
            parents.append(self.cog)
        for parent in parents:
            cur_rule = parent.requires.get_rule(model_id, guild_id=guild_id)
            if cur_rule is PermState.NORMAL:
                parent.requires.set_rule(model_id, PermState.PASSIVE_ALLOW, guild_id=guild_id)
            elif cur_rule is PermState.ACTIVE_DENY:
                parent.requires.set_rule(model_id, PermState.CAUTIOUS_ALLOW, guild_id=guild_id)
    def clear_rule_for(
        self, model_id: Union[int, str], guild_id: int
    ) -> Tuple[PermState, PermState]:
        old_rule, new_rule = super().clear_rule_for(model_id, guild_id=guild_id)
        if old_rule is PermState.ACTIVE_ALLOW:
            # An active allow may have put ancestors into passive/cautious
            # states; ask each to re-evaluate until one reports no change.
            parents = self.parents
            if self.cog is not None:
                parents.append(self.cog)
            for parent in parents:
                should_continue = parent.reevaluate_rules_for(model_id, guild_id=guild_id)[1]
                if not should_continue:
                    break
        return old_rule, new_rule
    def error(self, coro):
        """
        A decorator that registers a coroutine as a local error handler.
        A local error handler is an :func:`.on_command_error` event limited to
        a single command.
        The on_command_error event is still dispatched
        for commands with a dedicated error handler.
        Red's global error handler will ignore commands with a registered error handler.
        To have red handle specific errors with the default behavior,
        call ``Red.on_command_error`` with ``unhandled_by_cog`` set to True.
        Due to how discord.py wraps exceptions, the exception you are expecting here
        is likely in ``error.original`` despite that the normal event handler for bot
        wide command error handling has no such wrapping.
        For example:
        .. code-block:: python
            @a_command.error
            async def a_command_error_handler(self, ctx, error):
                if isinstance(error.original, MyErrorType):
                    self.log_exception(error.original)
                else:
                    await ctx.bot.on_command_error(ctx, error.original, unhandled_by_cog=True)
        Parameters
        -----------
        coro : :term:`coroutine function`
            The coroutine to register as the local error handler.
        Raises
        -------
        discord.ClientException
            The coroutine is not actually a coroutine.
        """
        return super().error(coro)
    def format_shortdoc_for_context(self, ctx: "Context") -> str:
        """
        This formats the short version of the help
        string based on values in context
        See ``format_text_for_context`` for the actual implementation details
        Cog creators may override this in their own command and cog classes
        as long as the method signature stays the same.
        Parameters
        ----------
        ctx: Context
        Returns
        -------
        str
            Localized help with some formatting
        """
        sh = self.short_doc
        return self.format_text_for_context(ctx, sh) if sh else sh
class GroupMixin(discord.ext.commands.GroupMixin):
    """Mixin shared by `Group` and ``Red``.
    This class inherits from :class:`discord.ext.commands.GroupMixin`.
    """
    def command(self, *args, **kwargs):
        """Shortcut decorator: create a command through :func:`.command`
        and register it via :meth:`~.GroupMixin.add_command`.
        """
        def register(func):
            kwargs.setdefault("parent", self)
            new_command = command(*args, **kwargs)(func)
            self.add_command(new_command)
            return new_command
        return register
    def group(self, *args, **kwargs):
        """Shortcut decorator: create a group through :func:`.group`
        and register it via :meth:`~.GroupMixin.add_command`.
        """
        def register(func):
            kwargs.setdefault("parent", self)
            new_group = group(*args, **kwargs)(func)
            self.add_command(new_group)
            return new_group
        return register
class CogGroupMixin:
    requires: Requires
    def reevaluate_rules_for(
        self, model_id: Union[str, int], guild_id: int = 0
    ) -> Tuple[PermState, bool]:
        """Re-evaluate this object's rule from its subcommands' rules.
        Called when a subcommand is no longer actively allowed.
        Parameters
        ----------
        model_id : Union[int, str]
            Must be an `int` if supplying an ID. `str` is only valid
            for "default".
        guild_id : int
            The guild ID. For global rules, use ``0``.
        Returns
        -------
        Tuple[PermState, bool]
            A 2-tuple containing the new rule and a bool indicating
            whether or not the rule was changed as a result of this
            call.
        :meta private:
        """
        cur_rule = self.requires.get_rule(model_id, guild_id=guild_id)
        # NORMAL / ACTIVE_ALLOW / ACTIVE_DENY never depend on subcommand rules.
        unaffected = (PermState.NORMAL, PermState.ACTIVE_ALLOW, PermState.ACTIVE_DENY)
        if cur_rule in unaffected:
            return cur_rule, False
        # Remaining states only hold while some subcommand (at any depth,
        # via its own propagated state) is still in an allowed state.
        all_commands: Dict[str, Command] = getattr(self, "all_commands", {})
        has_allowed_sub = any(
            cmd.requires.get_rule(model_id, guild_id=guild_id) in PermStateAllowedStates
            for cmd in all_commands.values()
        )
        if has_allowed_sub:
            return cur_rule, False
        if cur_rule is PermState.PASSIVE_ALLOW:
            self.requires.set_rule(model_id, PermState.NORMAL, guild_id=guild_id)
            return PermState.NORMAL, True
        if cur_rule is PermState.CAUTIOUS_ALLOW:
            self.requires.set_rule(model_id, PermState.ACTIVE_DENY, guild_id=guild_id)
            return PermState.ACTIVE_DENY, True
        # Any other state (e.g. hook-set states) is left unchanged.
        return cur_rule, False
class Group(GroupMixin, Command, CogGroupMixin, DPYGroup):
    """Group command class for Red.
    This class inherits from `Command`, with :class:`GroupMixin` and
    `discord.ext.commands.Group` mixed in.
    """
    def __init__(self, *args, **kwargs):
        # Whether to send this group's help when no subcommand is invoked.
        self.autohelp = kwargs.pop("autohelp", True)
        super().__init__(*args, **kwargs)
    async def invoke(self, ctx: "Context"):
        # we skip prepare in some cases to avoid some things
        # We still always want this part of the behavior though
        ctx.command = self
        ctx.subcommand_passed = None
        # Our re-ordered behavior below.
        view = ctx.view
        previous = view.index
        view.skip_ws()
        trigger = view.get_word()
        if trigger:
            ctx.subcommand_passed = trigger
            ctx.invoked_subcommand = self.all_commands.get(trigger, None)
        # Rewind the string view so the parent implementation re-parses
        # the subcommand word itself.
        view.index = previous
        view.previous = previous
        if ctx.invoked_subcommand is None or self == ctx.invoked_subcommand:
            if self.autohelp and not self.invoke_without_command:
                if not await self.can_run(ctx, change_permission_state=True):
                    raise CheckFailure()
                # This ordering prevents sending help before checking `before_invoke` hooks
                await super().invoke(ctx)
                return await ctx.send_help()
        elif self.invoke_without_command:
            # So invoke_without_command when a subcommand of this group is invoked
            # will skip the invocation of *this* command. However, because of
            # how our permissions system works, we don't want it to skip the checks
            # as well.
            if not await self.can_run(ctx, change_permission_state=True):
                raise CheckFailure()
            # this is actually why we don't prepare earlier.
        await super().invoke(ctx)
class CogMixin(CogGroupMixin, CogCommandMixin):
    """Mixin class for a cog, intended for use with discord.py's cog class"""
    @property
    def help(self):
        # Translated, cleandoc'd class docstring; implicitly returns
        # ``None`` when the cog has no docstring.
        doc = self.__doc__
        translator = getattr(self, "__translator__", lambda s: s)
        if doc:
            return inspect.cleandoc(translator(doc))
    async def red_get_data_for_user(self, *, user_id: int) -> MutableMapping[str, io.BytesIO]:
        """
        .. note::
            This method is documented provisionally
            and may have minor changes made to it.
            It is not expected to undergo major changes,
            but nothing utilizes this method yet and the inclusion of this method
            in documentation in advance is solely to allow cog creators time to prepare.
        This should be overridden by all cogs.
        Overridden implementations should return a mapping of filenames to io.BytesIO
        containing a human-readable version of the data
        the cog has about the specified user_id or an empty mapping
        if the cog does not have end user data.
        The data should be easily understood for what it represents to
        most users of age to use Discord.
        You may want to include a readme file
        which explains specifics about the data.
        This method may also be implemented for an extension.
        Parameters
        ----------
        user_id: int
        Returns
        -------
        MutableMapping[str, io.BytesIO]
            A mapping of filenames to BytesIO objects
            suitable to send as a files or as part of an archive to a user.
            This may be empty if you don't have data for users.
        Raises
        ------
        RedUnhandledAPI
            If the method was not overridden,
            or an overridden implementation is not handling this
        """
        # Default behavior: signal that the cog has not handled the API.
        raise RedUnhandledAPI()
    async def red_delete_data_for_user(
        self,
        *,
        requester: Literal["discord_deleted_user", "owner", "user", "user_strict"],
        user_id: int,
    ):
        """
        This should be overridden by all cogs.
        If your cog does not store data, overriding and doing nothing should still
        be done to indicate that this has been considered.
        .. note::
            This may receive other strings in the future without warning
            you should safely handle
            any string value (log a warning if needed)
            as additional requester types may be added
            in the future without prior warning.
            (see what this method can raise for details)
        This method can currently be passed one of these strings:
            -   ``"discord_deleted_user"``:
                The request should be processed as if
                Discord has asked for the data removal
                This then additionally must treat the
                user ID itself as something to be deleted.
                The user ID is no longer operational data
                as the ID no longer refers to a valid user.
            -   ``"owner"``:
                The request was made by the bot owner.
                If removing the data requested by the owner
                would be an operational hazard
                (such as removing a user id from a blocked user list)
                you may elect to inform the user of an alternative way
                to remove that ID to ensure the process can not be abused
                by users to bypass anti-abuse measures,
                but there must remain a way for them to process this request.
            -   ``"user_strict"``:
                The request was made by a user,
                the bot settings allow a user to request their own data
                be deleted, and the bot is configured to respect this
                at the cost of functionality.
                Cogs may retain data needed for anti abuse measures
                such as IDs and timestamps of interactions,
                but should not keep EUD such
                as user nicknames if receiving a request of this nature.
            -   ``"user"``:
                The request was made by a user,
                the bot settings allow a user to request their own data
                be deleted, and the bot is configured to let cogs keep
                data needed for operation.
                Under this case, you may elect to retain data which is
                essential to the functionality of the cog. This case will
                only happen if the bot owner has opted into keeping
                minimal EUD needed for cog functionality.
        Parameters
        ----------
        requester: Literal["discord_deleted_user", "owner", "user", "user_strict"]
            See above notes for details about this parameter
        user_id: int
            The user ID which needs deletion handling
        Raises
        ------
        RedUnhandledAPI
            If the method was not overridden,
            or an overridden implementation is not handling this
        """
        # Default behavior: signal that the cog has not handled the API.
        raise RedUnhandledAPI()
    async def can_run(self, ctx: "Context", **kwargs) -> bool:
        """
        This really just exists to allow easy use with other methods using can_run
        on commands and groups such as help formatters.
        kwargs used in that won't apply here as they don't make sense to,
        but will be swallowed silently for a compatible signature for ease of use.
        Parameters
        ----------
        ctx : `Context`
            The invocation context to check with.
        Returns
        -------
        bool
            ``True`` if this cog is usable in the given context.
        :meta private:
        """
        try:
            can_run = await self.requires.verify(ctx)
        except CommandError:
            return False
        return can_run
    async def can_see(self, ctx: "Context") -> bool:
        """Check if this cog is visible in the given context.
        In short, this will verify whether
        the user is allowed to access the cog by permissions.
        This has an identical signature to the one used by commands, and groups,
        but needs a different underlying mechanism.
        Parameters
        ----------
        ctx : `Context`
            The invocation context to check with.
        Returns
        -------
        bool
            ``True`` if this cog is visible in the given context.
        :meta private:
        """
        return await self.can_run(ctx)
class Cog(CogMixin, DPYCog, metaclass=DPYCogMeta):
    """Base class for cogs in Red.
    Combines Red's `CogMixin` with discord.py's cog class and metaclass.
    .. warning::
        None of your methods should start with ``red_`` or
        be dunder names which start with red (eg. ``__red_test_thing__``)
        unless to override behavior in a method designed to be overridden,
        as this prefix is reserved for future methods in order to be
        able to add features non-breakingly.
        Attributes and methods must remain compatible
        with discord.py and with any of red's methods and attributes.
    """
    __cog_commands__: Tuple[Command]
    @property
    def all_commands(self) -> Dict[str, Command]:
        """Mapping of command name to command for this cog.
        This does not behave identically to ``Group.all_commands``
        (only primary names are used as keys), but should return what
        you expect.
        :meta private:
        """
        mapping: Dict[str, Command] = {}
        for cog_command in self.__cog_commands__:
            mapping[cog_command.name] = cog_command
        return mapping
def command(name=None, cls=Command, **attrs):
    """Decorator turning an async function into a Red `Command`.
    Mirrors the interface of `discord.ext.commands.command`.
    """
    # Stash any ``help`` kwarg so Red's ``help`` property can serve it
    # as an override later.
    help_text = attrs.pop("help", None)
    attrs["help_override"] = help_text
    return dpy_command_deco(name, cls, **attrs)
def group(name=None, cls=Group, **attrs):
    """Decorator turning an async function into a Red `Group`.
    Mirrors the interface of `discord.ext.commands.group`.
    """
    return dpy_command_deco(name, cls, **attrs)
__command_disablers: DisablerDictType = weakref.WeakValueDictionary()
def get_command_disabler(guild: discord.Guild) -> Callable[["Context"], Awaitable[bool]]:
    """Return (creating it if needed) the command disabler for a guild.
    A command disabler is a check predicate which raises `DisabledCommand`
    when the context belongs to the given guild and returns ``True``
    otherwise.  Disablers are cached per guild id in a weak-value mapping.
    """
    cached = __command_disablers.get(guild.id)
    if cached is not None:
        return cached
    async def disabler(ctx: "Context") -> bool:
        if ctx.guild is not None and ctx.guild.id == guild.id:
            raise DisabledCommand()
        return True
    __command_disablers[guild.id] = disabler
    return disabler
# The below are intentionally left out of `__all__`
# as they are not intended for general use
class _AlwaysAvailableMixin:
"""
This should be used for commands
which should not be disabled or removed
These commands cannot belong to any cog except Core (core_commands.py)
to prevent issues with the appearance of certain behavior.
These commands do not respect most forms of checks, and
should only be used with that in mind.
This particular class is not supported for 3rd party use
"""
async def can_run(self, ctx, *args, **kwargs) -> bool:
return not ctx.author.bot
can_see = can_run
class _RuleDropper(CogCommandMixin):
    """
    Commands or cogs inheriting from this ignore externally-set rules:
    only their own rules, or global checks aimed at abuse prevention
    (such as one disallowing blocked users and bots from interacting),
    should affect their operation.
    This should not be used by 3rd-party extensions directly for their own objects.
    """
    def allow_for(self, model_id: Union[int, str], guild_id: int) -> None:
        """No-op: rule changes are intentionally dropped."""
    def deny_to(self, model_id: Union[int, str], guild_id: int) -> None:
        """No-op: rule changes are intentionally dropped."""
    def clear_rule_for(
        self, model_id: Union[int, str], guild_id: int
    ) -> Tuple[PermState, PermState]:
        """No-op, except that a caller-compatible rule tuple is returned."""
        existing = self.requires.get_rule(model_id, guild_id=guild_id)
        return existing, existing
    def set_default_rule(self, rule: Optional[bool], guild_id: int) -> None:
        """No-op: rule changes are intentionally dropped."""
# Command variant that bypasses checks and ignores rule changes.
class _AlwaysAvailableCommand(_AlwaysAvailableMixin, _RuleDropper, Command):
    pass
# Group variant that bypasses checks and ignores rule changes.
class _AlwaysAvailableGroup(_AlwaysAvailableMixin, _RuleDropper, Group):
    pass
class _ForgetMeSpecialCommand(_RuleDropper, Command):
    """
    Command with special ``can_run`` behavior: availability is gated only
    by the bot's data-request configuration, not by normal checks.
    """
    async def can_run(self, ctx, *args, **kwargs) -> bool:
        # Available exactly when user-initiated data requests are enabled.
        return await ctx.bot._config.datarequests.allow_user_requests()
    can_see = can_run
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/commands/commands.py
| 0.841386 | 0.153803 |
commands.py
|
pypi
|
import asyncio
import enum
import inspect
from collections import ChainMap
from typing import (
TYPE_CHECKING,
Any,
Awaitable,
Callable,
ClassVar,
Dict,
List,
Mapping,
Optional,
Tuple,
TypeVar,
Union,
)
import discord
from discord.ext.commands import check
from .errors import BotMissingPermissions
if TYPE_CHECKING:
from .commands import Command
from .context import Context
# Type var constrained to either a coroutine function or a Command,
# for decorators that accept both.
_CommandOrCoro = TypeVar("_CommandOrCoro", Callable[..., Awaitable[Any]], Command)
__all__ = [
    "CheckPredicate",
    "DM_PERMS",
    "GlobalPermissionModel",
    "GuildPermissionModel",
    "PermissionModel",
    "PrivilegeLevel",
    "PermState",
    "Requires",
    "permissions_check",
    "bot_has_permissions",
    "bot_in_a_guild",
    "has_permissions",
    "has_guild_permissions",
    "is_owner",
    "guildowner",
    "guildowner_or_permissions",
    "admin",
    "admin_or_permissions",
    "mod",
    "mod_or_permissions",
    "transition_permstate_to",
    "PermStateTransitions",
    "PermStateAllowedStates",
]
_T = TypeVar("_T")
# Discord model types that global (non-guild-scoped) rules may target.
GlobalPermissionModel = Union[
    discord.User,
    discord.VoiceChannel,
    discord.TextChannel,
    discord.CategoryChannel,
    discord.Role,
    discord.Guild,
]
# Discord model types that guild-scoped rules may target.
GuildPermissionModel = Union[
    discord.Member,
    discord.VoiceChannel,
    discord.TextChannel,
    discord.CategoryChannel,
    discord.Role,
    discord.Guild,
]
PermissionModel = Union[GlobalPermissionModel, GuildPermissionModel]
# A check predicate: takes a Context, returns bool/None, possibly awaitable.
CheckPredicate = Callable[["Context"], Union[Optional[bool], Awaitable[Optional[bool]]]]
# Here we are trying to model DM permissions as closely as possible. The only
# discrepancy I've found is that users can pin messages, but they cannot delete them.
# This means manage_messages is only half True, so it's left as False.
# This is also the same as the permissions returned when `permissions_for` is used in DM.
DM_PERMS = discord.Permissions.none()
DM_PERMS.update(
    add_reactions=True,
    attach_files=True,
    embed_links=True,
    external_emojis=True,
    mention_everyone=True,
    read_message_history=True,
    read_messages=True,
    send_messages=True,
)
class PrivilegeLevel(enum.IntEnum):
    """Enumeration for special privileges."""
    # Maintainer Note: do NOT re-order these.
    # Each privilege level also implies access to the ones before it.
    # Inserting new privilege levels at a later point is fine if that is considered.
    NONE = enum.auto()
    """No special privilege level."""
    MOD = enum.auto()
    """User has the mod role."""
    ADMIN = enum.auto()
    """User has the admin role."""
    GUILD_OWNER = enum.auto()
    """User is the guild owner."""
    BOT_OWNER = enum.auto()
    """User is a bot owner."""
    @classmethod
    async def from_ctx(cls, ctx: "Context") -> "PrivilegeLevel":
        """Get a command author's PrivilegeLevel based on context."""
        # Checked from highest to lowest; bot owner wins even in DMs.
        if await ctx.bot.is_owner(ctx.author):
            return cls.BOT_OWNER
        elif ctx.guild is None:
            return cls.NONE
        elif ctx.author == ctx.guild.owner:
            return cls.GUILD_OWNER
        # The following is simply an optimised way to check if the user has the
        # admin or mod role.
        guild_settings = ctx.bot._config.guild(ctx.guild)
        member_snowflakes = ctx.author._roles  # DEP-WARN
        for snowflake in await guild_settings.admin_role():
            if member_snowflakes.has(snowflake):  # DEP-WARN
                return cls.ADMIN
        for snowflake in await guild_settings.mod_role():
            if member_snowflakes.has(snowflake):  # DEP-WARN
                return cls.MOD
        return cls.NONE
    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}.{self.name}>"
class PermState(enum.Enum):
    """Enumeration for permission states used by rules."""
    ACTIVE_ALLOW = enum.auto()
    """This command has been actively allowed, default user checks
    should be ignored.
    """
    NORMAL = enum.auto()
    """No overrides have been set for this command, make determination
    from default user checks.
    """
    PASSIVE_ALLOW = enum.auto()
    """There exists a subcommand in the `ACTIVE_ALLOW` state, continue
    down the subcommand tree until we either find it or realise we're
    on the wrong branch.
    """
    CAUTIOUS_ALLOW = enum.auto()
    """This command has been actively denied, but there exists a
    subcommand in the `ACTIVE_ALLOW` state. This occurs when
    `PASSIVE_ALLOW` and `ACTIVE_DENY` are combined.
    """
    ACTIVE_DENY = enum.auto()
    """This command has been actively denied, terminate the command
    chain.
    """
    # The below are valid states, but should not be transitioned to
    # They should be set if they apply.
    ALLOWED_BY_HOOK = enum.auto()
    """This command has been actively allowed by a permission hook.
    check validation swaps this out, but the information may be useful
    to developers. It is treated as `ACTIVE_ALLOW` for the current command
    and `PASSIVE_ALLOW` for subcommands."""
    DENIED_BY_HOOK = enum.auto()
    """This command has been actively denied by a permission hook
    check validation swaps this out, but the information may be useful
    to developers. It is treated as `ACTIVE_DENY` for the current command
    and any subcommands."""
    @classmethod
    def from_bool(cls, value: Optional[bool]) -> "PermState":
        """Get a PermState from a bool or ``NoneType``."""
        if value is True:
            return cls.ACTIVE_ALLOW
        elif value is False:
            return cls.ACTIVE_DENY
        else:
            # ``None`` (and any other value) maps to NORMAL.
            return cls.NORMAL
    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}.{self.name}>"
# Here we're defining how we transition between states.
# The dict is in the form:
#   previous state -> this state -> Tuple[override, next state]
# "override" is a bool describing whether or not the command should be
# invoked. It can be None, in which case the default permission checks
# will be used instead.
# There is also one case where the "next state" is dependent on the
# result of the default permission checks - the transition from NORMAL
# to PASSIVE_ALLOW. In this case "next state" is a dict mapping the
# permission check results to the actual next state.
TransitionResult = Tuple[Optional[bool], Union[PermState, Dict[bool, PermState]]]
TransitionDict = Dict[PermState, Dict[PermState, TransitionResult]]
PermStateTransitions: TransitionDict = {
    PermState.ACTIVE_ALLOW: {
        PermState.ACTIVE_ALLOW: (True, PermState.ACTIVE_ALLOW),
        PermState.NORMAL: (True, PermState.ACTIVE_ALLOW),
        PermState.PASSIVE_ALLOW: (True, PermState.ACTIVE_ALLOW),
        PermState.CAUTIOUS_ALLOW: (True, PermState.CAUTIOUS_ALLOW),
        PermState.ACTIVE_DENY: (False, PermState.ACTIVE_DENY),
    },
    PermState.NORMAL: {
        PermState.ACTIVE_ALLOW: (True, PermState.ACTIVE_ALLOW),
        PermState.NORMAL: (None, PermState.NORMAL),
        # The only transition whose next state depends on the default checks.
        PermState.PASSIVE_ALLOW: (True, {True: PermState.NORMAL, False: PermState.PASSIVE_ALLOW}),
        PermState.CAUTIOUS_ALLOW: (True, PermState.CAUTIOUS_ALLOW),
        PermState.ACTIVE_DENY: (False, PermState.ACTIVE_DENY),
    },
    PermState.PASSIVE_ALLOW: {
        PermState.ACTIVE_ALLOW: (True, PermState.ACTIVE_ALLOW),
        PermState.NORMAL: (False, PermState.NORMAL),
        PermState.PASSIVE_ALLOW: (True, PermState.PASSIVE_ALLOW),
        PermState.CAUTIOUS_ALLOW: (True, PermState.CAUTIOUS_ALLOW),
        PermState.ACTIVE_DENY: (False, PermState.ACTIVE_DENY),
    },
    PermState.CAUTIOUS_ALLOW: {
        PermState.ACTIVE_ALLOW: (True, PermState.ACTIVE_ALLOW),
        PermState.NORMAL: (False, PermState.ACTIVE_DENY),
        PermState.PASSIVE_ALLOW: (True, PermState.CAUTIOUS_ALLOW),
        PermState.CAUTIOUS_ALLOW: (True, PermState.CAUTIOUS_ALLOW),
        PermState.ACTIVE_DENY: (False, PermState.ACTIVE_DENY),
    },
    PermState.ACTIVE_DENY: {  # We can only start from ACTIVE_DENY if it is set on a cog.
        PermState.ACTIVE_ALLOW: (True, PermState.ACTIVE_ALLOW),  # Should never happen
        PermState.NORMAL: (False, PermState.ACTIVE_DENY),
        PermState.PASSIVE_ALLOW: (False, PermState.ACTIVE_DENY),  # Should never happen
        PermState.CAUTIOUS_ALLOW: (False, PermState.ACTIVE_DENY),  # Should never happen
        PermState.ACTIVE_DENY: (False, PermState.ACTIVE_DENY),
    },
}
# States under which a command counts as allowed to proceed.
PermStateAllowedStates = (
    PermState.ACTIVE_ALLOW,
    PermState.PASSIVE_ALLOW,
    PermState.CAUTIOUS_ALLOW,
)
def transition_permstate_to(prev: PermState, next_state: PermState) -> TransitionResult:
    """Resolve a state transition, mapping hook states to their effective form."""
    # The hook states are informational; substitute the state whose
    # transition semantics they share before consulting the table.
    effective = prev
    if effective is PermState.ALLOWED_BY_HOOK:
        # Hook allows are extremely granular: they must not cascade to
        # every subcommand, hence PASSIVE_ALLOW.
        effective = PermState.PASSIVE_ALLOW
    elif effective is PermState.DENIED_BY_HOOK:
        # Hook denials, by contrast, apply to the whole subcommand tree.
        effective = PermState.ACTIVE_DENY
    return PermStateTransitions[effective][next_state]
class Requires:
    """This class describes the requirements for executing a specific command.
    The permissions described include both bot permissions and user
    permissions.
    Attributes
    ----------
    checks : List[Callable[[Context], Union[bool, Awaitable[bool]]]]
        A list of checks which can be overridden by rules. Use
        `Command.checks` if you would like them to never be overridden.
    privilege_level : PrivilegeLevel
        The required privilege level (bot owner, admin, etc.) for users
        to execute the command. Can be ``None``, in which case the
        `user_perms` will be used exclusively, otherwise, for levels
        other than bot owner, the user can still run the command if
        they have the required `user_perms`.
    ready_event : asyncio.Event
        Event for when this Requires object has had its rules loaded.
        If permissions is loaded, this should be set when permissions
        has finished loading rules into this object. If permissions
        is not loaded, it should be set as soon as the command or cog
        is added.
    user_perms : Optional[discord.Permissions]
        The required permissions for users to execute the command. Can
        be ``None``, in which case the `privilege_level` will be used
        exclusively, otherwise, it will pass whether the user has the
        required `privilege_level` _or_ `user_perms`.
    bot_perms : discord.Permissions
        The required bot permissions for a command to be executed. This
        is not overrideable by other conditions.
    """
    DEFAULT: ClassVar[str] = "default"
    """The key for the default rule in a rules dict."""
    GLOBAL: ClassVar[int] = 0
    """Should be used in place of a guild ID when setting/getting
    global rules.
    """
    def __init__(
        self,
        privilege_level: Optional[PrivilegeLevel],
        user_perms: Union[Dict[str, bool], discord.Permissions, None],
        bot_perms: Union[Dict[str, bool], discord.Permissions],
        checks: List[CheckPredicate],
    ):
        self.checks: List[CheckPredicate] = checks
        self.privilege_level: Optional[PrivilegeLevel] = privilege_level
        self.ready_event = asyncio.Event()
        # Permissions may be passed either as ready-made Permissions objects
        # (or None) or as dicts of flag names; dicts are validated and folded
        # into a fresh Permissions value.
        if isinstance(user_perms, dict):
            self.user_perms: Optional[discord.Permissions] = discord.Permissions.none()
            _validate_perms_dict(user_perms)
            self.user_perms.update(**user_perms)
        else:
            self.user_perms = user_perms
        if isinstance(bot_perms, dict):
            self.bot_perms: discord.Permissions = discord.Permissions.none()
            _validate_perms_dict(bot_perms)
            self.bot_perms.update(**bot_perms)
        else:
            self.bot_perms = bot_perms
        # Rule storage: one dict of global rules, plus one rules dict per guild.
        self._global_rules: _RulesDict = _RulesDict()
        self._guild_rules: _IntKeyDict[_RulesDict] = _IntKeyDict[_RulesDict]()
    @staticmethod
    def get_decorator(
        privilege_level: Optional[PrivilegeLevel], user_perms: Optional[Dict[str, bool]]
    ) -> Callable[["_CommandOrCoro"], "_CommandOrCoro"]:
        # An empty perms dict means "no permission requirement" at all.
        if not user_perms:
            user_perms = None
        def decorator(func: "_CommandOrCoro") -> "_CommandOrCoro":
            if inspect.iscoroutinefunction(func):
                # Bare coroutine: stash the requirements on dunder attributes
                # (presumably consumed when the command object is created --
                # the command-construction side is not visible here).
                func.__requires_privilege_level__ = privilege_level
                func.__requires_user_perms__ = user_perms
            else:
                # Already a command: update its Requires object in place.
                func.requires.privilege_level = privilege_level
                if user_perms is None:
                    func.requires.user_perms = None
                else:
                    _validate_perms_dict(user_perms)
                    assert func.requires.user_perms is not None
                    func.requires.user_perms.update(**user_perms)
            return func
        return decorator
    def get_rule(self, model: Union[int, str, PermissionModel], guild_id: int) -> PermState:
        """Get the rule for a particular model.
        Parameters
        ----------
        model : Union[int, str, PermissionModel]
            The model to get the rule for. `str` is only valid for
            `Requires.DEFAULT`.
        guild_id : int
            The ID of the guild for the rule's scope. Set to
            `Requires.GLOBAL` for a global rule.
            If a global rule is set for a model,
            it will be preferred over the guild rule.
        Returns
        -------
        PermState
            The state for this rule. See the `PermState` class
            for an explanation.
        """
        if not isinstance(model, (str, int)):
            model = model.id
        rules: Mapping[Union[int, str], PermState]
        if guild_id:
            # ChainMap searches the global rules first, so they take precedence.
            rules = ChainMap(self._global_rules, self._guild_rules.get(guild_id, _RulesDict()))
        else:
            rules = self._global_rules
        return rules.get(model, PermState.NORMAL)
    def set_rule(self, model_id: Union[str, int], rule: PermState, guild_id: int) -> None:
        """Set the rule for a particular model.
        Parameters
        ----------
        model_id : Union[str, int]
            The model to add a rule for. `str` is only valid for
            `Requires.DEFAULT`.
        rule : PermState
            Which state this rule should be set as. See the `PermState`
            class for an explanation.
        guild_id : int
            The ID of the guild for the rule's scope. Set to
            `Requires.GLOBAL` for a global rule.
        """
        if guild_id:
            rules = self._guild_rules.setdefault(guild_id, _RulesDict())
        else:
            rules = self._global_rules
        # NORMAL means "no override" and is stored as absence of an entry.
        if rule is PermState.NORMAL:
            rules.pop(model_id, None)
        else:
            rules[model_id] = rule
    def clear_all_rules(self, guild_id: int, *, preserve_default_rule: bool = True) -> None:
        """Clear all rules of a particular scope.
        Parameters
        ----------
        guild_id : int
            The guild ID to clear rules for. If set to
            `Requires.GLOBAL`, this will clear all global rules and
            leave all guild rules untouched.
        Other Parameters
        ----------------
        preserve_default_rule : bool
            Whether to preserve the default rule or not.
            This defaults to being preserved
        """
        if guild_id:
            rules = self._guild_rules.setdefault(guild_id, _RulesDict())
        else:
            rules = self._global_rules
        # Optionally re-insert the default rule after wiping everything else.
        default = rules.get(self.DEFAULT, None)
        rules.clear()
        if default is not None and preserve_default_rule:
            rules[self.DEFAULT] = default
    def reset(self) -> None:
        """Reset this Requires object to its original state.
        This will clear all rules, including defaults. It also resets
        the `Requires.ready_event`.
        """
        self._guild_rules.clear()  # pylint: disable=no-member
        self._global_rules.clear()  # pylint: disable=no-member
        self.ready_event.clear()
    async def verify(self, ctx: "Context") -> bool:
        """Check if the given context passes the requirements.
        This will check the bot permissions, overrides, user permissions
        and privilege level.
        Parameters
        ----------
        ctx : "Context"
            The invocation context to check with.
        Returns
        -------
        bool
            ``True`` if the context passes the requirements.
        Raises
        ------
        BotMissingPermissions
            If the bot is missing required permissions to run the
            command.
        CommandError
            Propagated from any permissions checks.
        """
        # Wait until rules have been loaded into this object.
        if not self.ready_event.is_set():
            await self.ready_event.wait()
        await self._verify_bot(ctx)
        # Owner should never be locked out of commands for user permissions.
        if await ctx.bot.is_owner(ctx.author):
            return True
        # Owner-only commands are non-overrideable, and we already checked for owner.
        if self.privilege_level is PrivilegeLevel.BOT_OWNER:
            return False
        # Permission hooks short-circuit the rule/state machinery entirely.
        hook_result = await ctx.bot.verify_permissions_hooks(ctx)
        if hook_result is not None:
            return hook_result
        return await self._transition_state(ctx)
    async def _verify_bot(self, ctx: "Context") -> None:
        # In DMs the bot is represented by its user; in guilds by its member.
        if ctx.guild is None:
            bot_user = ctx.bot.user
        else:
            bot_user = ctx.guild.me
        cog = ctx.cog
        if cog and await ctx.bot.cog_disabled_in_guild(cog, ctx.guild):
            raise discord.ext.commands.DisabledCommand()
        bot_perms = ctx.channel.permissions_for(bot_user)
        # Administrator implies everything; otherwise require a superset.
        if not (bot_perms.administrator or bot_perms >= self.bot_perms):
            raise BotMissingPermissions(missing=self._missing_perms(self.bot_perms, bot_perms))
    async def _transition_state(self, ctx: "Context") -> bool:
        should_invoke, next_state = self._get_transitioned_state(ctx)
        if should_invoke is None:
            # NORMAL invocation, we simply follow standard procedure
            should_invoke = await self._verify_user(ctx)
        elif isinstance(next_state, dict):
            # NORMAL to PASSIVE_ALLOW; should we proceed as normal or transition?
            # We must check what would happen normally, if no explicit rules were set.
            would_invoke = self._get_would_invoke(ctx)
            if would_invoke is None:
                would_invoke = await self._verify_user(ctx)
            # The table maps the hypothetical outcome to the actual next state.
            next_state = next_state[would_invoke]
        assert isinstance(next_state, PermState)
        ctx.permission_state = next_state
        return should_invoke
    def _get_transitioned_state(self, ctx: "Context") -> TransitionResult:
        prev_state = ctx.permission_state
        cur_state = self._get_rule_from_ctx(ctx)
        return transition_permstate_to(prev_state, cur_state)
    def _get_would_invoke(self, ctx: "Context") -> Optional[bool]:
        # Resolve the default rule: guild scope first, then global scope.
        default_rule = PermState.NORMAL
        if ctx.guild is not None:
            default_rule = self.get_rule(self.DEFAULT, guild_id=ctx.guild.id)
        if default_rule is PermState.NORMAL:
            default_rule = self.get_rule(self.DEFAULT, self.GLOBAL)
        if default_rule == PermState.ACTIVE_DENY:
            return False
        elif default_rule == PermState.ACTIVE_ALLOW:
            return True
        else:
            # NORMAL: no default override -- caller falls back to user checks.
            return None
    async def _verify_user(self, ctx: "Context") -> bool:
        # Order: overridable checks must pass first, then either the
        # required channel permissions or the required privilege level.
        checks_pass = await self._verify_checks(ctx)
        if checks_pass is False:
            return False
        if self.user_perms is not None:
            user_perms = ctx.channel.permissions_for(ctx.author)
            if user_perms.administrator or user_perms >= self.user_perms:
                return True
        if self.privilege_level is not None:
            privilege_level = await PrivilegeLevel.from_ctx(ctx)
            if privilege_level >= self.privilege_level:
                return True
        return False
    def _get_rule_from_ctx(self, ctx: "Context") -> PermState:
        author = ctx.author
        guild = ctx.guild
        if ctx.guild is None:
            # We only check the user for DM channels
            rule = self._global_rules.get(author.id)
            if rule is not None:
                return rule
            return self.get_rule(self.DEFAULT, self.GLOBAL)
        # Global rules are consulted before guild rules; within each scope,
        # the most specific model wins: author, then channels, then roles
        # (highest to lowest), then the guild itself.
        rules_chain = [self._global_rules]
        guild_rules = self._guild_rules.get(ctx.guild.id)
        if guild_rules:
            rules_chain.append(guild_rules)
        channels = []
        if author.voice is not None:
            channels.append(author.voice.channel)
        channels.append(ctx.channel)
        category = ctx.channel.category
        if category is not None:
            channels.append(category)
        # We want author roles sorted highest to lowest, and exclude the @everyone role
        author_roles = reversed(author.roles[1:])
        model_chain = [author, *channels, *author_roles, guild]
        for rules in rules_chain:
            for model in model_chain:
                rule = rules.get(model.id)
                if rule is not None:
                    return rule
            del model_chain[-1]  # We don't check for the guild in guild rules
        default_rule = self.get_rule(self.DEFAULT, guild.id)
        if default_rule is PermState.NORMAL:
            default_rule = self.get_rule(self.DEFAULT, self.GLOBAL)
        return default_rule
    async def _verify_checks(self, ctx: "Context") -> bool:
        if not self.checks:
            return True
        return await discord.utils.async_all(check(ctx) for check in self.checks)
    @staticmethod
    def _get_perms_for(ctx: "Context", user: discord.abc.User) -> discord.Permissions:
        # DMs have a fixed permission set; guild channels compute it.
        if ctx.guild is None:
            return DM_PERMS
        else:
            return ctx.channel.permissions_for(user)
    @classmethod
    def _get_bot_perms(cls, ctx: "Context") -> discord.Permissions:
        return cls._get_perms_for(ctx, ctx.guild.me if ctx.guild else ctx.bot.user)
    @staticmethod
    def _missing_perms(
        required: discord.Permissions, actual: discord.Permissions
    ) -> discord.Permissions:
        # Explained in set theory terms:
        #   Assuming R is the set of required permissions, and A is
        #   the set of the user's permissions, the set of missing
        #   permissions will be equal to R \ A, i.e. the relative
        #   complement/difference of A with respect to R.
        relative_complement = required.value & ~actual.value
        return discord.Permissions(relative_complement)
    @staticmethod
    def _member_as_user(member: discord.abc.User) -> discord.User:
        if isinstance(member, discord.Member):
            # noinspection PyProtectedMember
            return member._user
        return member
    def __repr__(self) -> str:
        return (
            f"<Requires privilege_level={self.privilege_level!r} user_perms={self.user_perms!r} "
            f"bot_perms={self.bot_perms!r}>"
        )
# check decorators
def permissions_check(predicate: CheckPredicate):
    """An overwriteable version of `discord.ext.commands.check`.

    Behaves like `discord.ext.commands.check`, except that the check may
    be bypassed when a permissions cog explicitly allows the command.
    """
    def decorator(func: "_CommandOrCoro") -> "_CommandOrCoro":
        try:
            # Already a command: append to its overridable check list.
            func.requires.checks.append(predicate)
        except AttributeError:
            # Bare coroutine: accumulate checks on a dunder attribute.
            checks = getattr(func, "__requires_checks__", None)
            if checks is None:
                checks = func.__requires_checks__ = []
            # noinspection PyUnresolvedReferences
            checks.append(predicate)
        return func
    return decorator
def has_guild_permissions(**perms):
    """Restrict the command to users with these guild permissions.

    This check can be overridden by rules.
    """
    _validate_perms_dict(perms)
    # Build the required-permissions value once, outside the predicate.
    required = discord.Permissions(**perms)
    def predicate(ctx):
        return ctx.guild and ctx.author.guild_permissions >= required
    return permissions_check(predicate)
def bot_has_permissions(**perms: bool):
    """Complain if the bot is missing permissions.

    If the user tries to run the command while the bot lacks the listed
    permissions, a message describing the missing ones is sent.
    This check cannot be overridden by rules.
    """
    def decorator(func: "_CommandOrCoro") -> "_CommandOrCoro":
        if not asyncio.iscoroutinefunction(func):
            # Already a command: validate and merge into its bot perms now.
            _validate_perms_dict(perms)
            func.requires.bot_perms.update(**perms)
        else:
            # Bare coroutine: stash the raw dict for later command creation.
            func.__requires_bot_perms__ = perms
        return func
    return decorator
def bot_in_a_guild():
    """Deny the command if the bot is not in a guild."""
    async def predicate(ctx):
        return bool(ctx.bot.guilds)
    return check(predicate)
def has_permissions(**perms: bool):
    """Restrict the command to users with these permissions.

    This check can be overridden by rules.

    Raises
    ------
    TypeError
        If no permission keyword arguments are given.
    """
    # BUG FIX: ``perms`` is built from **kwargs and is therefore always a
    # dict, never ``None`` -- the original ``if perms is None`` guard could
    # never trigger, so ``has_permissions()`` silently required nothing.
    # Test for emptiness instead so misuse fails loudly.
    if not perms:
        raise TypeError("Must provide at least one keyword argument to has_permissions")
    return Requires.get_decorator(None, perms)
def is_owner():
    """Restrict the command to bot owners.

    This check cannot be overridden by rules.
    """
    # No user permissions can substitute for bot ownership.
    return Requires.get_decorator(PrivilegeLevel.BOT_OWNER, {})
def guildowner_or_permissions(**perms: bool):
    """Restrict the command to the guild owner or users with these permissions.

    This check can be overridden by rules.
    """
    privilege = PrivilegeLevel.GUILD_OWNER
    return Requires.get_decorator(privilege, perms)
def guildowner():
    """Restrict the command to the guild owner.

    This check can be overridden by rules.
    """
    # Same as requiring GUILD_OWNER privilege with no extra permissions.
    return guildowner_or_permissions()
def admin_or_permissions(**perms: bool):
    """Restrict the command to users with the admin role or these permissions.

    This check can be overridden by rules.
    """
    privilege = PrivilegeLevel.ADMIN
    return Requires.get_decorator(privilege, perms)
def admin():
    """Restrict the command to users with the admin role.

    This check can be overridden by rules.
    """
    # Same as requiring ADMIN privilege with no extra permissions.
    return admin_or_permissions()
def mod_or_permissions(**perms: bool):
    """Restrict the command to users with the mod role or these permissions.

    This check can be overridden by rules.
    """
    privilege = PrivilegeLevel.MOD
    return Requires.get_decorator(privilege, perms)
def mod():
    """Restrict the command to users with the mod role.

    This check can be overridden by rules.
    """
    # Same as requiring MOD privilege with no extra permissions.
    return mod_or_permissions()
class _IntKeyDict(Dict[int, _T]):
"""Dict subclass which throws TypeError when a non-int key is used."""
get: Callable
setdefault: Callable
def __getitem__(self, key: Any) -> _T:
if not isinstance(key, int):
raise TypeError("Keys must be of type `int`")
return super().__getitem__(key) # pylint: disable=no-member
def __setitem__(self, key: Any, value: _T) -> None:
if not isinstance(key, int):
raise TypeError("Keys must be of type `int`")
return super().__setitem__(key, value) # pylint: disable=no-member
class _RulesDict(Dict[Union[int, str], PermState]):
    """Dict subclass which throws a TypeError when an invalid key is used.

    Valid keys are ints (model IDs) or the literal ``Requires.DEFAULT``
    string; ``get`` and ``setdefault`` keep plain ``dict`` behaviour.
    """
    get: Callable
    setdefault: Callable

    @staticmethod
    def _validate_key(key: Any) -> None:
        if key != Requires.DEFAULT and not isinstance(key, int):
            raise TypeError(f'Expected "{Requires.DEFAULT}" or int key, not "{key}"')

    def __getitem__(self, key: Any) -> PermState:
        self._validate_key(key)
        return super().__getitem__(key)  # pylint: disable=no-member

    def __setitem__(self, key: Any, value: PermState) -> None:
        self._validate_key(key)
        return super().__setitem__(key, value)  # pylint: disable=no-member
def _validate_perms_dict(perms: Dict[str, bool]) -> None:
    """Raise ``TypeError`` for unknown permission names or non-``True`` values."""
    unknown = set(perms.keys()) - set(discord.Permissions.VALID_FLAGS)
    if unknown:
        raise TypeError(f"Invalid perm name(s): {', '.join(unknown)}")
    for name, value in perms.items():
        # Only 'True' makes practical sense: required permissions are
        # additive, so demanding a permission be absent is rejected.
        if value is not True:
            raise TypeError(f"Permission {name} may only be specified as 'True', not {value}")
|
/sausage_bot-3.6.2-py3-none-any.whl/redbot/core/commands/requires.py
| 0.79542 | 0.201204 |
requires.py
|
pypi
|
from sqlalchemy import exists, text, exc
from sqlalchemy.ext.compiler import compiles
from sqlalchemy import select, and_, literal, cast, types
from sqlalchemy.sql.expression import (
Executable, ClauseElement, Alias, ColumnClause)
__all__ = ('insert_from_select', 'idem_insert_from_select', 'idem_insert', 'sqltype')
class InsertFromSelect(Executable, ClauseElement):
    """Clause element compiled as ``INSERT INTO <table> [(fields)] (SELECT ...)``."""
    def __init__(self, table, select, *fields, **kw):
        # ``kw`` is accepted for call compatibility but not used by the
        # compiler hook below.
        self.select = select
        self.table = table
        self.fields = fields
@compiles(InsertFromSelect)
def visit_insert_from_select(element, compiler, **kw):
    """Render an :class:`InsertFromSelect` as an ``INSERT ... (SELECT ...)``."""
    # Optional parenthesised column list between the table and the SELECT.
    field_list = ' (%s) ' % ', '.join(element.fields) if element.fields else ' '
    table_sql = compiler.process(element.table, asfrom=True)
    select_sql = compiler.process(element.select)
    return 'INSERT INTO %s%s(%s)' % (table_sql, field_list, select_sql)
# Lowercase alias mirroring SQLAlchemy's function-style construct names.
insert_from_select = InsertFromSelect
class UpsertNode(Executable, ClauseElement):
    """Pairs an UPDATE and an INSERT, compiled into one CTE-based upsert."""
    def __init__(self, update, insert):
        self.insert = insert
        self.update = update
        # NOTE(review): presumably required by SQLAlchemy's executable
        # protocol -- confirm; nothing here sets it to anything else.
        self._returning = None
@compiles(UpsertNode)
def visit_upsert_node(element, compiler, **kw):
    """Render an :class:`UpsertNode` as ``WITH update as (<update>) <insert>``."""
    update_sql = compiler.process(element.update)
    insert_sql = compiler.process(element.insert)
    return 'WITH update as (%s) %s' % (update_sql, insert_sql)
def idem_insert_from_select(table, q, *fields):
    """ Idempotent insert from select"""
    # Alias the incoming query so it can be joined against the target table.
    sq = q.alias('__q')
    pks = table.primary_key.columns
    # Left-join the query rows to existing rows on the primary key and keep
    # only those with no match, so re-running the insert creates no duplicates.
    # NOTE: ``== None`` is intentional -- SQLAlchemy overloads it to emit
    # ``IS NULL``; do not "fix" it to ``is None``.
    q = (select([sq])
         .select_from(sq.outerjoin(table,and_(*[sq.c[c.key] == c for c in pks])))
         .where(list(pks)[0] == None))
    return insert_from_select(table, q, *fields)
def idem_insert(table, **values):
    """ Idempotent insert"""
    # Label every literal with its column name, cast to the column's type,
    # so the generated SELECT matches the target table's schema.
    fields = list(values)
    labeled = [cast(literal(v, table.c[k].type), table.c[k].type).label(k)
               for k, v in values.items()]
    return idem_insert_from_select(table, select(labeled), *fields)
def upsert(table, **values):
    """ Upsert: update the row matching the primary key, insert if absent.

    :param table: Target table; all primary key columns must appear in
        ``values``.
    :raises sqlalchemy.exc.ArgumentError: if a primary key value is missing.
    """
    pks = table.primary_key.columns
    try:
        pks_pred = and_(*[c == values[c.key] for c in pks])
    except KeyError as e:
        raise exc.ArgumentError('missing pk for upsert: %s' % e)
    # RETURNING makes the CTE produce a row iff the UPDATE matched.
    update = (table.update()
              .values(**values)
              .where(pks_pred)
              .returning(literal(1)))
    fields = [k for (k, v) in values.items()]
    vals = [cast(literal(v, table.c[k].type), table.c[k].type).label(k)
            for (k, v) in values.items()]
    # FIX: wrap the raw SQL fragment in text() -- SQLAlchemy 1.4+ no longer
    # coerces plain strings passed to .where(); text() works on all versions.
    insert = insert_from_select(
        table,
        select(vals).where(text('not exists (select 1 from update)')),
        *fields)
    return UpsertNode(update, insert)
class TypeCoercion(types.UserDefinedType):
    """User-defined type whose column spec is ``name``, verbatim.

    Values pass through unchanged in both directions; the type exists
    purely to name an arbitrary SQL type in generated DDL/casts.
    """
    def __init__(self, name):
        self.name = name

    def get_col_spec(self):
        return self.name

    def bind_processor(self, dialect):
        # Identity: parameters go to the database unchanged.
        return lambda value: value

    def result_processor(self, dialect, coltype):
        # Identity: result values come back unchanged.
        return lambda value: value
# Public alias used by callers to spell an explicit SQL type name.
sqltype = TypeCoercion
|
/sautils-0.4.1.tar.gz/sautils-0.4.1/sautils.py
| 0.486575 | 0.24552 |
sautils.py
|
pypi
|
from types import SimpleNamespace
from typing import Iterable, Tuple, Any, Union, Sequence, NamedTuple
from sav.bo.itertools import keymap
def split_names(names: Union[str, Iterable[str]]) -> Iterable[str]:
    """Split the specified argument into names.

    Mirrors how :func:`collections.namedtuple` parses its ``field_names``
    argument: a single string is treated as identifiers separated by
    commas and/or whitespace; any other iterable is returned unaltered.
    """
    if isinstance(names, str):
        return names.replace(',', ' ').split()
    return names
def get_field_name(name: str) -> str:
    """Adjust to make a valid python field name.

    Hyphens are replaced with underscores.

    :raise ValueError: if the result is not a valid public identifier.
    """
    candidate = name.replace('-', '_')
    if not candidate.isidentifier():
        raise ValueError('Not an identifier: ' + candidate)
    if candidate.startswith('_'):
        raise ValueError('Name should not be private or magic: ' + candidate)
    return candidate
def ns_from_pairs(pairs: Iterable[Tuple[str, Any]]) -> Any:
    """Create namespace object from key-value pairs."""
    # Keys are normalised through get_field_name; later pairs win on clash.
    return SimpleNamespace(**{get_field_name(key): value for key, value in pairs})
def ns_from_keys(keys: Union[str, Iterable[str]],
                 prefix='') -> Any:
    """Register characters or strings.

    Builds a namespace in which each field's value equals its own name
    (optionally prefixed) -- useful for grouping constant string values.

    :param keys: An iterable of strings. A single string creates one
        attribute per character.
    :param prefix: Optional prefix added to each field value.
    """
    return ns_from_pairs((k, prefix + k) for k in keys)
def ns_from_names(names: Union[str, Iterable[str]],
                  prefix: str = '') -> Any:
    """Register names.

    Like :func:`ns_from_keys`, except that a single string argument is
    first split on commas and/or whitespace by :func:`split_names`.

    :param names: Names to be split by :func:`split_names`.
    :param prefix: Optional prefix added to each field value.
    """
    split = split_names(names)
    return ns_from_keys(split, prefix)
def enum_names(names: Union[str, Iterable[str]],
               start: int = 0) -> Any:
    """Create a namespace mapping each name to its enumeration index.

    :param names: Names to be split by :func:`split_names`.
    :param start: Index assigned to the first name.
    """
    indexed = ((name, index)
               for index, name in enumerate(split_names(names), start))
    return ns_from_pairs(indexed)
|
/sav.bo-0.1.0.tar.gz/sav.bo-0.1.0/sav/bo/names.py
| 0.935553 | 0.484502 |
names.py
|
pypi
|
import collections
from itertools import chain
from typing import (TypeVar, Iterable, Iterator, Callable, Tuple, Optional,
Container, Union, AbstractSet, Dict)
from sav.bo.apriori import T
# Type variables for key/value pairs; the A/B variants let the mapping
# helpers below express a change of key or value type.
KT = TypeVar('KT') # Key type.
KT_A = TypeVar('KT_A') # Key type.
KT_B = TypeVar('KT_B') # Key type.
VT = TypeVar('VT') # Value type.
VT_A = TypeVar('VT_A') # Value type.
VT_B = TypeVar('VT_B') # Value type.
somevar = 3 #: Sample doc comment for somevar
def any_iter(elements: Iterable[T]) -> Optional[Iterator[T]]:
    """Check whether an iterable will yield any elements.

    Pre-fetches the first element; when one exists, an iterator is
    reconstructed that re-yields it followed by the remainder.

    :param elements: Any iterable object.
    :return: ``None`` if the specified iterable yields no elements,
        otherwise an iterator over the specified iterable.
    """
    iterator = iter(elements)
    for first_element in iterator:
        # Non-empty: stitch the consumed element back onto the front.
        return chain((first_element,), iterator)
    return None
def unique(elements: Iterable[T]) -> Iterator[T]:
    """Iterates over each element only once.

    Similar to iterating over ``set(elements)``, except that the order
    of first appearance is preserved and each element is passed through
    before the next one is read. This makes it safe for infinite or very
    large iterables and for loops that break early, where materialising
    a full set would be wasteful or would never terminate.

    :param elements: An iterable of `hashable` elements.
    :return: An iterator returning each element only once.
    """
    seen = set()
    for element in elements:
        if element in seen:
            continue
        seen.add(element)
        yield element
def skip(exclude: Union[Container[T], Iterable[T]],
         source: Iterable[T]) -> Iterator[T]:
    """Iterates over elements from `source` not in `exclude`.

    ``exclude`` is iterated at most once, so generators and input
    streams are safe to pass. Sets (and non-iterable containers) are
    used through their own ``__contains__``. Other iterables are
    collected and, when every element is hashable, converted to a
    :class:`set` for O(1) lookups; with unhashable elements, a container
    falls back to its ``__contains__``, and a plain iterable falls back
    to a tuple (linear-time lookups).

    :param exclude: The elements to be skipped.
    :param source: The elements to be iterated over.
    :return: An iterator object.
    """
    if isinstance(exclude, Iterable) and not isinstance(exclude, AbstractSet):
        # Consume the (possibly one-shot) iterable exactly once.
        snapshot = tuple(exclude)
        try:
            exclude = set(snapshot)
        except TypeError:
            # Unhashable elements: prefer the container's own membership
            # test when available, otherwise use the snapshot tuple.
            if not isinstance(exclude, Container):
                exclude = snapshot
    return (item for item in source if item not in exclude)
def skip_none(elements: Iterable[Optional[T]]) -> Iterator[T]:
    """Iterates over all elements that are not `None`.

    Besides being a convenient shorthand, this lets type checkers
    narrow an optional element type down to one excluding `None`.
    """
    return (e for e in elements if e is not None)
def cap(iterable: Iterable[T], max_len: int) -> Iterator[T]:
    """Iterates over a maximum number of elements."""
    # zip pulls from the range first, so once the cap is reached no
    # extra element is consumed from ``iterable``.
    for _, item in zip(range(max_len), iterable):
        yield item
def nth(iterable: Iterable[Tuple], index: int) -> Iterator:
    """Yields the nth element of each tuple."""
    for tup in iterable:
        yield tup[index]
def first(iterable: Iterable[Tuple]) -> Iterator:
    """Yields the first element of each tuple."""
    return (tup[0] for tup in iterable)
def last(iterable: Iterable[Tuple]) -> Iterator:
    """Yields the last element of each tuple."""
    return (tup[-1] for tup in iterable)
def starfilter(func: Callable[..., bool],
               iterable: Iterable[Tuple]) -> Iterator[Tuple]:
    """Starred companion to the `filter` builtin."""
    for tup in iterable:
        if func(*tup):
            yield tup
def keymap(func: Callable[[KT_A], KT_B],
           iterable: Iterable[Tuple[KT_A, VT]]) -> Iterator[Tuple[KT_B, VT]]:
    """Apply a function to the keys of key-value pairs."""
    for key, value in iterable:
        yield func(key), value
def valmap(func: Callable[[VT_A], VT_B],
           iterable: Iterable[Tuple[KT, VT_A]]) -> Iterator[Tuple[KT, VT_B]]:
    """Apply a function to the values of key-value pairs."""
    for key, value in iterable:
        yield key, func(value)
def keyfilter(func: Callable[[KT], bool],
              iterable: Iterable[Tuple[KT, VT]]) -> Iterator[Tuple[KT, VT]]:
    """Keep only the key-value pairs whose key satisfies ``func``.

    Companion to :func:`valfilter`; added docstring for consistency with
    the documented siblings :func:`keymap` and :func:`valmap`.
    """
    return ((k, v) for k, v in iterable if func(k))
def valfilter(func: Callable[[VT], bool],
              iterable: Iterable[Tuple[KT, VT]]) -> Iterator[Tuple[KT, VT]]:
    """Keep only the key-value pairs whose value satisfies ``func``.

    Companion to :func:`keyfilter`; added docstring for consistency with
    the documented siblings :func:`keymap` and :func:`valmap`.
    """
    return ((k, v) for k, v in iterable if func(v))
def iterarg(opt: Optional[Iterable[T]]) -> Iterable[T]:
    """Turn optional iterables into iterables.

    :return: An empty tuple if the argument is `None`, otherwise the
        argument is passed through.
    """
    if opt is None:
        return ()
    return opt
class ClusterBufferError(Exception):
    """Raised by :func:`clustered` when its internal buffers hit the cap."""
    def __init__(self):
        super().__init__('Maximum buffer size reached.')
def clustered(func: Callable[[VT], KT], iterable: Iterable[VT],
              buffer_max: int = 0) -> Iterator[Tuple[KT, Iterator[VT]]]:
    """Divides objects into different clusters.

    Yields ``(key, sub_iterator)`` pairs, one per distinct ``func(val)``
    key in order of first appearance; each sub-iterator lazily yields the
    values belonging to that key. Values read from ``iterable`` on behalf
    of a cluster that is not currently being consumed are buffered.

    :param func: Maps each value to its cluster key.
    :param iterable: The values to cluster.
    :param buffer_max: If non-zero, raise :class:`ClusterBufferError`
        once this many values are buffered in total.
    """
    key_buffer = collections.deque()
    val_buffers: Dict[KT, collections.deque] = {}
    buffer_size = 0
    # Generate key-value pairs and maintain buffers
    def source_generator() -> Iterator[Tuple[KT, VT]]:
        for val in iterable:
            key = func(val)
            if key not in val_buffers:
                # First sight of this key: create its buffer and queue the
                # key so the outer loop will yield a sub-iterator for it.
                val_buffers[key] = collections.deque()
                key_buffer.append(key)
            yield key, val
    source_iter = source_generator()
    # Read from the specified buffer
    def buffered_generator(buffer: collections.deque) -> Iterator:
        nonlocal buffer_size
        while True:
            try:
                val = buffer.popleft()
            except IndexError:
                # Our buffer is empty: pull the next value from the shared
                # source and park it in its cluster's buffer (which may be
                # this very buffer -- it will be popped on the next pass).
                try:
                    key, val = next(source_iter)
                except StopIteration:
                    return
                val_buffers[key].append(val)
                # Buffer accounting is only done when a cap was requested.
                if buffer_max:
                    buffer_size += 1
                    if buffer_size == buffer_max:
                        raise ClusterBufferError()
            else:
                if buffer_max:
                    buffer_size -= 1
                yield val
    # Generate values for the specified key
    def sub_generator(key: KT) -> Iterator[VT]:
        yield from buffered_generator(val_buffers[key])
    # Generate a sub iterator for each new key in the key buffer
    for k in buffered_generator(key_buffer):
        yield k, sub_generator(k)
|
/sav.bo-0.1.0.tar.gz/sav.bo-0.1.0/sav/bo/itertools.py
| 0.918151 | 0.489626 |
itertools.py
|
pypi
|
import os
import shutil
import stat
from abc import abstractmethod
from collections import OrderedDict
from io import StringIO
from pathlib import Path
from subprocess import PIPE, STDOUT, DEVNULL, run
from typing import TypeVar, Any, Iterable, Mapping, Generic, Sequence
from sav.bo.apriori import Initialized, lazy_default, SingletonGenericMeta
from sav.bo.cmd import Commander, Completed, Capture
from sav.bo.names import enum_names
from sav.bo.textio import (Fifo, StandardStreams, StandardStreamsTuple,
RedirStreams, SystemStreams)
PathT = TypeVar('PathT')
class Terminal(Commander, Generic[PathT]):
    """Terminal interface.

    Abstract base class combining text-based input and output with
    subprocess execution and file-system navigation.
    """

    @property
    @abstractmethod
    def streams(self) -> StandardStreams:
        """The terminal's standard input, output and error streams."""
        pass

    @property
    @abstractmethod
    def cwd(self) -> PathT:
        """A path object to the terminal's current working directory."""
        pass

    @cwd.setter
    @abstractmethod
    def cwd(self, value: PathT) -> None:
        pass

    @abstractmethod
    def get_path(self, *pathsegments) -> PathT:
        """Build a path object from the given segments."""
        pass

    @abstractmethod
    def walk(self, root: PathT) -> Iterable[PathT]:
        """Walk the directory tree rooted at *root*."""
        pass

    @abstractmethod
    def rmtree(self, path: PathT, **options) -> None:
        """Recursively delete the directory tree at *path*."""
        pass

    @abstractmethod
    def print(self, *objects, file: Any = None, **options) -> None:
        """Print *objects* to *file*, or to the terminal's own output."""
        pass

    @abstractmethod
    def input(self, prompt: Any = None, stdin: Any = None,
              stdout: Any = None) -> str:
        """Read one line of input, optionally showing *prompt* first."""
        pass

    def choose(self, prompt: Any, alternatives: Mapping[str, Any],
               caseless: bool = True) -> Any:
        """Ask the user to choose from alternatives.

        :param alternatives: A mapping from accepted input strings to
            the values to return for them.
        :param caseless: Match the input case-insensitively.
        """
        if caseless:
            lookup = OrderedDict(
                (key.casefold(), value)
                for key, value in alternatives.items())
        else:
            lookup = alternatives
        listed = None
        while True:
            answer = self.input(prompt)
            folded = answer.casefold() if caseless else answer
            if folded in lookup:
                return lookup[folded]
            self.print('Choice not recognized:', repr(answer))
            if not listed:
                # After the first failure, spell out the valid inputs.
                listed = list(alternatives.keys())
                prompt = "Please enter '{}' or '{}': ".format(
                    "', '".join(listed[:-1]), listed[-1])

    def confirm(self, prompt: str, infix: str = ' (y/n) ',
                caseless: bool = True) -> bool:
        """Ask a yes/no question and return ``True`` for 'y'."""
        answers = OrderedDict([('y', True), ('n', False)])
        return self.choose(prompt + infix, answers, caseless)
def remove_readonly(func, path, _):
    """``shutil.rmtree`` error hook: clear the read-only bit and retry.

    :param func: The removal function that failed (e.g. ``os.remove``).
    :param path: The path that could not be removed.
    :param _: The exception info tuple (unused).
    """
    os.chmod(path, stat.S_IWRITE)
    func(path)
class _OutputHandler:
"""Helper class that handles an output or error stream.
.. attribute:: passed
The value that was passed to the ``stdout`` or ``stderr``
parameter of :meth:`SystemTerminal.run`.
.. attribute:: hasfileno
Whether the output stream has a file desriptor.
"""
def __init__(self, passed: Any) -> None:
self.passed = passed
try:
passed.fileno()
except (OSError, AttributeError):
self.hasfileno = False
else:
self.hasfileno = True
@property
def arg(self) -> Any:
"""The argument to be passed to the subprocess library.
Returns the value that should be passed to the ``stdout``
or ``stderr`` parameter of :func:`subprocess.run`.
"""
conversion = {Capture.pipe: PIPE,
Capture.stdout: STDOUT,
Capture.devnull: DEVNULL}
if self.passed is None:
# No stream redirection
return None
elif isinstance(self.passed, Capture):
# Convert special value
return conversion[self.passed]
elif self.hasfileno:
# Filedescriptor stream
# We pass this on and let the OS handle it
return self.passed
else:
# Non-filedescriptor stream
# We will capture this and redirect manually
return PIPE
def convert_result(self, result: Any) -> Any:
if (result is not None) and (self.passed is not Capture.pipe):
self.passed.write(result)
return None
else:
return result
class SystemTerminal(Terminal[Path], metaclass=SingletonGenericMeta):
    """System terminal interface.

    Stateless singleton object which provides the standard
    interface by wrapping around the print and subprocess
    library functions.
    """

    @property
    def streams(self) -> SystemStreams:
        """The live ``sys`` standard streams."""
        return SystemStreams()

    @property
    def cwd(self) -> Path:
        return Path.cwd()

    @cwd.setter
    def cwd(self, value) -> None:
        os.chdir(str(value))

    def get_path(self, *pathsegments) -> Path:
        return Path(*pathsegments)

    def walk(self, root):
        # NOTE(review): yields (path, dirnames, filenames) triples like
        # os.walk, while the abstract Terminal.walk signature suggests
        # bare paths -- confirm which contract callers rely on.
        for path_str, dirnames, filenames in os.walk(str(root)):
            yield self.get_path(path_str), dirnames, filenames

    def rmtree(self, path, onerror=remove_readonly, **options):
        """Delete a tree, clearing read-only bits by default."""
        shutil.rmtree(str(path), onerror=onerror, **options)

    def print(self, *objects, file=None, **options):
        if file is not None:
            options['file'] = file
        print(*objects, **options)

    def input(self, prompt=None, stdin=None, stdout=None):
        """Read a line, optionally writing *prompt* first.

        :raises EOFError: When the input stream is exhausted.
        """
        if prompt is not None:
            if (stdin is None) and (stdout is None):
                return input(prompt)
            if stdout is None:
                # Bug fix: this case previously crashed with
                # AttributeError (stdout.write on None) when a custom
                # stdin was supplied without a custom stdout. Fall back
                # to the real stdout for the prompt.
                self.print(prompt, end='')
            else:
                stdout.write(prompt)
        if stdin is None:
            return input()
        line = stdin.readline()
        if line == '':
            # readline() returns '' only at end of stream.
            raise EOFError()
        return line.rstrip('\r\n')

    # noinspection PyMethodMayBeStatic
    def run_no_check(self, args: Sequence[str], stdout: Any = None,
                     stderr: Any = None, cwd: Any = None,
                     **options) -> Completed:
        """Run a subprocess, redirecting non-descriptor streams manually."""
        oh, eh = _OutputHandler(stdout), _OutputHandler(stderr)
        if cwd is not None:
            options['cwd'] = str(cwd)
        completed = run(args, stdout=oh.arg, stderr=eh.arg,
                        universal_newlines=True, **options)
        return Completed(args=args, returncode=completed.returncode,
                         stdout=oh.convert_result(completed.stdout),
                         stderr=eh.convert_result(completed.stderr))
class OuterTerminal(Initialized, Terminal[PathT]):
    """Terminal wrapper delegating every operation to an inner terminal.

    :param inner_term: The wrapped terminal; defaults to the
        :class:`SystemTerminal` singleton.
    """

    def __init__(self, *, inner_term: Terminal = None, **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        self.inner_term = SystemTerminal() if inner_term is None else inner_term

    @property
    def streams(self) -> StandardStreams:
        return self.inner_term.streams

    def print(self, *objects, **options) -> None:
        self.inner_term.print(*objects, **options)

    def input(self, prompt: Any = None, stdin: Any = None,
              stdout: Any = None) -> str:
        return self.inner_term.input(prompt, stdin, stdout)

    @property
    def cwd(self) -> Any:
        return self.inner_term.cwd

    @cwd.setter
    def cwd(self, value: Any) -> None:
        self.inner_term.cwd = value

    def get_path(self, *pathsegments) -> Any:
        return self.inner_term.get_path(*pathsegments)

    def walk(self, root: Any) -> Iterable:
        return self.inner_term.walk(root)

    def rmtree(self, path: Any, **options) -> None:
        self.inner_term.rmtree(path, **options)

    def run_no_check(self, args: Sequence[str], **options) -> Completed:
        return self.inner_term.run_no_check(args, **options)
class RedirTerminal(OuterTerminal):
    """Terminal wrapper that redirects the standard streams.

    :param redir: Replacement streams; entries that are ``None`` fall
        back to the inner terminal's corresponding stream.
    """

    def __init__(self, *, redir: StandardStreams, **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        self._streams = RedirStreams(redir=redir, default=super().streams)

    @property
    def streams(self) -> StandardStreams:
        return self._streams

    def print(self, *objects, file: Any = None, **options) -> None:
        if file is None:
            file = self._streams.redir.stdout
        super().print(*objects, file=file, **options)

    def input(self, prompt: Any = None, stdin: Any = None,
              stdout: Any = None) -> str:
        redir = self._streams.redir
        if stdin is None:
            stdin = redir.stdin
        if stdout is None:
            stdout = redir.stdout
        return super().input(prompt, stdin, stdout)

    def run_no_check(self, args: Sequence[str], stdout: Any = None,
                     stderr: Any = None, **options) -> Completed:
        redir = self._streams.redir
        return super().run_no_check(
            args,
            stdout=redir.stdout if stdout is None else stdout,
            stderr=redir.stderr if stderr is None else stderr,
            **options
        )
class FifoTerminalBackend(Initialized):
    """Drive a :class:`RedirTerminal` through in-memory FIFO buffers.

    Text written to :attr:`in_writer` becomes the terminal's stdin,
    while everything the terminal writes to stdout/stderr can be read
    back through :attr:`out_reader` and :attr:`err_reader`.
    """

    def __init__(self, *, inner_term: Terminal = None,
                 **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        self.in_writer = Fifo()
        redir = StandardStreamsTuple(self.in_writer.inner_stream,
                                     Fifo(), Fifo())
        self.terminal = RedirTerminal(inner_term=inner_term, redir=redir)

    @property
    def out_reader(self) -> StringIO:
        """Buffer holding text the terminal wrote to stdout."""
        return self.terminal.streams.stdout.inner_stream

    @property
    def err_reader(self) -> StringIO:
        """Buffer holding text the terminal wrote to stderr."""
        return self.terminal.streams.stderr.inner_stream

    def __enter__(self) -> 'FifoTerminalBackend':
        return self

    # noinspection PyUnusedLocal
    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        self.close()

    def close(self) -> None:
        """Close the input writer and both output FIFOs."""
        self.in_writer.close()
        streams = self.terminal.streams
        streams.stdout.close()
        streams.stderr.close()

    def to_inner(self) -> None:
        """Flush captured stdout/stderr on to the inner terminal's streams."""
        inner = self.terminal.inner_term.streams
        inner.stdout.write(self.out_reader.read())
        inner.stderr.write(self.err_reader.read())
class WorkDirTerminal(OuterTerminal):
    """A terminal that maintains its own working directory."""

    def __init__(self, *, work_dir: Any = None, **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        # Default to wherever the inner terminal currently is.
        if work_dir is None:
            work_dir = self.inner_term.cwd
        self.work_dir = work_dir

    def run_no_check(self, args: Sequence[str], cwd: Any = None,
                     **options) -> Completed:
        if cwd is None:
            cwd = self.work_dir
        return super().run_no_check(args, cwd=cwd, **options)

    @property
    def cwd(self) -> Any:
        return self.work_dir

    @cwd.setter
    def cwd(self, value) -> None:
        self.work_dir = value
# Increasing verbosity levels, consumed by VerboseTerminal.print.
Verbosity = enum_names('normal, verbose, logging, debug')
class VerboseTerminal(OuterTerminal):
    """Terminal that filters its output by verbosity level."""

    def __init__(self, *, verbosity: int = Verbosity.normal,
                 **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        self.verbosity = verbosity

    def print(self, *objects, verbosity: int = Verbosity.normal,
              **options) -> None:
        """Print only when our level is at least *verbosity*."""
        if verbosity > self.verbosity:
            return
        super().print(*objects, **options)

    def run_no_check(self, args: Sequence[str], **options) -> Completed:
        # Log the exact argument list at 'logging' level before running.
        self.print("Calling subprocess:", args, verbosity=Verbosity.logging)
        return super().run_no_check(args, **options)
|
/sav.bo-0.1.0.tar.gz/sav.bo-0.1.0/sav/bo/terminals.py
| 0.730386 | 0.208763 |
terminals.py
|
pypi
|
from abc import ABCMeta, abstractmethod
from importlib import import_module
from inspect import getmembers, isclass, isfunction
from pathlib import Path
from types import FunctionType, ModuleType
from typing import Union, Any, Type, Optional, NamedTuple, List
from sav.bo.apriori import Initialized
DEFAULT_TESTS_UNQUAL = 'tests' #: The default name for tests subpackages.
class Definitions(NamedTuple):
    """A structure of callables defined by a module.

    Populated by :attr:`DefinitionsInspector.defs`.
    """
    functions: List[FunctionType] #: List of function objects.
    classes: List[type] #: List of class objects.
    exceptions: List[Type[Exception]] #: List of exception classes.
class Inspectors(NamedTuple):
    """A structure of inspectors for submodules.

    The field order ``(nonpack, pack)`` lets callers index by a
    boolean ``is_pack`` flag.

    .. attribute:: nonpack

        Inspectors for non-package modules.
        A sequence of instances of the appropriate subclass of
        :class:`ModuleInspector`.

    .. attribute:: pack

        Inspectors for subpackages.
        A sequence of instances of the appropriate subclass of
        :class:`PackageInspector`.
    """
    nonpack: List
    pack: List
# Either an already-imported module object or its dotted name.
ModuleRef = Union[ModuleType, str]
def get_module(ref: ModuleRef) -> ModuleType:
    """Resolve *ref* to a module object, importing it by name if needed."""
    if isinstance(ref, ModuleType):
        return ref
    return import_module(ref)
class ModuleInspector(Initialized):
    """Module inspection base class.

    :param module: The name or module object for the module to be
        inspected.
    :param tests_unqual: The unqualified name that marks test packages.
    :raises ImportError: If the module cannot be found.
    """

    def __init__(self, *, module: ModuleRef,
                 tests_unqual: str = DEFAULT_TESTS_UNQUAL,
                 **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        self.tests_unqual = tests_unqual
        self.module: ModuleType = get_module(module)

    @property
    def name(self) -> str:
        """The dotted name of our module."""
        return self.module.__name__

    @property
    def is_test(self) -> bool:
        """Whether our module is a test module."""
        parts = self.name.split('.')
        return self.tests_unqual in parts
class DefinitionsInspector(ModuleInspector, metaclass=ABCMeta):
    """ABC for inspection of function and class definitions.

    :param module: The name or module object for the module to be
        inspected.
    :param tests_unqual: The unqualified name that marks test packages.
    """
    def __init__(self, *, module: ModuleRef,
                 tests_unqual: str = DEFAULT_TESTS_UNQUAL,
                 **super_kwargs) -> None:
        super().__init__(module=module, tests_unqual=tests_unqual,
                         **super_kwargs)
        # Lazily-built cache for the defs property.
        self._defs: Optional[Definitions] = None
    @abstractmethod
    def select_def(self, name: str) -> bool:
        """Whether to inspect the definition with the specified name."""
        pass
    def defines(self, obj: Any) -> bool:
        """Whether the specified object is defined in our module."""
        try:
            return obj.__module__ == self.name
        except AttributeError:
            # Objects without a __module__ attribute are never ours.
            return False
    @property
    def defs(self) -> Definitions:
        """A structure of callables defined in our module (cached)."""
        if self._defs is None:
            defs = Definitions([], [], [])
            for name, member in getmembers(self.module):
                if self.defines(member) and self.select_def(name):
                    # Sort the member into exactly one bucket:
                    # exception class, other class, or function.
                    members = (
                        (defs.exceptions if issubclass(member, Exception)
                         else defs.classes)
                        if isclass(member) else
                        (defs.functions if isfunction(member) else None)
                    )
                    # The name comparison skips aliases: only the
                    # binding matching the object's own __name__ counts.
                    if members is not None and (member.__name__ == name):
                        members.append(member)
            self._defs = defs
        return self._defs
class PackageInspector(ModuleInspector, metaclass=ABCMeta):
    """Inspect a whole tree of subpackages and modules.

    :param module: The name or module object for the package to be
        inspected.
    :param tests_unqual: The unqualified name that marks test packages.
    """
    def __init__(self, *, module: ModuleRef,
                 tests_unqual: str = DEFAULT_TESTS_UNQUAL,
                 **super_kwargs) -> None:
        super().__init__(module=module, tests_unqual=tests_unqual,
                         **super_kwargs)
        # Lazily-built cache for the subs property.
        self._subs: Optional[Inspectors] = None
    def select_submodule(self, is_pack: bool, unqual: str) -> bool:
        """Whether to import the submodule with the specified name.

        :param is_pack: Whether the submodule is a package
        :param unqual: The unqualified submodule name.
        """
        return True
    @abstractmethod
    def create_subinspector(self, is_pack: bool, module: ModuleType) -> Any:
        """Should return a new inspector for the specified submodule."""
        pass
    @property
    def subs(self) -> Inspectors:
        """A structure of subpackages and submodules (cached)."""
        if self._subs is None:
            self._subs = Inspectors([], [])
            skip = {'__init__', '__pycache__'}
            for d in self.module.__path__:
                for p in Path(d).iterdir():
                    unqual = p.stem
                    if unqual in skip:
                        continue
                    if p.is_dir():
                        is_pack = True
                    elif p.suffix == '.py':
                        is_pack = False
                    else:
                        # Not a module or package candidate.
                        continue
                    # Remember the name so a duplicate entry on another
                    # __path__ directory is not imported twice.
                    skip.add(unqual)
                    if not self.select_submodule(is_pack, unqual):
                        continue
                    mod = import_module(self.name + '.' + unqual)
                    sub = self.create_subinspector(is_pack, mod)
                    # Inspectors is (nonpack, pack), so indexing by the
                    # boolean is_pack selects the matching bucket.
                    self._subs[is_pack].append(sub)
        return self._subs
|
/sav.bo-0.1.0.tar.gz/sav.bo-0.1.0/sav/bo/inspect.py
| 0.915408 | 0.374076 |
inspect.py
|
pypi
|
from abc import ABCMeta, abstractmethod
from typing import TypeVar, Any, GenericMeta, Mapping, Callable, Optional, Dict
T = TypeVar('T')
class SingletonMeta(type):
    """Singleton metaclass.

    A class created with ``metaclass=SingletonMeta`` yields the same
    instance for every constructor call: the instance is created on
    the first call and cached for all subsequent ones.

    Constructor arguments are rejected with a :exc:`TypeError`,
    because accepting them would allow different constructor calls to
    request differently-configured "singletons". Consequently no
    arguments are ever forwarded to the singleton class's ``__new__``
    method; if the superclass ``__new__`` requires arguments, define a
    no-argument ``__new__`` on the singleton class that supplies them
    itself.

    Only define a singleton class when you need something completely
    immutable and global that also belongs to a more generic type with
    other instances.
    """

    # One shared cache, keyed by the concrete class object.
    _instances: Dict[type, Any] = {}

    def __call__(cls, *args, **kwargs) -> Any:
        if args or kwargs:
            raise TypeError('Arguments were passed to singleton constructor.')
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__()
        return cls._instances[cls]
class SingletonABCMeta(SingletonMeta, ABCMeta):
    """Metaclass for singleton implementations of ABCs.

    Combines :class:`SingletonMeta` with :class:`abc.ABCMeta` so that
    a concrete subclass of an ABC can be made a singleton.
    """
    pass
class SingletonGenericMeta(SingletonABCMeta, GenericMeta):
    """Metaclass for singleton implementations of generic classes.

    NOTE(review): ``typing.GenericMeta`` only exists on Python < 3.7;
    on newer interpreters this class (and its import) fails. Confirm
    the supported interpreter range before reusing this module.
    """
    pass
def find_super(obj: Any, cls: type, name: str) -> Optional[type]:
    """Search for next class in the MRO that defines some attribute.

    :param obj: The object whose MRO should be searched.
    :param cls: Only classes that succeed this class in the MRO
        will be searched.
    :param name: The name of the attribute to look for.
    :return: The first successor class in the MRO to define the
        specified attribute, or `None` if none could be found.
    """
    mro = obj.__class__.__mro__
    start = mro.index(cls) + 1
    for candidate in mro[start:]:
        # Inspect each class's own namespace only, so inherited
        # attributes do not count as definitions.
        if name in vars(candidate):
            return candidate
    return None
def check_unexpected_super(obj: Any, cls: type, name: str) -> None:
    """Check that a method is not defined higher up in the MRO.

    If the method is defined only by :class:`object` then this method
    also returns without raising an error.

    :raise TypeError: if ``name`` is defined by a class that appears
        somewhere between ``cls`` and :class:`object` in the method
        resolution order of ``obj``.
    """
    found = find_super(obj, cls, name)
    if (found is None) or (found is object):
        return
    raise TypeError('Unexpected super method: {}.{}.{}'.format(
        found.__module__, found.__qualname__, name))
def check_unexpected_kwargs(kwargs: Mapping[str, Any]) -> None:
    """Raise if any keyword arguments remain unconsumed.

    :raises TypeError: When *kwargs* is non-empty.
    """
    if not kwargs:
        return
    raise TypeError('Unexpected keyword arguments: {}'.format(kwargs))
class Initialized:
    """Base class that provides a buck stopper ``__init__`` method.

    :param unexpected_kwargs: unique arguments passed on by downstream
        ``__init__`` methods
    :raises TypeError: if ``unexpected_kwargs`` is non-empty, or if any
        class following this class in the MRO of the instance defines
        an __init__ method (other than :class:`object`).

    This class may be used as the common ancestor for classes that
    implement cooperative multiple inheritance of the ``__init__``
    method.

    Note that this class does not call any implementations of the
    ``__init__`` method further up in the MRO, but instead raises
    an exception when the next ``__init__`` method in the MRO is
    not ``object.__init__``. Thus, you should make sure that any other
    classes that follow ``Initialized`` in the MRO of your classes are
    effectively mixins that do not override ``object.__init__``.
    """
    def __init__(self, **unexpected_kwargs) -> None:
        check_unexpected_super(self, Initialized, '__init__')
        check_unexpected_kwargs(unexpected_kwargs)
def lazy_default(passed: Any, func: Callable[..., Any], *args,
                 **kwargs) -> Any:
    """Return *passed*, or a lazily-computed default when it is ``None``.

    Replaces the common idiom::

        D if E is None else E

    which has two drawbacks: the primary expression ``E`` is spelled
    (and, if it is an expression, evaluated) twice, and the ``None``
    test is pure boilerplate. Because Python evaluates function
    arguments eagerly, the default here is supplied as a factory
    callable rather than a value, so it only runs when it is needed.
    Two usage patterns follow naturally::

        # lambda form
        lazy_default(E, lambda: D)

        # factory-plus-arguments form
        self.foo = lazy_default(foo, Foo)            # Foo() if foo is None
        return lazy_default(self.bar, Bar, x=3, y=5) # Bar(x=3, y=5) if None

    :param passed: The value to be passed through unless it is ``None``.
    :param func: The function to call if a default value is needed.
    :param args: Any positional arguments to pass to the default function.
    :param kwargs: Any keyword arguments to pass to the default function.
    :return: The specified or the default value.
    """
    if passed is not None:
        return passed
    return func(*args, **kwargs)
class Cache(Initialized, metaclass=ABCMeta):
    """Base class for lazily-refreshed cached state.

    Subclasses implement :meth:`update`; callers invoke :meth:`check`,
    which refreshes only while :attr:`outdated` is set.
    """

    def __init__(self, **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        # Start dirty so the first check() triggers an update.
        self.outdated: bool = True

    @abstractmethod
    def update(self) -> None:
        """Recompute the cached state."""
        pass

    def refresh(self) -> None:
        """Unconditionally update, then mark the cache clean."""
        self.update()
        self.outdated = False

    def check(self) -> None:
        """Refresh the cache only if it is marked outdated."""
        if self.outdated:
            self.refresh()
|
/sav.bo-0.1.0.tar.gz/sav.bo-0.1.0/sav/bo/apriori.py
| 0.962125 | 0.468547 |
apriori.py
|
pypi
|
import sys
from abc import ABCMeta, abstractmethod
from collections import Sequence
from io import TextIOBase, StringIO, SEEK_END
from typing import Any, List, MutableSequence, Optional
from sav.bo.apriori import Initialized, lazy_default, SingletonABCMeta
class Writable(TextIOBase, metaclass=ABCMeta):
    """ABC for writable text streams.

    Subclasses implement :meth:`writetext`; this base supplies a
    conforming :meth:`write` on top of it.
    """

    @abstractmethod
    def writetext(self, s: str) -> None:
        """Implement this method to create a writable text stream."""
        pass

    def write(self, s: str) -> int:
        """Write *s* and return ``len(s)``.

        :meth:`io.TextIOBase.write` requires the number of characters
        written to be returned, for consistency with the raw and
        buffered binary ``write`` methods. For buffered/text streams a
        partial write raises :exc:`BlockingIOError` rather than
        returning, so whenever this method returns at all it has by
        definition written every character it was given -- making
        ``len(s)`` the only conforming return value. The actual
        writing is delegated to the abstract :meth:`writetext`, which
        specifies no return value.
        """
        self.writetext(s)
        return len(s)

    def writable(self) -> bool:
        """Mark this stream writable for the io machinery."""
        return True
class Appender(Writable):
    """Append written text to a sequence.

    This adapts the sequence interface to the writable interface.

    .. attribute:: data

        Sequence that written text is appended to.
    """

    def __init__(self, data: MutableSequence[str] = None,
                 **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        # A fresh list per instance unless the caller supplies one.
        self.data = [] if data is None else data

    def writetext(self, s: str) -> None:
        self.data.append(s)
class OuterStream(Initialized, TextIOBase):
    """Base class for wrappers around text streams.

    Unlike :class:`io.TextIOWrapper`, which layers text over a binary
    stream, this wraps one text stream in another.

    .. attribute:: inner_stream

        The inner text stream object. Duck-typed: which methods it
        must provide depends on the situation.

    .. attribute:: close_inner

        Whether closing the outer stream also closes the inner one.
        ``False`` by default.
    """

    def __init__(self, inner_stream: Any, **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        self.inner_stream = inner_stream
        self.close_inner: bool = False

    def close(self) -> None:
        """Close this stream, and the inner one when configured to."""
        super().close()
        if self.close_inner:
            self.inner_stream.close()
class OuterWritable(OuterStream, Writable):
    """Base class for writable wrappers around text streams.

    .. attribute:: direct_writelines

        When `True`, :meth:`writelines` writes straight to the inner
        stream, bypassing the outer :meth:`writetext`. Leave it
        `False` (the default) if your subclass overrides
        :meth:`writetext`, or the override would be skipped by
        :meth:`writelines`; set it `True` in classes whose
        :meth:`writetext` calls the outer :meth:`writelines`, to
        prevent infinite loops.
    """

    def __init__(self, inner_stream: Any, **super_kwargs) -> None:
        super().__init__(inner_stream=inner_stream, **super_kwargs)
        self.direct_writelines: bool = False

    def writetext(self, s: str) -> None:
        self.inner_stream.write(s)

    def writelines(self, lines: List[str]) -> None:
        if not self.direct_writelines:
            # Route through the outer write()/writetext() machinery.
            super().writelines(lines)
            return
        try:
            self.inner_stream.writelines(lines)
        except AttributeError:
            # Inner stream offers no writelines: write line by line.
            for line in lines:
                self.inner_stream.write(line)
class LineWriter(OuterWritable):
    """Line-buffered text writer.

    Text written to this stream is split into lines and written
    to the inner stream's ``writelines`` method (if available) or
    written to its ``write`` method one line at a time. If the last
    character written to this stream before it is closed was not a
    line ending, then all remaining characters are passed on to the
    inner stream's ``write`` method.
    """
    def __init__(self, inner_stream: Any, **super_kwargs) -> None:
        super().__init__(inner_stream=inner_stream, **super_kwargs)
        # Our writetext() delegates to writelines(), so writelines()
        # must go straight to the inner stream to avoid recursion.
        self.direct_writelines = True
        # Holds a partial (unterminated) last line, or None when empty.
        self.__line_buffer: Optional[StringIO] = None
    def writetext(self, s: str) -> None:
        """Split text into different lines before writing them.

        :param s: The text to be split into lines. Characters beyond
            the last newline character will be buffered. The buffer
            is either combined with characters before the next newline
            in subsequent strings passed to this method, or written
            to the inner stream when this stream is closed.
        """
        # Return directly if there is nothing to be written.
        if not s:
            return
        # This will return at least one line of text.
        lines = s.splitlines(keepends=True)
        # Invoke the splitlines method again, without keepends this
        # time, upon the last line in the list, to strip any
        # newline characters at the end of it if there were any.
        # This trick ensures consistency with whatever list of
        # newline characters the splitlines method might employ.
        stripped_last_line = lines[-1].splitlines()[0]
        # If the stripped line equals the unstripped line, then our
        # last line did not end with a newline character.
        has_residue = (stripped_last_line == lines[-1])
        # Remove the residue from the lines to be written.
        if has_residue:
            lines.pop()
        # Write all lines that have newline characters.
        if lines:
            self.writelines(lines)
        # Write the residue to the buffer
        if has_residue:
            if self.__line_buffer is None:
                self.__line_buffer = StringIO()
            self.__line_buffer.write(stripped_last_line)
    def writelines(self, lines: List[str]) -> None:
        """Write lines to the inner stream.

        :param lines: A list of strings. Each string must end with a
            line ending and contain no other line endings. This also
            means none of the strings can be empty. The list may be
            empty however. If both this list and the current line
            buffer are non-empty, a copy of the list will be made
            and the contents of the line buffer will be prepended
            to the first line.
        """
        if not lines:
            return
        lb = self.__line_buffer
        if lb is not None:
            # Copy so the caller's list is never mutated.
            lines = lines[:]
            lb.write(lines[0])
            lines[0] = lb.getvalue()
            lb.close()
            self.__line_buffer = None
        super().writelines(lines)
    def close(self):
        """Flush line buffer and close the stream."""
        lb = self.__line_buffer
        if lb is not None:
            # Unterminated residue goes to the inner write() as-is.
            self.inner_stream.write(lb.getvalue())
            lb.close()
        super().close()
class Fifo(OuterWritable):
    """Text stream wrapper implementing a FIFO buffer.

    Writes to an inner stream without advancing its position,
    allowing the text to be read back from the inner stream.

    .. attribute:: inner_stream

        The inner stream, inherited from :class:`OuterStream`,
        should be readable and writable, and will by
        default be a :class:`io.StringIO` buffer.

    .. attribute:: close_inner

        Inherited from :class:`OuterStream`, but `True` by default
        instead of `False`: when no inner stream is passed to the
        constructor and a fresh string buffer is created, that buffer
        is closed together with this stream. In most cases this is
        desirable, since a FIFO buffer is meant to be read from while
        it is still open for writing on the other end.
    """

    def __init__(self, inner_stream: Any = None, **super_kwargs) -> None:
        if inner_stream is None:
            inner_stream = StringIO()
        super().__init__(inner_stream=inner_stream, **super_kwargs)
        self.close_inner = True

    def writetext(self, s: str) -> None:
        """Append *s* at the end of the inner stream, keeping its position."""
        inner = self.inner_stream
        saved = inner.tell()        # Remember the read position
        inner.seek(0, SEEK_END)     # Jump to the end of the stream
        inner.write(s)              # Append the new text there
        inner.seek(saved)           # Restore the read position
class StandardStreams(Sequence):
    """Fixed-length sequence view of ``(stdin, stdout, stderr)``."""

    def __len__(self) -> int:
        # There are always exactly three standard streams.
        return 3

    @property
    def stdin(self) -> Any:
        """The input stream (index 0)."""
        return self[0]

    @property
    def stdout(self) -> Any:
        """The output stream (index 1)."""
        return self[1]

    @property
    def stderr(self) -> Any:
        """The error stream (index 2)."""
        return self[2]
class StandardStreamsTuple(tuple, StandardStreams):
    """Immutable ``(stdin, stdout, stderr)`` triple; entries default to None."""

    def __new__(cls, stdin: Any = None, stdout: Any = None,
                stderr: Any = None):
        streams = (stdin, stdout, stderr)
        return tuple.__new__(cls, streams)
class RedirStreams(Initialized, StandardStreams):
    """Standard streams with per-stream redirection.

    Each entry is taken from *redir* unless that entry is ``None``, in
    which case the corresponding *default* stream is used instead.
    """

    def __init__(self, *, redir: StandardStreams,
                 default: StandardStreams, **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        self.redir = redir
        self.default = default

    def __getitem__(self, item: int) -> Any:
        stream = self.redir[item]
        return self.default[item] if stream is None else stream
class SystemStreams(StandardStreams, metaclass=SingletonABCMeta):
    """Singleton view of the live ``sys`` standard streams."""

    def __getitem__(self, item: int) -> Any:
        # Look the stream up on sys at access time, so reassignments
        # of sys.stdout etc. are always reflected.
        names = ('stdin', 'stdout', 'stderr')
        return getattr(sys, names[item])
|
/sav.bo-0.1.0.tar.gz/sav.bo-0.1.0/sav/bo/textio.py
| 0.834677 | 0.359814 |
textio.py
|
pypi
|
import sys
from abc import abstractmethod, ABCMeta
from contextlib import (ExitStack, redirect_stdout, redirect_stderr,
contextmanager)
from enum import Enum
from runpy import run_module
from typing import Any, Sequence
from sav.bo.apriori import SingletonABCMeta, Initialized
class Capture(Enum):
    """Special redirection targets for a commander's stdout/stderr.

    Translated by the terminal backend to the corresponding
    ``subprocess`` sentinels (``PIPE``, ``STDOUT``, ``DEVNULL``).
    """
    pipe = -1
    stdout = -2
    devnull = -3
class Completed(Initialized):
    """Contains the results of a command after being run.

    :param args: The argument list the command was run with.
    :param returncode: The command's exit status; 0 means success.
    :param stdout: Captured standard output, if any.
    :param stderr: Captured standard error, if any.
    """

    def __init__(self, *, args: Sequence[str], returncode: int = 0,
                 stdout: str = None, stderr: str = None,
                 **super_kwargs) -> None:
        super().__init__(**super_kwargs)
        self.args = args
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr

    def check_returncode(self) -> None:
        """Raise :class:`CommandError` when the return code is nonzero."""
        if self.returncode != 0:
            raise CommandError(self)
class CommandError(Exception):
    """Raised when a command returned an error code."""

    def __init__(self, completed: Completed) -> None:
        message = 'Command finished with error code {}'.format(
            completed.returncode)
        super().__init__(message)
        # Keep the full result around for callers that want details.
        self.completed = completed
class Commander(metaclass=ABCMeta):
    """Abstract base class for objects that can run commands."""

    @abstractmethod
    def run_no_check(self, args: Sequence[str], **options) -> Completed:
        """Implement this method to run commands.

        Derived classes should implement this method to interpret
        the specified command according to their purpose. Client
        code interacting with a commander object should not invoke
        this method directly, however, but call :meth:`run` or
        :meth:`query` instead.
        """
        pass

    def run(self, *args, stdout: Any = None, stderr: Any = None,
            check: bool = False, **options) -> Completed:
        """Run a command and return its result.

        The interface intentionally mimics :func:`subprocess.run`,
        except that the command arrives as varargs and that options
        may have richer types.

        :param args: Command arguments; each is converted with
            ``str()`` first, so e.g. integers are also fine.
        :param stdout: A text output stream, ``None``, or one of the
            values ``Capture.pipe`` or ``Capture.devnull``.
        :param stderr: A text output stream, ``None``, or a
            ``Capture`` value.
        :param check: When ``True``, call
            :meth:`Completed.check_returncode` on the result, raising
            on a nonzero return code.
        :param options: Any keyword arguments to be passed on.
        :return: The result object from the executed command.
        """
        string_args = tuple(str(arg) for arg in args)
        completed = self.run_no_check(string_args, stdout=stdout,
                                      stderr=stderr, **options)
        if check:
            completed.check_returncode()
        return completed

    def query(self, *args, **options) -> str:
        """Run a command and return text captured from stdout."""
        result = self.run(*args, stdout=Capture.pipe, check=True, **options)
        return result.stdout
class Program(Commander, metaclass=ABCMeta):
    """Adapts the system interface of the running python script.

    Derive from this class and implement
    :meth:`Commander.run_no_check` to create a python program.  Within
    that method you may call ``run`` or ``eval_sys`` on ``self`` to
    restart the program with a different argument list.
    """

    def eval_sys(self, *extra_args) -> int:
        """Run with the arguments passed to the running script.

        :param extra_args: Appended after the system arguments.  To
            replace the argument list entirely, call
            :meth:`Commander.run` instead.
        :return: The program's return code.
        """
        argv = list(sys.argv[1:]) + list(extra_args)
        return self.run(*argv).returncode

    def exec_sys(self) -> None:
        """Run the program and exit python with its return code.

        Call this in your main module to hand execution of the script
        over to the `Program` object, forwarding the script's own
        command-line arguments.
        """
        sys.exit(self.eval_sys())
class ModuleRunner(Commander, metaclass=SingletonABCMeta):
    """Adapts a python module to the `Commander` interface.

    The ``ModuleRunner()`` expression returns a `Commander` object
    whose :meth:`Commander.run` method expects the first argument
    to be a python module name.  Invoking this method allows you to
    run any module as if it were the main module and capture its
    return value in the form of a `Completed` object.
    """

    def run_no_check(self, args: Sequence[str], stdout: Any = None,
                     stderr: Any = None, **options) -> Completed:
        """Runs a module as if it were the main module.

        :param args: ``args[0]`` is the module name; the remainder
            becomes ``sys.argv[1:]`` for the duration of the run.
            Raises ``IndexError`` if *args* is empty.
        :param stdout: A text stream to redirect stdout to, or ``None``.
        :param stderr: A text stream to redirect stderr to, or ``None``.
        :raise NotImplementedError: if either stream option is a
            :class:`Capture` value.
        :return: A :class:`Completed` with the module's exit code.
        """
        mod_name = args[0]  # IndexError here signals an empty args list
        for writer in stdout, stderr:
            if isinstance(writer, Capture):
                raise NotImplementedError('Capture not supported yet.')

        @contextmanager
        def replace_args():
            prev_args = sys.argv[1:]
            sys.argv[1:] = args[1:]
            try:
                yield
            finally:
                # Restore even when run_module raises something other
                # than SystemExit; previously an escaping exception
                # left the interpreter with a clobbered sys.argv.
                sys.argv[1:] = prev_args

        with ExitStack() as stack:
            stack.enter_context(replace_args())
            if stdout is not None:
                stack.enter_context(redirect_stdout(stdout))
            if stderr is not None:
                stack.enter_context(redirect_stderr(stderr))
            try:
                run_module(mod_name, alter_sys=True)
            except SystemExit as err:
                # sys.exit() / sys.exit(None) means success; normalize
                # to 0 so the declared int return code holds.
                code = 0 if err.code is None else err.code
            else:
                code = 0
        return Completed(args=args, returncode=code)
|
/sav.bo-0.1.0.tar.gz/sav.bo-0.1.0/sav/bo/cmd.py
| 0.765944 | 0.268315 |
cmd.py
|
pypi
|
from abc import abstractmethod, ABCMeta
from argparse import ArgumentParser
from io import StringIO
from pathlib import Path
from typing import Mapping, Any, Sequence, Iterable, Optional
from sav.bo import cmd
from sav.bo.apriori import check_unexpected_kwargs, Initialized, lazy_default
from sav.bo.cmd import Completed, Capture
from sav.bo.textio import StandardStreamsTuple
from sav.bo.terminals import (Terminal, RedirTerminal, SystemTerminal,
VerboseTerminal)
class Command(Initialized):
    """Base class for console command definitions."""

    def __init__(self, *, parser_options: Mapping[str, Any]=None,
                 **super_kwargs) -> None:
        """
        :param parser_options: Keyword arguments for the command's
            :class:`ArgumentParser`; an empty dict when omitted.
        """
        super().__init__(**super_kwargs)
        options = lazy_default(parser_options, dict)
        self.parser_options: Mapping[str, Any] = options

    def config_parser(self, parser: ArgumentParser) -> None:
        """Configure the argument parser for this command.

        A deliberate no-op: it both specifies the interface and acts
        as the root of any multiple-inheritance diamond, so every
        override must invoke its super method.
        """
        pass
class Performer(Command, metaclass=ABCMeta):
    """An object that can perform a command after it is parsed."""
    @abstractmethod
    def perform(self, parsed: Any, terminal: Terminal[Path]) -> Optional[int]:
        """Perform the command.

        :param terminal: The terminal to perform upon.
        :param parsed: The object returned by the parser.
        :return: Error code, or ``None`` which callers treat as
            success (see ``Program.run_no_check``, which maps it to 0).
        """
        return
class Program(Performer, cmd.Program, metaclass=ABCMeta):
    """Abstract base class for command-line programs."""

    def __init__(self, *, terminal: Terminal[Path]=None,
                 parser_options: Mapping[str, Any]=None,
                 **super_kwargs) -> None:
        """
        :param terminal: The terminal to perform on; a
            :class:`SystemTerminal` when omitted.
        :param parser_options: Keyword arguments for the parser.
        """
        super().__init__(parser_options=parser_options, **super_kwargs)
        self.terminal: Terminal[Path] = lazy_default(terminal, SystemTerminal)
        self.__parser: Optional[ArgumentParser] = None

    @property
    def parser(self) -> ArgumentParser:
        """Construct, configure, and cache the parser for reuse."""
        if self.__parser is None:
            p = ArgumentParser(**self.parser_options)
            self.config_parser(p)
            self.__parser = p
        return self.__parser

    def run_no_check(self, args: Sequence[str], stdout: Any = None,
                     stderr: Any = None, **unexpected_kwargs) -> Completed:
        """Implements :meth:`sav.bo.cmd.Commander.run_no_check`.

        When *stdout* or *stderr* is given, terminal output is
        redirected; a ``Capture.pipe`` value is collected into a
        string and returned on the :class:`Completed` object.
        """
        check_unexpected_kwargs(unexpected_kwargs)
        # Initialize local variables.
        params = stdout, stderr
        streams = [None, None]
        values = [None, None]
        redir_mode = (stdout is not None) or (stderr is not None)
        # Set up redirect mode
        if redir_mode:
            for i, param in enumerate(params):
                streams[i] = StringIO() if param is Capture.pipe else param
            redir = StandardStreamsTuple(None, *streams)
            terminal = RedirTerminal(redir=redir,
                                     inner_term=self.terminal)
        else:
            terminal = self.terminal
        try:
            # Evaluate the command.  Note that parse_args may raise
            # SystemExit on invalid arguments.
            returncode = self.perform(self.parser.parse_args(args), terminal)
            if returncode is None:
                returncode = 0
            # Collect captured values on the success path.
            if redir_mode:
                for i, param in enumerate(params):
                    if param is Capture.pipe:
                        values[i] = streams[i].getvalue()
        finally:
            # Close the pipes we created even when perform() or
            # parse_args() raises; previously they leaked on error.
            if redir_mode:
                for param, stream in zip(params, streams):
                    if param is Capture.pipe:
                        stream.close()
        # Return result object
        return Completed(args=args, returncode=returncode,
                         stdout=values[0], stderr=values[1])
class Subcommand(Command):
    """A branch or leaf in the subcommand tree."""
    def __init__(self, *, name: str,
                 parser_options: Mapping[str, Any]=None,
                 **super_kwargs) -> None:
        """
        :param name: The name used to invoke this subcommand on the
            command line (passed to ``add_parser`` by the parent).
        :param parser_options: Keyword arguments for this
            subcommand's parser.
        """
        super().__init__(parser_options=parser_options, **super_kwargs)
        self.name: str = name
class Supercommand(Command):
    """A collection of subcommands."""

    def __init__(self, *, subcommands: Iterable[Subcommand],
                 parser_options: Mapping[str, Any]=None,
                 **super_kwargs) -> None:
        """
        :param subcommands: The child commands; materialized into a
            tuple so they can be iterated more than once.
        """
        super().__init__(parser_options=parser_options, **super_kwargs)
        self.subcommands = tuple(subcommands)

    def config_parser(self, parser: ArgumentParser) -> None:
        """Implements :meth:`Command.config_parser`.

        Registers one sub-parser per child and lets each child
        configure its own parser.
        """
        super().config_parser(parser)
        subparsers = parser.add_subparsers()
        for child in self.subcommands:
            child_parser = subparsers.add_parser(child.name,
                                                 **child.parser_options)
            child.config_parser(child_parser)
class Action(Subcommand, Performer, metaclass=ABCMeta):
    """A leaf in the subcommand tree."""
    def config_parser(self, parser: ArgumentParser) -> None:
        """Implements :meth:`Command.config_parser`.

        Records this object as the parser's ``selected_action``
        default, so dispatch can find it after parsing (see
        ``Agent.perform``).  Subclasses should override this method
        to further configure their parsers, but must also invoke
        this implementation through a call to ``super()``.
        """
        super().config_parser(parser)
        parser.set_defaults(selected_action=self)
class Repertoire(Subcommand, Supercommand):
    """A branch of subcommands.

    Combines :class:`Subcommand` (it has a name, so it can be
    attached to a parent) with :class:`Supercommand` (it holds its
    own child subcommands); no extra behavior of its own.
    """
    pass
class Agent(Program, Supercommand):
    """The root of a subcommand tree."""

    def perform(self, parsed: Any, terminal: Terminal[Path]):
        """Implements :meth:`Performer.perform`.

        Dispatches to the action the parser selected.  When no
        subcommand was given, prints a notice and re-runs the
        program with ``-h`` to show the usage text.
        """
        action = getattr(parsed, 'selected_action', None)
        if action is None:
            terminal.print('No action selected.')
            return self.eval_sys('-h')
        return action.perform(parsed, terminal)
class VerboseCommand(Command):
    """Mixin class that adds verbosity handling to commands.

    Mix with :class:`sav.bo.cli.Program` to add the verbosity switch
    to a command-line program, or with
    :class:`sav.bo.cli.actions.Action` to add verbosity handling for
    a single subcommand.
    """

    def config_parser(self, parser: ArgumentParser):
        """Implements :meth:`sav.bo.cli.Command.config_parser`."""
        super().config_parser(parser)
        parser.add_argument(
            "-v", "--verbosity", action="count", default=0,
            help="increase output verbosity")

    def perform(self, parsed: Any, terminal: Terminal[Path]):
        """Implements :meth:`sav.bo.cli.Performer.perform`.

        Wraps *terminal* according to the parsed verbosity level and
        delegates to :meth:`perform_verbosity`.
        """
        wrapped = VerboseTerminal(verbosity=parsed.verbosity,
                                  inner_term=terminal)
        return self.perform_verbosity(parsed, wrapped)

    @abstractmethod
    def perform_verbosity(self, parsed: Any, terminal: Terminal[Path]):
        """Implement this method to construct your custom command.

        :param parsed: This will have a ``verbosity`` field.
        :param terminal: Either a
            :class:`sav.bo.terminals.verbosity.VerboseTerminal` or
            another terminal wrapped around it.
        """
        pass
|
/sav.bo-0.1.0.tar.gz/sav.bo-0.1.0/sav/bo/cli.py
| 0.877765 | 0.170854 |
cli.py
|
pypi
|
Rationale
=========
Introduction
------------
Apache Hadoop is an industry standard and widely adopted MapReduce implementation.
The aim of this project is to enable users to easily provision and manage Hadoop clusters on OpenStack.
It is worth mentioning that Amazon has provided Hadoop for several years as the Amazon Elastic MapReduce (EMR) service.
Savanna aims to provide users with simple means to provision Hadoop clusters
by specifying several parameters like Hadoop version, cluster topology, nodes hardware details
and a few more. After user fills in all the parameters, Savanna deploys the cluster in a few minutes.
Also Savanna provides means to scale already provisioned cluster by adding/removing worker nodes on demand.
The solution will address the following use cases:
* fast provisioning of Hadoop clusters on OpenStack for Dev and QA;
* utilization of unused compute power from general purpose OpenStack IaaS cloud;
* "Analytics as a Service" for ad-hoc or bursty analytic workloads (similar to AWS EMR).
Key features are:
* designed as an OpenStack component;
* managed through REST API with UI available as part of OpenStack Dashboard;
* support for different Hadoop distributions:
* pluggable system of Hadoop installation engines;
* integration with vendor specific management tools, such as Apache Ambari or Cloudera Management Console;
* predefined templates of Hadoop configurations with ability to modify parameters.
Details
-------
The Savanna product communicates with the following OpenStack components:
* Horizon - provides GUI with ability to use all of Savanna’s features;
* Keystone - authenticates users and provides security token that is used to work with the OpenStack,
hence limiting user abilities in Savanna to his OpenStack privileges;
* Nova - is used to provision VMs for Hadoop Cluster;
* Glance - Hadoop VM images are stored there, each image containing an installed OS and Hadoop;
  the pre-installed Hadoop should give us a good head start on node start-up;
* Swift - can be used as a storage for data that will be processed by Hadoop jobs.
.. image:: images/openstack-interop.png
:width: 800 px
:scale: 99 %
:align: left
General Workflow
----------------
Savanna will provide two levels of abstraction for API and UI based on the addressed use cases:
cluster provisioning and analytics as a service.
For fast cluster provisioning the generic workflow will be as follows:
* select Hadoop version;
* select base image with or without pre-installed Hadoop:
* for base images without Hadoop pre-installed Savanna will support pluggable deployment engines integrated with vendor tooling;
* define cluster configuration, including size and topology of the cluster and setting the different type of Hadoop parameters (e.g. heap size):
* to ease the configuration of such parameters mechanism of configurable templates will be provided;
* provision the cluster: Savanna will provision VMs, install and configure Hadoop;
* operation on the cluster: add/remove nodes;
* terminate the cluster when it’s not needed anymore.
For analytics as a service the generic workflow will be as follows:
* select one of predefined Hadoop versions;
* configure the job:
* choose type of the job: pig, hive, jar-file, etc.;
* provide the job script source or jar location;
* select input and output data location (initially only Swift will be supported);
* select location for logs;
* set limit for the cluster size;
* execute the job:
* all cluster provisioning and job execution will happen transparently to the user;
* cluster will be removed automatically after job completion;
* get the results of computations (for example, from Swift).
User’s Perspective
------------------
While provisioning cluster through Savanna, user operates on three types of entities: Node Group Templates, Cluster Templates and Clusters.
A Node Group Template describes a group of nodes within cluster. It contains a list of hadoop processes that will be launched on each instance in a group.
Also a Node Group Template may provide node scoped configurations for those processes.
This kind of templates encapsulates hardware parameters (flavor) for the node VM and configuration for Hadoop processes running on the node.
A Cluster Template is designed to bring Node Group Templates together to form a Cluster.
A Cluster Template defines what Node Groups will be included and how many instances will be created in each.
Some of Hadoop Configurations can not be applied to a single node, but to a whole Cluster, so user can specify this kind of configurations in a Cluster Template.
Savanna enables user to specify which processes should be added to an anti-affinity group within a Cluster Template. If a process is included into an anti-affinity
group, it means that VMs where this process is going to be launched should be scheduled to different hardware hosts.
The Cluster entity represents a Hadoop Cluster. It is mainly characterized by VM image with pre-installed Hadoop which
will be used for cluster deployment. User may choose one of pre-configured Cluster Templates to start a Cluster.
To get access to VMs after a Cluster has started, user should specify a keypair.
Savanna provides several constraints on Hadoop cluster topology. JobTracker and NameNode processes could be run either on a single
VM or two separate ones. Also cluster could contain worker nodes of different types. Worker nodes could run both TaskTracker and DataNode,
or either of these processes alone. Savanna allows user to create cluster with any combination of these options,
but it will not allow the creation of a non-working topology, for example: a set of workers with DataNodes, but without a NameNode.
Each Cluster belongs to some tenant determined by user. Users have access only to objects located in
tenants they have access to. Users could edit/delete only objects they created. Naturally admin users have full access to every object.
That way Savanna complies with general OpenStack access policy.
Integration with Swift
----------------------
The Swift service is a standard object storage in OpenStack environment, analog of Amazon S3. As a rule it is deployed
on bare metal machines. It is natural to expect Hadoop on OpenStack to process data stored there. There are a couple
of enhancements on the way which can help there.
First, a FileSystem implementation for Swift: `HADOOP-8545 <https://issues.apache.org/jira/browse/HADOOP-8545>`_.
With that thing in place, Hadoop jobs can work with Swift
as naturally as with HDFS.
On the Swift side, we have the change request: `Change I6b1ba25b <https://review.openstack.org/#/c/21015/>`_ (merged).
It implements the ability to list endpoints for an object, account or container, to make it possible to integrate swift
with software that relies on data locality information to avoid network overhead.
To get more information on how to enable Swift support see :doc:`userdoc/hadoop-swift`.
Pluggable Deployment and Monitoring
-----------------------------------
In addition to the monitoring capabilities provided by vendor-specific Hadoop management tooling, Savanna will provide pluggable integration with external monitoring systems such as Nagios or Zabbix.
Both deployment and monitoring tools will be installed on stand-alone VMs, thus allowing a single instance to manage/monitor several clusters at once.
|
/savanna-0.3.tar.gz/savanna-0.3/doc/source/overview.rst
| 0.895557 | 0.797162 |
overview.rst
|
pypi
|
Elastic Data Processing (EDP)
=============================
Overview
--------
Savanna's Elastic Data Processing facility or :dfn:`EDP` allows the execution of MapReduce jobs on clusters created from Savanna. EDP supports:
* execution of Hive scripts, Pig scripts and Jar files (:dfn:`job binaries`)
* storage of job binaries in Swift or Savanna's own database
* access to input and output data sources in Swift
* configuration of jobs at submission time
* execution of jobs on existing clusters or transient clusters
Interfaces
----------
The EDP features can be used from the Savanna web UI which is described in the :doc:`../horizon/dashboard.user.guide`.
The EDP features also can be used directly by a client through the :doc:`rest_api_v1.1_EDP`.
EDP Concepts
------------
Savanna EDP uses a collection of simple objects to define and execute MapReduce jobs. These objects are stored in the Savanna database when they
are created, allowing them to be reused. This modular approach with database persistence allows code and data to be reused across multiple jobs.
The essential components of a job are:
* executable code to run
* input data to process
* an output data location
* any additional configuration parameters needed for the job run
These components are supplied through the objects described below.
Job Binaries
++++++++++++
A :dfn:`Job Binary` object stores a URL to a single Pig script, Hive script, or Jar file and any credentials needed to retrieve the file. The file may be stored in the Savanna internal database or in Swift.
Job binaries in the Savanna database are stored as raw bytes in a :dfn:`Job Binary Internal` object. If you review the REST API or the code you will see references to it. This object's sole purpose is to store a file for later retrieval. No extra credentials need to be supplied for files stored internally.
Job binaries may be stored in Swift as well. Currently, the Swift service must be running as part of the same OpenStack installation where Savanna is running. Credentials (username and password) must be supplied that allow Savanna to authenticate to Swift and retrieve the file.
There is currently a configurable limit on the size of a single job binary that may be retrieved by Savanna. This limit is 5MB and may be set with the *job_binary_max_KB* setting in the :file:`savanna.conf` configuration file.
Note, the URLs used by Savanna have special prefixes and are not intended to be used to reference job binaries outside of Savanna.
Jobs
++++
A :dfn:`Job` object specifies the type of the job and lists all of the individual Job Binary objects that are required for execution. This supports code reuse since an individual Job Binary may be referenced by multiple Jobs. A Job object may specify a main binary and supporting libraries.
Currently for Jar job types, the main binary is *not* applicable. All binaries should be specified as supporting libraries, and the mapper and reducer classes *must* be specified with configuration parameters. See the :ref:`edp_workflow` section for notes on setting mapper and reducer configs.
If the job type is Hive or Pig, a main binary *must* be specified even when supporting libraries are specified.
Data Sources
++++++++++++
A :dfn:`Data Source` object stores a URL which designates the location of input or output data and any credentials needed to access the location. Currently Savanna supports input and output data in Swift. Currently, the Swift service must be running as part of the same OpenStack installation where Savanna is running.
Job Execution
+++++++++++++
A :dfn:`Job Execution` object pulls other objects together to execute the job. It specifies a Job object, input Data Source, output Data Source, any necessary configuration parameters, and the cluster on which to run. The Job Execution object also reports status about the job as it runs.
.. _edp_workflow:
Workflow
--------
The general workflow for defining and executing a MapReduce job in Savanna is essentially the same whether using the web UI or the REST API.
1. Launch a cluster from Savanna if there is not one already available
2. Create all of the Job Binaries needed to run the job, stored in the Savanna database or in Swift
+ When using the REST API and internal storage of job binaries, there is an extra step here to first create the Job Binary Internal objects
+ Once the Job Binary Internal objects are created, Job Binary objects may be created which refer to them via a savanna-db:// URL
3. Create a Job object which references the Job Binaries created in step 2
4. Create an input Data Source which points to the data you wish to process
5. Create an output Data Source which points to the location for output data
6. Create a Job Execution object specifying the Job object, input Data Source, output Data Source, and configuration parameters
+ When using the web UI this is done with the :guilabel:`Launch On Existing Cluster` or :guilabel:`Launch on New Cluster` buttons on the Jobs tab
+ When using the REST API this is done via the */jobs/<job_id>/execute* method
+ In both cases Savanna constructs and launches a Job Execution object as a single step on behalf of the user based on the supplied values
**Important!**
If the job type is Jar, the mapper and reducer classes *must* be specified as configuration parameters. This can be done on the :guilabel:`Configure` tab during job launch through the web UI or through the *job_configs* parameter when using the */jobs/<job_id>/execute* REST method.
+-------------------------+-----------------------------------------+
| Parameter | Example Value |
+=========================+=========================================+
| mapred.mapper.class | org.apache.oozie.example.SampleMapper |
+-------------------------+-----------------------------------------+
| mapred.reducer.class | org.apache.oozie.example.SampleReducer |
+-------------------------+-----------------------------------------+
The workflow is simpler when using existing objects. For example, to construct a new job which uses existing binaries and input data a user may only need to perform steps 3, 5, and 6 above. Of course, to repeat the same job multiple times a user would need only step 6.
EDP Technical Considerations
============================
There are several things in EDP which require attention in order
to work properly. They are listed on this page.
Transient Clusters
------------------
EDP allows running jobs on transient clusters. That way the cluster is created
specifically for the job and is shut down automatically once the job is
finished.
Two config parameters control the behaviour of periodic clusters:
* periodic_enable - if set to 'False', Savanna will do nothing to a transient
cluster once the job it was created for is completed. If it is set to
'True', then the behaviour depends on the value of the next parameter.
* use_identity_api_v3 - set it to 'False' if your OpenStack installation
does not provide Keystone API v3. In that case Savanna will not terminate
unneeded clusters. Instead it will set their state to 'AwaitingTermination'
meaning that they could be manually deleted by a user. If the parameter is
set to 'True', Savanna will itself terminate the cluster. The limitation is
caused by lack of 'trusts' feature in Keystone API older than v3.
If both parameters are set to 'True', Savanna works with transient clusters in
the following manner:
1. When a user requests for a job to be executed on a transient cluster,
Savanna creates such a cluster.
2. Savanna drops the user's credentials once the cluster is created but
prior to that it creates a trust allowing it to operate with the
cluster instances in the future without user credentials.
3. Once a cluster is not needed, Savanna terminates its instances using the
stored trust. Savanna drops the trust after that.
|
/savanna-0.3.tar.gz/savanna-0.3/doc/source/userdoc/edp.rst
| 0.92968 | 0.814053 |
edp.rst
|
pypi
|
Hortonworks Data Platform Plugin
================================
The Hortonworks Data Platform (HDP) Savanna plugin provides a way to provision HDP clusters on OpenStack using templates in a single click and in an easily repeatable fashion. As seen from the architecture diagram below, the Savanna controller serves as the glue between Hadoop and OpenStack. The HDP plugin mediates between the Savanna controller and Apache Ambari in order to deploy and configure Hadoop on OpenStack. Core to the HDP Plugin is Apache Ambari that is used as the orchestrator for deploying the HDP stack on OpenStack.
.. image:: ../images/hdp-plugin-architecture.png
:width: 800 px
:scale: 80 %
:align: center
The HDP plugin uses Ambari Blueprints aka templates for cluster provisioning.
Apache Ambari Blueprints (aka Cluster templates)
------------------------------------------------
Apache Ambari Blueprints is a portable document definition, which provides a complete definition for an Apache Hadoop cluster, including cluster topology, components, services and their configurations. Ambari Blueprints can be consumed by the HDP plugin to instantiate a Hadoop cluster on OpenStack. The benefits of this approach is that it allows for Hadoop clusters to be configured and deployed using an Ambari native format that can be used with as well as outside of OpenStack allowing for clusters to be re-instantiated in a variety of environments.
For more information about Apache Ambari Blueprints, refer to: https://issues.apache.org/jira/browse/AMBARI-1783. Note that Apache Ambari Blueprints are not yet finalized.
Operation
---------
The HDP Plugin performs the following four primary functions during cluster creation:
1. Software deployment - the plugin orchestrates the deployment of the required software to the target VMs
2. Services Installation - the Hadoop services configured for the node groups within the cluster are installed on the associated VMs
3. Services Configuration - the plugin merges the default configuration values and user provided configurations for each installed service to the cluster
4. Services Start - the plugin invokes the appropriate APIs to indicate to the Ambari Server that the cluster services should be started
Images
------
The Savanna HDP plugin can make use of either minimal (operating system only) images or pre-populated HDP images. The base requirement for both is that the image is cloud-init enabled and contains a supported operating system (see http://docs.hortonworks.com/HDPDocuments/HDP1/HDP-1.2.4/bk_hdp1-system-admin-guide/content/sysadminguides_ha_chap2_3.html).
The advantage of a pre-populated image is that provisioning time is accelerated, as packages do not need to be downloaded and installed which make up the majority of the time spent in the provisioning cycle.
As with the provided pre-populated image, a pre-populated image can install any of the following packages:
* hadoop-libhdfs
* hadoop-native
* hadoop-pipes
* hadoop-sbin
* hadoop-lzo
* hadoop-lzo-native
* mysql-server
* httpd
* net-snmp
* net-snmp-utils
* perl-Net-SNMP
* nagios
* fping
* nagios-plugins
* hdp_mon_nagios_addons
* ganglia-gmetad
* gweb hdp_mon_ganglia_addons
* ganglia-gmond
* python-rrdtool.x86_64
* glibc glibc.i686
* appropriate JDK satisfying Ambari requirement
* epel-release
Any packages that are not installed in a pre-populated image will automatically be installed during the HDP provisioning process.
There are two VM images provided for use with the HDP Plugin:
1. `centos-6_64-hdp-1.3.qcow2 <http://public-repo-1.hortonworks.com/savanna/images/centos-6_4-64-hdp-1.3.qcow2>`_: This image contains most of the requisite packages necessary for HDP deployment. The packages contained herein correspond to the HDP 1.3 release. The operating system is a minimal CentOS 6.4 cloud-init enabled install. This image can only be used to provision HDP 1.3 hadoop clusters.
2. `centos-6-64-hdp-vanilla.qcow2 <http://public-repo-1.hortonworks.com/savanna/images/centos-6_4-64-vanilla.qcow2>`_: This image provides only a minimal install of CentOS 6.4 and is cloud-init enabled. This image can be used to provision any versions of HDP supported by Savanna.
HDP plugin requires an image to be tagged in Savanna Image Registry with
two tags: 'hdp' and '<hdp version>' (e.g. '1.3.2').
Also in the Image Registry you will need to specify username for an image.
It should be 'root' for both images.
Please refer to the reference VM image provided for specific details.
Limitations
-----------
The HDP plugin currently has the following limitations:
* Currently, the HDP plugin provides support for HDP 1.3. Once HDP2 is released, support for this version will be provided.
* Swift integration is not yet implemented.
* It is not possible to decrement the number of node-groups or hosts per node group in a Savanna generated cluster.
* Only the following services are available to be deployed via Savanna:
* Ambari
* Nagios
* Ganglia
* HDFS
* MAPREDUCE
Note: Other services may be added using Ambari after initial cluster creation.
Cluster Validation
------------------
Prior to Hadoop cluster creation, the HDP plugin will perform the following validation checks to ensure a successful Hadoop deployment:
* Ensure the existence of a NAMENODE process in the cluster
* Ensure the existence of a JOBTRACKER should any TASKTRACKER be deployed to the cluster
* Ensure the deployment of one Ambari Server instance to the cluster
* Ensure that each defined node group had an associated Ambari Agent configured
The HDP Plugin and Savanna Support
----------------------------------
A Hortonworks supported version of HDP OpenStack plugin will become available at a future date. For more information, please contact Hortonworks.
|
/savanna-0.3.tar.gz/savanna-0.3/doc/source/userdoc/hdp_plugin.rst
| 0.827515 | 0.835215 |
hdp_plugin.rst
|
pypi
|
# ORM
<p>
<a href="https://travis-ci.org/encode/orm">
<img src="https://travis-ci.org/encode/orm.svg?branch=master" alt="Build Status">
</a>
<a href="https://codecov.io/gh/encode/orm">
<img src="https://codecov.io/gh/encode/orm/branch/master/graph/badge.svg" alt="Coverage">
</a>
<a href="https://pypi.org/project/orm/">
<img src="https://badge.fury.io/py/orm.svg" alt="Package version">
</a>
</p>
The `orm` package is an async ORM for Python, with support for Postgres,
MySQL, and SQLite. ORM is built with:
* [SQLAlchemy core][sqlalchemy-core] for query building.
* [`databases`][databases] for cross-database async support.
* [`typesystem`][typesystem] for data validation.
Because ORM is built on SQLAlchemy core, you can use Alembic to provide
database migrations.
**ORM is still under development: We recommend pinning any dependencies with `orm~=0.1`**
**Note**: Use `ipython` to try this from the console, since it supports `await`.
```python
import databases
import orm
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Note(orm.Model):
__tablename__ = "notes"
__database__ = database
__metadata__ = metadata
id = orm.Integer(primary_key=True)
text = orm.String(max_length=100)
completed = orm.Boolean(default=False)
# Create the database
engine = sqlalchemy.create_engine(str(database.url))
metadata.create_all(engine)
# .create()
await Note.objects.create(text="Buy the groceries.", completed=False)
await Note.objects.create(text="Call Mum.", completed=True)
await Note.objects.create(text="Send invoices.", completed=True)
# .all()
notes = await Note.objects.all()
# .filter()
notes = await Note.objects.filter(completed=True).all()
# exact, iexact, contains, icontains, lt, lte, gt, gte, in
notes = await Note.objects.filter(text__icontains="mum").all()
# .get()
note = await Note.objects.get(id=1)
# .update()
await note.update(completed=True)
# .delete()
await note.delete()
# 'pk' always refers to the primary key
note = await Note.objects.get(pk=2)
note.pk # 2
```
ORM supports loading and filtering across foreign keys...
```python
import databases
import orm
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Album(orm.Model):
__tablename__ = "album"
__metadata__ = metadata
__database__ = database
id = orm.Integer(primary_key=True)
name = orm.String(max_length=100)
class Track(orm.Model):
__tablename__ = "track"
__metadata__ = metadata
__database__ = database
id = orm.Integer(primary_key=True)
album = orm.ForeignKey(Album)
title = orm.String(max_length=100)
position = orm.Integer()
# Create some records to work with.
malibu = await Album.objects.create(name="Malibu")
await Track.objects.create(album=malibu, title="The Bird", position=1)
await Track.objects.create(album=malibu, title="Heart don't stand a chance", position=2)
await Track.objects.create(album=malibu, title="The Waters", position=3)
fantasies = await Album.objects.create(name="Fantasies")
await Track.objects.create(album=fantasies, title="Help I'm Alive", position=1)
await Track.objects.create(album=fantasies, title="Sick Muse", position=2)
# Fetch an instance, without loading a foreign key relationship on it.
track = await Track.objects.get(title="The Bird")
# We have an album instance, but it only has the primary key populated
print(track.album) # Album(id=1) [sparse]
print(track.album.pk) # 1
print(track.album.name) # Raises AttributeError
# Load the relationship from the database
await track.album.load()
assert track.album.name == "Malibu"
# This time, fetch an instance, loading the foreign key relationship.
track = await Track.objects.select_related("album").get(title="The Bird")
assert track.album.name == "Malibu"
# Fetch instances, with a filter across an FK relationship.
tracks = await Track.objects.filter(album__name="Fantasies").all()
assert len(tracks) == 2
# Fetch instances, with a filter and operator across an FK relationship.
tracks = await Track.objects.filter(album__name__iexact="fantasies").all()
assert len(tracks) == 2
# Limit a query
tracks = await Track.objects.limit(1).all()
assert len(tracks) == 1
```
## Data types
The following keyword arguments are supported on all field types.
* `primary_key`
* `allow_null`
* `default`
* `index`
* `unique`
All fields are required unless one of the following is set:
* `allow_null` - Creates a nullable column. Sets the default to `None`.
* `allow_blank` - Allow empty strings to validate. Sets the default to `""`.
* `default` - Set a default value for the field.
The following column types are supported.
See TypeSystem for [type-specific validation keyword arguments][typesystem-fields].
* `orm.String(max_length)`
* `orm.Text()`
* `orm.Boolean()`
* `orm.Integer()`
* `orm.Float()`
* `orm.Date()`
* `orm.Time()`
* `orm.DateTime()`
* `orm.JSON()`
[sqlalchemy-core]: https://docs.sqlalchemy.org/en/latest/core/
[databases]: https://github.com/encode/databases
[typesystem]: https://github.com/encode/typesystem
[typesystem-fields]: https://www.encode.io/typesystem/fields/
|
/savannah-0.0.1.tar.gz/savannah-0.0.1/README.md
| 0.525856 | 0.849847 |
README.md
|
pypi
|
import typing
import sqlalchemy
import typesystem
from typesystem.schemas import SchemaMetaclass
from orm.exceptions import MultipleMatches, NoMatch
from orm.fields import ForeignKey
# Maps a queryset lookup suffix (e.g. the "icontains" in name__icontains)
# onto the SQLAlchemy ColumnElement method used to build the WHERE clause.
FILTER_OPERATORS = {
    "exact": "__eq__",
    "iexact": "ilike",
    "contains": "like",
    "icontains": "ilike",
    "in": "in_",
    "gt": "__gt__",
    "gte": "__ge__",
    "lt": "__lt__",
    "lte": "__le__",
}
class ModelMetaclass(SchemaMetaclass):
    """Metaclass that builds a ``sqlalchemy.Table`` from a Model's declared fields.

    Sets ``__table__`` and ``__pkname__`` on every concrete model class;
    abstract models (``__abstract__ = True``) are passed through untouched.
    """

    def __new__(
        cls: type, name: str, bases: typing.Sequence[type], attrs: dict
    ) -> type:
        new_model = super(ModelMetaclass, cls).__new__(  # type: ignore
            cls, name, bases, attrs
        )
        # The base Model class itself is abstract and gets no table.
        if attrs.get("__abstract__"):
            return new_model
        tablename = attrs["__tablename__"]
        metadata = attrs["__metadata__"]
        pkname = None
        columns = []
        # NOTE(review): this loop rebinds 'name' (the class-name parameter);
        # harmless here because 'name' is not used again afterwards.
        for name, field in new_model.fields.items():
            if field.primary_key:
                pkname = name
            columns.append(field.get_column(name))
        new_model.__table__ = sqlalchemy.Table(tablename, metadata, *columns)
        new_model.__pkname__ = pkname
        return new_model
class QuerySet:
    """Chainable, lazily-evaluated query builder bound to a Model class.

    Querysets behave immutably: ``filter()``, ``select_related()`` and
    ``limit()`` each return a *new* QuerySet. Assigned to ``Model.objects``
    and bound to the owning model via the descriptor protocol.
    """

    # Characters with special meaning in SQL LIKE patterns; backslash-escaped
    # for contains/icontains lookups.
    ESCAPE_CHARACTERS = ['%', '_']

    def __init__(self, model_cls=None, filter_clauses=None, select_related=None, limit_count=None):
        self.model_cls = model_cls
        self.filter_clauses = [] if filter_clauses is None else filter_clauses
        self._select_related = [] if select_related is None else select_related
        self.limit_count = limit_count

    def __get__(self, instance, owner):
        # Descriptor hook: accessing ``SomeModel.objects`` yields a fresh
        # queryset bound to ``SomeModel``.
        return self.__class__(model_cls=owner)

    @property
    def database(self):
        return self.model_cls.__database__

    @property
    def table(self):
        return self.model_cls.__table__

    def build_select_expression(self):
        """Build the SQLAlchemy core SELECT for this queryset, including the
        joins implied by select_related, WHERE clauses, and LIMIT."""
        tables = [self.table]
        select_from = self.table
        for item in self._select_related:
            model_cls = self.model_cls
            select_from = self.table
            for part in item.split("__"):
                model_cls = model_cls.fields[part].to
                select_from = sqlalchemy.sql.join(select_from, model_cls.__table__)
                tables.append(model_cls.__table__)
        expr = sqlalchemy.sql.select(tables)
        expr = expr.select_from(select_from)
        if self.filter_clauses:
            if len(self.filter_clauses) == 1:
                clause = self.filter_clauses[0]
            else:
                clause = sqlalchemy.sql.and_(*self.filter_clauses)
            expr = expr.where(clause)
        # Explicit None check so that limit(0) is honoured instead of ignored.
        if self.limit_count is not None:
            expr = expr.limit(self.limit_count)
        return expr

    def filter(self, **kwargs):
        """Return a new queryset narrowed by the given lookups.

        Keys may traverse foreign keys and end in an operator, e.g.
        ``album__name__iexact="x"``.
        """
        # FIX: copy instead of aliasing self.filter_clauses — appending to the
        # shared list silently mutated the queryset being filtered (and every
        # queryset derived from it).
        filter_clauses = list(self.filter_clauses)
        select_related = list(self._select_related)
        for key, value in kwargs.items():
            if "__" in key:
                parts = key.split("__")
                # Determine if we should treat the final part as a
                # filter operator or as a related field.
                if parts[-1] in FILTER_OPERATORS:
                    op = parts[-1]
                    field_name = parts[-2]
                    related_parts = parts[:-2]
                else:
                    op = "exact"
                    field_name = parts[-1]
                    related_parts = parts[:-1]
                model_cls = self.model_cls
                if related_parts:
                    # Add any implied select_related
                    related_str = "__".join(related_parts)
                    if related_str not in select_related:
                        select_related.append(related_str)
                    # Walk the relationships to the actual model class
                    # against which the comparison is being made.
                    for part in related_parts:
                        model_cls = model_cls.fields[part].to
                column = model_cls.__table__.columns[field_name]
            else:
                op = "exact"
                column = self.table.columns[key]
            # Map the operation code onto SQLAlchemy's ColumnElement
            # https://docs.sqlalchemy.org/en/latest/core/sqlelement.html#sqlalchemy.sql.expression.ColumnElement
            op_attr = FILTER_OPERATORS[op]
            has_escaped_character = False
            if op in ["contains", "icontains"]:
                has_escaped_character = any(c for c in self.ESCAPE_CHARACTERS
                                            if c in value)
                if has_escaped_character:
                    # enable escape modifier
                    for char in self.ESCAPE_CHARACTERS:
                        value = value.replace(char, f'\\{char}')
                value = f"%{value}%"
            if isinstance(value, Model):
                # Comparing against a model instance compares against its pk.
                value = value.pk
            clause = getattr(column, op_attr)(value)
            clause.modifiers['escape'] = '\\' if has_escaped_character else None
            filter_clauses.append(clause)
        return self.__class__(
            model_cls=self.model_cls,
            filter_clauses=filter_clauses,
            select_related=select_related,
            limit_count=self.limit_count,
        )

    def select_related(self, related):
        """Return a new queryset that will JOIN and populate the given
        relationship path(s)."""
        if not isinstance(related, (list, tuple)):
            related = [related]
        related = list(self._select_related) + related
        return self.__class__(
            model_cls=self.model_cls,
            filter_clauses=self.filter_clauses,
            select_related=related,
            limit_count=self.limit_count,
        )

    async def exists(self) -> bool:
        """Return True if any row matches the current filters."""
        expr = self.build_select_expression()
        expr = sqlalchemy.exists(expr).select()
        return await self.database.fetch_val(expr)

    def limit(self, limit_count: int):
        """Return a new queryset capped at ``limit_count`` rows."""
        return self.__class__(
            model_cls=self.model_cls,
            filter_clauses=self.filter_clauses,
            select_related=self._select_related,
            limit_count=limit_count,
        )

    async def count(self) -> int:
        """Return the number of matching rows."""
        expr = self.build_select_expression()
        expr = sqlalchemy.func.count().select().select_from(expr)
        return await self.database.fetch_val(expr)

    async def all(self, **kwargs):
        """Fetch all matching rows as model instances.

        Any keyword arguments are applied as additional filters.
        """
        if kwargs:
            return await self.filter(**kwargs).all()
        expr = self.build_select_expression()
        rows = await self.database.fetch_all(expr)
        return [
            self.model_cls.from_row(row, select_related=self._select_related)
            for row in rows
        ]

    async def get(self, **kwargs):
        """Fetch exactly one matching row.

        Raises:
            NoMatch: if no row matches.
            MultipleMatches: if more than one row matches.
        """
        if kwargs:
            return await self.filter(**kwargs).get()
        # LIMIT 2 is enough to distinguish "one" from "many" without
        # fetching the full result set.
        expr = self.build_select_expression().limit(2)
        rows = await self.database.fetch_all(expr)
        if not rows:
            raise NoMatch()
        if len(rows) > 1:
            raise MultipleMatches()
        return self.model_cls.from_row(rows[0], select_related=self._select_related)

    async def create(self, **kwargs):
        """Validate ``kwargs``, INSERT a row, and return the new instance."""
        # Validate the keyword arguments.
        fields = self.model_cls.fields
        required = [key for key, value in fields.items() if not value.has_default()]
        validator = typesystem.Object(
            properties=fields, required=required, additional_properties=False
        )
        kwargs = validator.validate(kwargs)
        # Remove primary key when None to prevent not null constraint in postgresql.
        pkname = self.model_cls.__pkname__
        pk = self.model_cls.fields[pkname]
        if kwargs[pkname] is None and pk.allow_null:
            del kwargs[pkname]
        # Build the insert expression.
        expr = self.table.insert()
        expr = expr.values(**kwargs)
        # Execute the insert, and return a new model instance; the database
        # driver returns the generated primary key.
        instance = self.model_cls(kwargs)
        instance.pk = await self.database.execute(expr)
        return instance
class Model(typesystem.Schema, metaclass=ModelMetaclass):
    """Base class for database-backed models.

    Concrete subclasses must define ``__tablename__``, ``__database__`` and
    ``__metadata__``; columns are declared as orm field attributes. ``pk`` is
    always an alias for the primary-key field.
    """

    __abstract__ = True
    objects = QuerySet()

    def __init__(self, *args, **kwargs):
        if "pk" in kwargs:
            # 'pk' is an alias for whichever field is the primary key.
            kwargs[self.__pkname__] = kwargs.pop("pk")
        super().__init__(*args, **kwargs)

    @property
    def pk(self):
        return getattr(self, self.__pkname__)

    @pk.setter
    def pk(self, value):
        setattr(self, self.__pkname__, value)

    async def update(self, **kwargs):
        """UPDATE the given fields on this row, mirroring them on the instance."""
        # Validate only the fields actually being updated.
        fields = {key: field for key, field in self.fields.items() if key in kwargs}
        validator = typesystem.Object(properties=fields)
        kwargs = validator.validate(kwargs)
        # Build the update expression.
        pk_column = getattr(self.__table__.c, self.__pkname__)
        expr = self.__table__.update()
        expr = expr.values(**kwargs).where(pk_column == self.pk)
        # Perform the update.
        await self.__database__.execute(expr)
        # Update the model instance.
        for key, value in kwargs.items():
            setattr(self, key, value)

    async def delete(self):
        """DELETE this row from the database (matched by primary key)."""
        pk_column = getattr(self.__table__.c, self.__pkname__)
        expr = self.__table__.delete().where(pk_column == self.pk)
        await self.__database__.execute(expr)

    async def load(self):
        """Re-fetch this row by primary key and refresh all attributes."""
        pk_column = getattr(self.__table__.c, self.__pkname__)
        expr = self.__table__.select().where(pk_column == self.pk)
        row = await self.__database__.fetch_one(expr)
        for key, value in dict(row).items():
            setattr(self, key, value)

    @classmethod
    def from_row(cls, row, select_related=None):
        """
        Instantiate a model instance, given a database row.
        """
        # FIX: avoid a mutable default argument ([] is shared across calls);
        # None stands in for "no related models to populate".
        select_related = select_related or []
        item = {}
        # Instantiate any child instances first.
        for related in select_related:
            if "__" in related:
                first_part, remainder = related.split("__", 1)
                model_cls = cls.fields[first_part].to
                item[first_part] = model_cls.from_row(row, select_related=[remainder])
            else:
                model_cls = cls.fields[related].to
                item[related] = model_cls.from_row(row)
        # Pull out the regular column values.
        for column in cls.__table__.columns:
            if column.name not in item:
                item[column.name] = row[column]
        return cls(item)

    def __setattr__(self, key, value):
        if key in self.fields:
            # Setting a relationship to a raw pk value should set a
            # fully-fledged relationship instance, with just the pk loaded.
            value = self.fields[key].expand_relationship(value)
        super().__setattr__(key, value)
|
/savannah-0.0.1.tar.gz/savannah-0.0.1/orm/models.py
| 0.588416 | 0.155271 |
models.py
|
pypi
|
import typing
import sqlalchemy
import typesystem
class ModelField:
    """Mixin adding SQLAlchemy column behaviour to a typesystem field.

    Designed for cooperative multiple inheritance: extra keyword arguments
    are forwarded to the typesystem field via ``super().__init__()``.
    """

    def __init__(
        self,
        primary_key: bool = False,
        index: bool = False,
        unique: bool = False,
        **kwargs: typing.Any,
    ) -> None:
        if primary_key:
            # Primary keys may be omitted on create (the database assigns
            # them), so they must validate as null.
            kwargs["allow_null"] = True
        super().__init__(**kwargs)  # type: ignore
        self.primary_key = primary_key
        self.index = index
        self.unique = unique

    def get_column(self, name: str) -> sqlalchemy.Column:
        """Build the ``sqlalchemy.Column`` for this field."""
        column_type = self.get_column_type()
        allow_null = getattr(self, "allow_null", False)
        constraints = self.get_constraints()
        return sqlalchemy.Column(
            name,
            column_type,
            *constraints,
            primary_key=self.primary_key,
            # Primary keys are never nullable at the column level, even though
            # they validate as null (see __init__).
            nullable=allow_null and not self.primary_key,
            index=self.index,
            unique=self.unique,
        )

    def get_column_type(self) -> sqlalchemy.types.TypeEngine:
        # Subclasses must map themselves onto a SQLAlchemy type.
        raise NotImplementedError()  # pragma: no cover

    def get_constraints(self):
        # Extra column constraints (e.g. foreign keys); none by default.
        return []

    def expand_relationship(self, value):
        # Non-relationship fields store the assigned value unchanged.
        return value
class String(ModelField, typesystem.String):
    """Bounded string column; ``max_length`` is mandatory."""

    def __init__(self, **kwargs):
        # NOTE(review): assert is stripped under ``python -O``; a ValueError
        # would be a sturdier guard for this required argument.
        assert "max_length" in kwargs, "max_length is required"
        super().__init__(**kwargs)

    def get_column_type(self):
        return sqlalchemy.String(length=self.max_length)
class Text(ModelField, typesystem.Text):
    """Unbounded text column."""

    def get_column_type(self):
        return sqlalchemy.Text()
class Integer(ModelField, typesystem.Integer):
    """Integer column."""

    def get_column_type(self):
        return sqlalchemy.Integer()
class Float(ModelField, typesystem.Float):
    """Floating-point column."""

    def get_column_type(self):
        return sqlalchemy.Float()
class Boolean(ModelField, typesystem.Boolean):
    """Boolean column."""

    def get_column_type(self):
        return sqlalchemy.Boolean()
class DateTime(ModelField, typesystem.DateTime):
    """Date-and-time column."""

    def get_column_type(self):
        return sqlalchemy.DateTime()
class Date(ModelField, typesystem.Date):
    """Date-only column."""

    def get_column_type(self):
        return sqlalchemy.Date()
class Time(ModelField, typesystem.Time):
    """Time-only column."""

    def get_column_type(self):
        return sqlalchemy.Time()
class JSON(ModelField, typesystem.Any):
    """JSON column; accepts any JSON-serialisable value (typesystem.Any)."""

    def get_column_type(self):
        return sqlalchemy.JSON()
class ForeignKey(ModelField, typesystem.Field):
    """A relationship to another Model, stored as that model's primary key."""

    def __init__(self, to, allow_null: bool = False):
        super().__init__(allow_null=allow_null)
        # The related Model class.
        self.to = to

    def validate(self, value, strict=False):
        # FIX: accept either a related model instance or a raw primary-key
        # value; the original unconditionally read ``value.pk`` and raised
        # AttributeError when callers passed a plain pk (e.g. an int).
        if isinstance(value, self.to):
            return value.pk
        return value

    def get_constraints(self):
        # FOREIGN KEY constraint pointing at the related table's pk column.
        fk_string = self.to.__tablename__ + "." + self.to.__pkname__
        return [sqlalchemy.schema.ForeignKey(fk_string)]

    def get_column_type(self):
        # Mirror the column type of the related model's primary key.
        to_column = self.to.fields[self.to.__pkname__]
        return to_column.get_column_type()

    def expand_relationship(self, value):
        # A raw pk is wrapped in a sparse related instance with only the
        # pk populated (loaded later via .load() / select_related()).
        if isinstance(value, self.to):
            return value
        return self.to({self.to.__pkname__: value})
|
/savannah-0.0.1.tar.gz/savannah-0.0.1/orm/fields.py
| 0.661486 | 0.180431 |
fields.py
|
pypi
|
from openpyxl.styles.differential import DifferentialStyle
from openpyxl.styles import PatternFill, Font
class Dxf:
    """
    Contains attributes with template styles for conditional format.

    Attributes: ``red``, ``yellow``, ``green``, ``cyan``, ``orange`` — each a
    ``DifferentialStyle`` pairing a font colour with a background fill.

    Examples
    ----------
    Calling this class:
        from openpyxl.format.rule import Rule, IconSetRule, ColorScaleRule, FormulaRule, CellIsRule, DataBarRule
        dxf = Dxf()
    Creating some rules using this template styles:
        red_rule = Rule(type="cellIs", operator="lessThan", dxf=dxf.red, formula=['25'])
        yellow_rule = Rule(type="cellIs", operator="between", dxf=dxf.yellow, formula=['25', '50'])
        green_rule = Rule(type="cellIs", operator="greaterThan", dxf=dxf.green, formula=['50'])
        cyan_rule = Rule(type="containsText", operator="containsText", dxf=dxf.cyan, formula=['NOT(ISERROR(SEARCH("TEXTO",A1)))'])
        orange_rule = Rule(type="containsText", operator="containsText", dxf=dxf.orange, formula=['NOT(ISERROR(SEARCH("TEXTO",{})))'])
        top_rule = Rule(type="top10", dxf=dxf.red, rank=5)
    Another rules:
        icon_rule = IconSetRule('3TrafficLights2', type='percent', values=[0, 50, 100], reverse=False, showValue=False)
        color_scale_rule = ColorScaleRule(start_type='min', start_value=None, start_color='FF8181', end_type='max', end_value=None, end_color='7BFF71')
        color_scale_rule = ColorScaleRule(start_type='num', start_value=0, start_color='FF8181', mid_type='num', mid_value=50, mid_color='FFFF57', end_type='num', end_value=100, end_color='7BFF71')
        databar_rule = DataBarRule(start_type='min', start_value=None, end_type='max', end_value=None, color='FFC7CE', showValue=True, minLength=10, maxLength=None)
    """

    def __init__(self):
        self.red = DifferentialStyle(font=Font(color="9C0006"), fill=PatternFill(bgColor="FFC7CE"))
        self.yellow = DifferentialStyle(font=Font(color="9C6500"), fill=PatternFill(bgColor="FFEB9C"))
        self.green = DifferentialStyle(font=Font(color="006100"), fill=PatternFill(bgColor="C6EFCE"))
        self.cyan = DifferentialStyle(font=Font(color="FFFFFF"), fill=PatternFill(bgColor="008B8B"))
        self.orange = DifferentialStyle(font=Font(color="000000"), fill=PatternFill(bgColor="F58D16"))
|
/save_excel-0.1.2-py3-none-any.whl/save_excel/dxf/dxf.py
| 0.909901 | 0.302108 |
dxf.py
|
pypi
|
from ..utils.vectorization import len_vectorized
from numpy import array
def check_empty_frame(df):
    """Return True when *df* has neither rows nor columns."""
    return df.shape == (0, 0)
def set_value(datatypes):
    """Decorator factory that type-checks a method's positional arguments.

    Args:
        datatypes: Sequence of expected types, one per positional argument
            after ``self``. A ``None`` entry means the corresponding
            argument must itself be ``None``.

    Raises:
        ValueError: when an argument does not match its expected datatype.
    """
    # Imported locally so the decorator remains self-contained.
    from functools import wraps

    def decorator(func):
        @wraps(func)  # preserve the wrapped function's name/docstring
        def wrapper(*args, **kwargs):
            # args[0] is assumed to be ``self``; pair each remaining
            # positional argument with its expected datatype.
            for arg, datatype in zip(args[1:], datatypes):
                if datatype is None:
                    if arg is not None:
                        raise ValueError(f'{func.__name__}: attribute can only be setted to {datatype}')
                elif not isinstance(arg, datatype):
                    raise ValueError(f'{func.__name__}: attribute can only be setted to {datatype}')
            return func(*args, **kwargs)
        return wrapper
    return decorator
def max_column_lengths(df, include_header=True, include_index=False):
    """
    Returns an array containing the maximum string length of each column.

    Args:
        include_header (bool): If include_header is True, the length of column names will be considered.
            In case of having multiindex in the columns only the last row will be considered.
        include_index (bool): If include_index is True, index will be considered.
    """
    if include_index==True:
        # Add index to the frame
        df_copy = df.reset_index().copy()
    else:
        # Ignore index
        df_copy = df.copy()
    # Calculate max length of values for each column
    # If the dataframe has no values, will create a list of 0 to avoid an error
    if len(df_copy)>0:
        # len_vectorized is presumably an element-wise len() over an array of
        # strings — TODO confirm against utils.vectorization.
        max_values_lengths = len_vectorized(df_copy.values.astype(str)).max(axis=0)
    else:
        max_values_lengths = [0 for x in df_copy.columns]
    if include_header==True:
        if len(df_copy.columns.names)>1:
            # Calculate lengths of column names (having multiindex):
            # only the last level of each column tuple is measured.
            max_header_lengths = len_vectorized(array(list(zip(*df_copy.columns))[-1]).astype(str))
        else:
            # Calculate lengths of column names
            max_header_lengths = len_vectorized(df_copy.columns.astype(str))
        # Returns max lengths of the comparison between values and column names
        return array([max_values_lengths, max_header_lengths]).max(axis=0)
    else:
        return max_values_lengths
def validate_path(path):
    """Return *path* normalised to end with a trailing slash ('' for None)."""
    if path is None:
        return ''
    return path if path.endswith('/') else path + '/'
def get_column_names(df):
    """Return the names of *df*'s column-index levels.

    ``None`` names become empty strings. For a MultiIndex the result is a
    single-element list holding one tuple with all level names.
    """
    names = [name if name is not None else '' for name in df.columns.names]
    return [tuple(names)] if len(names) > 1 else names
|
/save_excel-0.1.2-py3-none-any.whl/save_excel/utils/utils.py
| 0.654011 | 0.520253 |
utils.py
|
pypi
|
from ..utils import EXCEL_ALPHABET, cell_from_coordinates
from itertools import product
class Shape:
    """A (length, width) pair describing a range; zero area collapses to None."""

    def __init__(self, width, length):
        # A degenerate range (no rows or no columns) has no shape at all.
        self.__shape = None if width == 0 or length == 0 else (length, width)

    @property
    def shape(self):
        return self.__shape

    def __repr__(self):
        return f'Shape({self.__shape})'
class Coordinates:
    """A mutable (startcol, startrow) position, 0-based, non-negative."""

    def __init__(self, startcol, startrow):
        if startcol < 0 or startrow < 0:
            raise ValueError('startcol and startrow must be greater or equal to 0')
        self.__coord = (startcol, startrow)

    @property
    def coord(self):
        return self.__coord

    @coord.setter
    def coord(self, new_coord):
        # Note: the setter performs no validation (CellRange relies on this
        # to assign None for degenerate ranges).
        self.__coord = new_coord

    def increase(self, values):
        # Element-wise translation of the current coordinates.
        self.__coord = tuple(a + b for a, b in zip(self.__coord, values))

    def __eq__(self, other):
        return self.__coord == other.__coord

    def __repr__(self):
        return f'Coordinates({self.__coord})'
class CellRange(Shape, Coordinates):
    """An Excel cell range defined by a top-left coordinate plus a shape.

    A zero-area range (width or length of 0) has shape/coord of None and an
    empty range string.
    """

    def __init__(self, startcol=0, startrow=0, width=1, length=1):
        Shape.__init__(self, width=width, length=length)
        Coordinates.__init__(self, startcol=startcol, startrow=startrow)
        if self.shape is None:
            # Degenerate range: no anchor coordinate either.
            self.coord = None

    @property
    def origin(self):
        # Top-left cell name (e.g. 'A1'), or None for an empty range.
        if self.shape is None:
            return None
        else:
            col, row = self.coord
            return cell_from_coordinates(col=col, row=row)

    @property
    def fin(self):
        # Bottom-right cell name, or None for an empty range.
        if self.shape is None:
            return None
        else:
            col, row = self.coord
            # shape is stored as (length, width) — see Shape.__init__.
            length, width = self.shape
            return cell_from_coordinates(col=col+width-1, row=row+length-1)

    @property
    def range(self):
        # Excel range string such as 'A1:C5', or None for an empty range.
        if self.shape is None:
            return None
        else:
            return f'{self.origin}:{self.fin}'

    @property
    def rows(self):
        """
        Returns rows as coordinates
        """
        # NOTE(review): because this function contains 'yield', it is a
        # generator function — the 'return []' branch still yields an empty
        # *generator* to the caller (the [] value itself is discarded).
        if self.shape is None:
            return []
        else:
            for row in range(self.coord[1], self.coord[1] + self.shape[0]):
                yield row

    @property
    def cols(self):
        """
        Returns columns as coordinates
        """
        # Same generator-function caveat as 'rows' above.
        if self.shape is None:
            return []
        else:
            for col in range(self.coord[0], self.coord[0] + self.shape[1]):
                yield col

    @property
    def cells(self):
        """
        Returns individual cells from the range
        """
        col_list = [EXCEL_ALPHABET[r] for r in self.cols]
        # +1 converts 0-based row coordinates to Excel's 1-based row labels.
        row_list = [str(r+1) for r in self.rows]
        for cell in product(col_list, row_list):
            yield ''.join(cell)

    def __repr__(self):
        if self.shape is None:
            return f'CellRange(None)'
        else:
            return f'CellRange({self.range})'
|
/save_excel-0.1.2-py3-none-any.whl/save_excel/layout/cell_range.py
| 0.71889 | 0.220657 |
cell_range.py
|
pypi
|
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Tuple
import yaml
from appdirs import user_data_dir
from save_scummer.utils import format_timestamp, get_dir_files_by_date, get_dir_size, normalize_path
# All state lives under the per-user data dir (appdirs), e.g.
# ~/.local/share/save-scummer on Linux.
DATA_DIR = Path(user_data_dir()).joinpath('save-scummer')
CONFIG_PATH = DATA_DIR.joinpath('config.yml')
DEFAULT_BACKUP_DIR = DATA_DIR.joinpath('backups')
# Minimal valid config: an empty games mapping.
DEFAULT_CONFIG: Dict = {'games': {}}
def read_config() -> Dict[str, Any]:
    """Read config from the config file.

    Returns the default config when the file does not exist yet, or when it
    exists but is empty (``yaml.safe_load`` returns None for an empty
    document, which would crash every ``CONFIG['games']`` lookup).
    """
    if not CONFIG_PATH.is_file():
        DATA_DIR.mkdir(parents=True, exist_ok=True)
        return DEFAULT_CONFIG
    with CONFIG_PATH.open() as f:
        # FIX: fall back to the default config for an empty/blank YAML file.
        return yaml.safe_load(f) or DEFAULT_CONFIG
# Loaded once at import time; mutated in place by add_game()/update_metadata()
# and persisted via write_config(). GAMES is a snapshot of the titles.
CONFIG = read_config()
GAMES = list(CONFIG['games'].keys())
def add_game(title: str, source: str, clean_restore: bool = False):
    """Register (or update) a game entry in the config and persist it."""
    game = CONFIG['games'].setdefault(title, {})
    game['source'] = source
    game['clean_restore'] = clean_restore
    write_config(CONFIG)
def get_game_dirs(title: str) -> Tuple[Path, Path]:
    """Get the source and backup directories for the given game"""
    game = CONFIG['games'].get(title, {})
    source_dir = game.get('source')
    if not source_dir:
        raise ValueError(f'Game {title} not configured')
    # Honour a custom top-level backup_dir when configured; otherwise use
    # the default location, creating the per-game subdirectory on demand.
    base_dir = CONFIG.get('backup_dir') or DEFAULT_BACKUP_DIR
    backup_dir = Path(base_dir).joinpath(title)
    backup_dir.mkdir(parents=True, exist_ok=True)
    return normalize_path(source_dir), normalize_path(backup_dir)
def list_games() -> List[Dict[str, str]]:
    """Get formatted info on configured games and their backups

    Returns:
        A list of dicts containing formatted metadata
    """
    return [list_game(title) for title in CONFIG['games']]
def list_game(title: str, extra_details: bool = False) -> Dict[str, str]:
    """Get formatted info on a single game and its backups

    Args:
        title: Title of a configured game (must exist in CONFIG['games']).
        extra_details: Also include last-backup time, directories, and the
            individual backup-file listing.

    Returns:
        Mapping of human-readable field names to formatted values.
    """
    metadata = CONFIG['games'][title]
    source_pattern, backup_dir = get_game_dirs(title)
    backup_files = get_dir_files_by_date(backup_dir)
    # Format backup size and date/time info
    game_info = {
        'Title': title,
        'Total backups': f'{len(backup_files)} ({get_dir_size(backup_dir)})',
        'Last saved': format_timestamp(metadata.get('last_save_time')),
    }
    if extra_details:
        game_info['Last backed up'] = format_timestamp(metadata.get('last_backup_time'))
        game_info['Source directory'] = str(source_pattern)
        game_info['Backup directory'] = str(backup_dir)
        # One line per backup, newest first, prefixed with its index
        # (matching the restore-by-index option).
        formatted_files = [f'{i}:\t {f.name}' for i, f in enumerate(backup_files.keys())]
        game_info['Backup files'] = '\n' + '\n'.join(formatted_files)
    return game_info
def update_metadata(title: str, last_save_time: datetime):
    """Store metadata for a given game on the date/time of the last save (source) and backup"""
    game = CONFIG['games'][title]
    game['last_save_time'] = last_save_time.isoformat()
    game['last_backup_time'] = datetime.now().isoformat()
    write_config(CONFIG)
def write_config(new_config: Dict[str, Any]):
    """Write updated config to the config file"""
    with CONFIG_PATH.open('w') as stream:
        yaml.safe_dump(new_config, stream)
|
/save_scummer-0.0.1-py3-none-any.whl/save_scummer/config.py
| 0.664867 | 0.162546 |
config.py
|
pypi
|
from datetime import datetime, timedelta
from dateutil.parser import parse as parse_date
from os.path import getmtime
from pathlib import Path
from typing import Dict, Iterable, Union
from pytimeparse import parse as parse_time
StrOrPath = Union[Path, str]
DATETIME_FORMAT = '%Y-%m-%d %H:%M'
def format_file_size(n_bytes: int) -> str:
    """Given a number of bytes, return in human-readable format.

    Uses binary (1024-based) units, e.g. ``format_file_size(1536) == '1.50 KB'``.
    Values of a terabyte or more are reported in TB without further scaling.
    """
    filesize = float(n_bytes)
    for unit in ['bytes', 'KB', 'MB', 'GB', 'TB']:
        # Stop once the value fits the unit, or we run out of units.
        # (The original had an unreachable duplicate return after the loop;
        # the 'TB' case always returns inside it.)
        if filesize < 1024 or unit == 'TB':
            return f'{filesize:.2f} {unit}'
        filesize /= 1024
def format_timestamp(dt: Union[str, datetime]) -> str:
    """Reformat a datetime string into a common format, along with time elapsed since that time.

    Time elapsed is in human-readable form, e.g. "5 minutes ago" or "2 days ago."
    Adapted from: https://stackoverflow.com/a/1551394
    """
    if not dt:
        return 'never'
    if not isinstance(dt, datetime):
        dt = parse_date(dt)
    delta = datetime.now() - dt
    if delta.days == 0:
        seconds = delta.seconds
        if seconds < 60:
            elapsed = f'{seconds} seconds ago'
        elif seconds < 3600:
            elapsed = f'{int(seconds / 60)} minutes ago'
        else:
            elapsed = f'{int(seconds / 3600)} hours ago'
    elif delta.days == 1:
        elapsed = 'yesterday'
    else:
        elapsed = f'{delta.days} days ago'
    return f'{dt.strftime(DATETIME_FORMAT)} ({elapsed})'
def get_datetime_by_age(age: str) -> datetime:
    # Translate a human age expression (e.g. '1h 30m', parsed by pytimeparse)
    # into the absolute datetime that far in the past.
    age_delta = timedelta(seconds=parse_time(age))
    return datetime.now() - age_delta
def get_dir_files_by_date(path: Path) -> Dict[Path, datetime]:
    """Get all files in the specified directory, sorted by creation date (desc),
    along with the parsed datetime.
    """
    try:
        entries = list(path.iterdir())
    except IOError:
        # Missing or unreadable directory: treat as no files.
        return {}
    entries.sort(key=getmtime, reverse=True)
    return {entry: datetime.fromtimestamp(entry.stat().st_mtime) for entry in entries}
def get_dir_size(path: Path) -> str:
    """Get (non-recursive) sum of file sizes in the given directory, in human-readable format"""
    try:
        total = sum(entry.stat().st_size for entry in path.iterdir())
    except IOError:
        # Missing or unreadable directory: report zero.
        return '0 bytes'
    return format_file_size(total)
def get_latest_modified(paths: Iterable[Path]) -> datetime:
    """Get the most recent 'modified on' timestamp (ISO format) from the paths given.

    For a save directory with multiple files, this is the best indicator of when the save was
    created, as not all files may be modified with each save.
    """
    latest = max(datetime.fromtimestamp(p.stat().st_mtime) for p in paths)
    # Sub-second precision is noise for this purpose.
    return latest.replace(microsecond=0)
def normalize_path(path: StrOrPath) -> Path:
    """Return *path* as an absolute Path with '~' expanded."""
    expanded = Path(path).expanduser()
    return expanded.resolve()
|
/save_scummer-0.0.1-py3-none-any.whl/save_scummer/utils.py
| 0.804981 | 0.453746 |
utils.py
|
pypi
|
from datetime import datetime
from dateutil.parser import parse as parse_date
from glob import glob
from logging import getLogger
from pathlib import Path
from shutil import rmtree
from typing import Dict, List, Tuple, Union
from zipfile import ZIP_DEFLATED, ZipFile
from save_scummer.config import CONFIG, get_game_dirs, update_metadata
from save_scummer.utils import (
StrOrPath,
format_file_size,
format_timestamp,
get_datetime_by_age,
get_dir_files_by_date,
get_latest_modified,
normalize_path,
)
logger = getLogger(__name__)
def get_included_files(source_pattern: StrOrPath) -> List[Tuple[Path, Path]]:
    """Get a list of files to backup, resolving user paths and glob patterns.

    Returns:
        List of ``(absolute_path, relative_path)``
    """
    # Default to recursive include w/ subdirs if a glob pattern is not specified
    source_pattern = str(normalize_path(source_pattern))
    if not source_pattern.endswith('*'):
        source_pattern += '/**'
    # NOTE(review): assumes '/' separators; on Windows str(Path) produces
    # backslashes, so this rsplit may not strip the glob suffix — confirm.
    base_dir = source_pattern.rsplit('/', 1)[0]
    abs_paths = [normalize_path(path) for path in glob(source_pattern, recursive=True)]
    # Exclude the base directory itself; keep paths relative to it so the
    # archive layout mirrors the source tree.
    return [(path, path.relative_to(base_dir)) for path in abs_paths if str(path) != base_dir]
def make_backup(title: str, short_desc: str = None) -> str:
    """Make a backup for the specified game. Backup will be named using the time the last save
    was created, optionally with a short description.

    Args:
        title: Title of a configured game.
        short_desc: Optional tag appended to the archive name
            (lowercased, spaces become underscores).

    Returns:
        Status message

    Raises:
        ValueError: if the game's source pattern matches no files.
    """
    logger.info(f'Starting backup for {title}')
    source_pattern, backup_dir = get_game_dirs(title)
    paths = get_included_files(source_pattern)
    if not paths:
        raise ValueError('No files are in the specified path')
    # Determine backup path & filename
    last_save_time = get_latest_modified([path[0] for path in paths])
    short_desc = '-' + short_desc.lower().replace(' ', '_') if short_desc else ''
    archive_path = backup_dir.joinpath(f'{title}-{last_save_time.isoformat()}{short_desc}.zip')
    # Write paths inside archive relative to base (source) path
    with ZipFile(archive_path, 'w', compression=ZIP_DEFLATED) as f:
        for abs_path, rel_path in paths:
            logger.debug(f'Writing {abs_path} -> {rel_path}')
            f.write(abs_path, rel_path)
    # Record save/backup times so the 'list' output stays accurate.
    update_metadata(title, last_save_time)
    archive_size = format_file_size(archive_path.stat().st_size)
    msg = (
        f'Backing up {len(paths)} files saved {format_timestamp(last_save_time)}.\n'
        f'Backup created: {archive_path} ({archive_size}).'
    )
    logger.info(msg)
    return msg
def restore_backup(
    title: str, filename: str = None, index: int = None, age: str = None, date: str = None
) -> str:
    """Restore a backup matching the given specification(s).

    Makes a backup of current state before overwriting.

    Args:
        title: Title of game or application
        filename: Absolute or relative path to backup archive
        index: Index of backup to restore
        age: Min age of backup to restore (as a time expression string)
        date: Max date of backup to restore (as a timestamp string)

    Returns:
        Status message
    """
    logger.info(f'Starting restore for {title}')
    source_dir, backup_dir = get_game_dirs(title)
    backups = get_dir_files_by_date(backup_dir)
    backup_paths = list(backups.keys())
    n_backups = len(backup_paths)
    # Choose backup to restore based on specifier(s)
    if filename:
        archive = Path(filename)
    elif index:
        # NOTE(review): index=0 is falsy and falls through to the default
        # branch; harmless, since the default also restores backup_paths[0].
        if abs(index) > n_backups:
            raise ValueError(f'Index {index} does not exist; {n_backups} backups are available')
        archive = backup_paths[index]
    elif age:
        archive = get_backup_by_age(backups, age)
    elif date:
        archive = get_backup_by_date(backups, date)
    # If no backup specifiers were given, restore the most recent backup
    else:
        archive = backup_paths[0]
    if not archive.is_absolute():
        # Bare filenames are taken relative to the game's backup directory.
        archive = backup_dir.joinpath(archive)
    logger.info(f'Backup file selected: {archive}')
    # First backup current files before overwriting, and delete them if clean_restore is specified
    make_backup(title, short_desc='pre-restore')
    if CONFIG['games'][title].get('clean_restore') is True:
        rmtree(source_dir)
    # Restore the selected backup
    source_dir.mkdir(parents=True, exist_ok=True)
    with ZipFile(archive) as f:
        f.extractall(source_dir)
    return f'Restored backup {archive} to {source_dir}'
def get_backup_by_age(backups: Dict[Path, datetime], age: str) -> Path:
    # Convert the age expression (e.g. '2d') into an absolute cutoff date and
    # delegate to the date-based lookup.
    return get_backup_by_date(backups, get_datetime_by_age(age))
def get_backup_by_date(backups: Dict[Path, datetime], target_date: Union[datetime, str]) -> Path:
    """Return the newest backup created on or before ``target_date``.

    Args:
        backups: Mapping of backup paths to creation times, sorted newest first
        target_date: Cutoff, as a datetime or a parseable timestamp string

    Returns:
        Path of the matching backup archive

    Raises:
        ValueError: If no backup exists on or before the target date
    """
    if not isinstance(target_date, datetime):
        target_date = parse_date(target_date)
    # Backups are already sorted by date descending; get the first one on or before the target date
    for backup_path, creation_date in backups.items():
        if creation_date <= target_date:
            return backup_path
    # Previously raised NotImplementedError, which mislabeled a normal
    # "no matching backup" data condition as a missing feature.
    raise ValueError(f'No backups found on or before {target_date}')
|
/save_scummer-0.0.1-py3-none-any.whl/save_scummer/backup.py
| 0.838581 | 0.196865 |
backup.py
|
pypi
|
from __future__ import division, print_function
import os
import json
import logging
from tempfile import NamedTemporaryFile
from subprocess import check_output, CalledProcessError, PIPE
from matplotlib import rcParams
from matplotlib.figure import Figure
from matplotlib.backends.backend_pdf import PdfPages
# Optional write dependencies:
try:
from PIL import Image, PngImagePlugin
except ImportError:
Image = None
try:
from PyPDF2 import PdfFileReader
except ImportError:
PdfFileReader = None
# Python 3 compatibility shim: Python 3 has no ``basestring`` builtin.
try:
    basestring
except NameError:
    # Accept both str and bytes wherever a "string" type is expected.
    basestring = (str, bytes)
__all__ = ["savefig"]
# Save a reference to the matplotlib savefig implementation.
mpl_savefig = Figure.savefig
def get_git_info(include_diff=False):
    """Return commit metadata for the current git repository, or None.

    The result maps "git-hash", "git-date" and "git-author" to values from
    the most recent commit. When ``include_diff`` is True and there are
    uncommitted changes, "git-diff" holds the output of ``git diff``.
    Returns None when git is unavailable or the cwd is not a repository.
    """
    diff = None
    if include_diff:
        # Check the status to see if there are any uncommitted changes.
        try:
            diff = check_output("git diff", shell=True, stderr=PIPE).decode()
        except CalledProcessError:
            return None
    # Get the commit information.
    cmd = "git log -1 --date=iso8601 --format=\"format:%H || %ad || %an\""
    try:
        result = check_output(cmd, shell=True, stderr=PIPE).decode()
    except CalledProcessError:
        return None
    # Build the results dictionary and include changes if there are any.
    keys = ["git-hash", "git-date", "git-author"]
    info = dict(zip(keys, result.split(" || ")))
    if diff:
        info["git-diff"] = diff
    return info
def savefig_png(self, fn, *args, **kwargs):
    """Save the figure as a PNG and embed git metadata in its text chunks.

    The figure is saved first, then the commit info (and optionally the
    diff) is injected after the fact via PIL. Returns whatever the stock
    matplotlib savefig returns.
    """
    include_diff = kwargs.pop("include_diff", False)
    # This is a hack to deal with filenames without extensions. Not sure why
    # this is necessary.
    fn = os.path.splitext(fn)[0] + ".png"
    # We'll start by saving the figure because the metadata is going to be
    # inserted after the fact.
    ret = mpl_savefig(self, fn, *args, **kwargs)
    # If PIL isn't installed, we'll just call the standard savefig.
    if Image is None:
        # logging.warn is deprecated (removed in Python 3.13); use warning.
        logging.warning(
            "PIL or pillow must be installed to add metadata to PNG files.")
        return ret
    # Get the git commit information.
    git_info = get_git_info(include_diff=include_diff)
    if git_info is None:
        return ret
    # Inject the git info into the figure as metadata.
    img = Image.open(fn)
    meta = PngImagePlugin.PngInfo()
    for k, v in git_info.items():
        meta.add_text(k, v)
    img.save(fn, "png", pnginfo=meta)
    return ret
def savefig_pdf(self, fn, *args, **kwargs):
    """Save the figure as a PDF with git metadata in the document info.

    The git info is JSON-encoded into the PDF "Keywords" field via PdfPages.
    Falls back to the stock savefig when no git info is available.
    """
    include_diff = kwargs.pop("include_diff", False)
    # Get the git commit information.
    git_info = get_git_info(include_diff=include_diff)
    if git_info is None:
        return mpl_savefig(self, fn, *args, **kwargs)
    # Build the PDF object that will take the metadata.
    fn = os.path.splitext(fn)[0] + ".pdf"
    kwargs["format"] = "pdf"
    fig = PdfPages(fn)
    # Save the figure.
    ret = mpl_savefig(self, fig, *args, **kwargs)
    # Add the metadata.
    metadata = fig.infodict()
    metadata["Keywords"] = json.dumps(git_info, sort_keys=True)
    # Commit the changes.
    fig.close()
    return ret
def savefig(self, fn, *args, **kwargs):
    """Dispatch to the metadata-aware saver for the requested format.

    PNG and PDF outputs get git metadata embedded; any other format (or a
    non-string filename) falls back to matplotlib's standard savefig.
    """
    if not isinstance(fn, basestring):
        # logging.warn is deprecated (removed in Python 3.13); use warning.
        logging.warning("The savefig module only supports filenames.")
        return mpl_savefig(self, fn, *args, **kwargs)
    # Figure out the format.
    ext = os.path.splitext(fn)[1]
    fmt = kwargs.get("format", None)
    fmt = (fmt if fmt is not None
           else ext[1:] if len(ext)
           else rcParams["savefig.format"]).lower()
    # Deal with the different formats.
    if fmt == "png":
        return savefig_png(self, fn, *args, **kwargs)
    if fmt == "pdf":
        return savefig_pdf(self, fn, *args, **kwargs)
    # Fall back on the standard savefig if we don't know how to deal with the
    # format.
    logging.warning("Unsupported savefig format: '{0}'".format(fmt))
    return mpl_savefig(self, fn, *args, **kwargs)
def monkey_patch(include_diff=False):
    """Globally replace ``Figure.savefig`` with the metadata-aware version.

    Args:
        include_diff: Default for whether to embed ``git diff`` output;
            callers may still override per call.
    """
    # Monkey patch matplotlib to call our savefig instead of the standard
    # version.
    def sf(*args, **kwargs):
        # Respect an explicit include_diff from the caller.
        kwargs["include_diff"] = kwargs.get("include_diff", include_diff)
        return savefig(*args, **kwargs)
    sf.__doc__ = mpl_savefig.__doc__
    Figure.savefig = sf
def get_file_info(fn):
    """
    Get the metadata stored in an image file returning ``None`` on failure.
    """
    ext = os.path.splitext(fn)[1].lower()
    if ext == ".png":
        if Image is None:
            raise ImportError("PIL or pillow must be installed to read "
                              "metadata from PNG files.")
        # PNG text chunks are exposed directly on the image info dict.
        return Image.open(fn).info
    if ext == ".pdf":
        if PdfFileReader is None:
            raise ImportError("PyPDF2 must be installed to read "
                              "metadata from PDF files.")
        with open(fn, "rb") as fh:
            document_info = PdfFileReader(fh).getDocumentInfo()
            if "/Keywords" not in document_info:
                return None
            # Keywords hold the JSON payload written by savefig_pdf.
            try:
                return json.loads(document_info["/Keywords"])
            except ValueError:
                return None
    return None
def test_png():
    """Smoke test: PNG metadata embedding round-trips the current git info.

    Requires matplotlib and a git checkout; asserts the info read back from
    the saved file matches ``get_git_info()``.
    """
    monkey_patch()
    import matplotlib.pyplot as pl
    # Get the current git info.
    git_info = get_git_info()
    # Save an empty figure to a temporary file and check that the git info
    # gets stored correctly.
    with NamedTemporaryFile(suffix=".png") as f:
        fn = f.name
        pl.savefig(fn)
        info = get_file_info(fn)
        assert all([v == info[k] for k, v in git_info.items()])
    # Now try without a file extension.
    with NamedTemporaryFile(suffix=".png") as f:
        fn = f.name
        pl.savefig(os.path.splitext(fn)[0], format="png")
        info = get_file_info(fn)
        assert all([v == info[k] for k, v in git_info.items()])
    # If the default file-type is PNG, test that too.
    if not rcParams["savefig.format"].lower() == "png":
        return
    with NamedTemporaryFile(suffix=".png") as f:
        fn = f.name
        pl.savefig(os.path.splitext(fn)[0])
        info = get_file_info(fn)
        assert all([v == info[k] for k, v in git_info.items()])
def test_pdf():
    """Smoke test: PDF metadata embedding round-trips the current git info.

    Requires matplotlib and a git checkout. Temp files use ``delete=False``
    and are removed in ``finally``.
    """
    monkey_patch()
    import matplotlib.pyplot as pl
    # Get the current git info.
    git_info = get_git_info()
    # Save an empty figure to a temporary file and check that the git info
    # gets stored correctly.
    try:
        with NamedTemporaryFile(suffix=".pdf", delete=False) as f:
            fn = f.name
            pl.savefig(fn)
            info = get_file_info(fn)
            assert all([v == info[k] for k, v in git_info.items()])
    finally:
        os.unlink(fn)
    # Now try without a file extension.
    try:
        with NamedTemporaryFile(suffix=".pdf", delete=False) as f:
            fn = f.name
            pl.savefig(os.path.splitext(fn)[0], format="pdf")
            info = get_file_info(fn)
            assert all([v == info[k] for k, v in git_info.items()])
    finally:
        os.unlink(fn)
    # If the default file-type is PNG, test that too.
    if not rcParams["savefig.format"].lower() == "pdf":
        return
    try:
        with NamedTemporaryFile(suffix=".pdf", delete=False) as f:
            fn = f.name
            pl.savefig(os.path.splitext(fn)[0])
            info = get_file_info(fn)
            assert all([v == info[k] for k, v in git_info.items()])
    finally:
        os.unlink(fn)
# CLI entry point: with --test, run the self-tests; otherwise inspect the
# git metadata embedded in a saved figure file.
if __name__ == "__main__":
    import sys
    import argparse
    # Testing.
    if "--test" in sys.argv:
        print("Testing PNG support...")
        test_png()
        print("Testing PDF support...")
        test_pdf()
        sys.exit(0)
    # Parse the command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("filename", help="The file to inspect")
    parser.add_argument("-d", "--diff", action="store_true",
                        help="Get the diff.")
    args = parser.parse_args()
    # Get the file info.
    info = get_file_info(args.filename)
    if info is None:
        print("Couldn't get info from file: {0}".format(args.filename))
        sys.exit(0)
    # Show the diff if that was requested.
    if args.diff:
        if "git-diff" in info:
            print(info["git-diff"])
            sys.exit(0)
        print("No diff found.")
    # Print the summary.
    keys = ["git-hash", "git-date", "git-author"]
    for k in keys:
        v = info.get(k, None)
        if v is None:
            print("Missing key: '{0}'".format(k))
        else:
            print("{0}: {1}".format(k, v))
|
/savefig-0.0.4.tar.gz/savefig-0.0.4/savefig.py
| 0.614394 | 0.221751 |
savefig.py
|
pypi
|
import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
    """ Gaussian distribution class for calculating and
    visualizing a Gaussian distribution.

    Attributes:
        mean (float) representing the mean value of the distribution
        stdev (float) representing the standard deviation of the distribution
        data_list (list of floats) a list of floats extracted from the data file
    """
    def __init__(self, mu=0, sigma=1):
        Distribution.__init__(self, mu, sigma)

    def calculate_mean(self):
        """Function to calculate the mean of the data set.

        Args:
            None

        Returns:
            float: mean of the data set
        """
        avg = 1.0 * sum(self.data) / len(self.data)
        self.mean = avg
        return self.mean

    def calculate_stdev(self, sample=True):
        """Function to calculate the standard deviation of the data set.

        Args:
            sample (bool): whether the data represents a sample or population

        Returns:
            float: standard deviation of the data set
        """
        # Bessel's correction: divide by n-1 for a sample estimate.
        if sample:
            n = len(self.data) - 1
        else:
            n = len(self.data)
        mean = self.calculate_mean()
        sigma = 0
        for d in self.data:
            sigma += (d - mean) ** 2
        sigma = math.sqrt(sigma / n)
        self.stdev = sigma
        return self.stdev

    def plot_histogram(self):
        """Function to output a histogram of the instance variable data using
        matplotlib pyplot library.

        Args:
            None

        Returns:
            None
        """
        plt.hist(self.data)
        plt.title('Histogram of Data')
        plt.xlabel('data')
        plt.ylabel('count')

    def pdf(self, x):
        """Probability density function calculator for the gaussian distribution.

        Args:
            x (float): point for calculating the probability density function

        Returns:
            float: probability density function output
        """
        return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)

    def plot_histogram_pdf(self, n_spaces=50):
        """Function to plot the normalized histogram of the data and a plot of the
        probability density function along the same range

        Args:
            n_spaces (int): number of data points

        Returns:
            list: x values for the pdf plot
            list: y values for the pdf plot
        """
        min_range = min(self.data)
        max_range = max(self.data)
        # calculates the interval between x values
        interval = 1.0 * (max_range - min_range) / n_spaces
        x = []
        y = []
        # calculate the x values to visualize
        for i in range(n_spaces):
            tmp = min_range + interval*i
            x.append(tmp)
            y.append(self.pdf(tmp))
        # make the plots
        fig, axes = plt.subplots(2, sharex=True)
        fig.subplots_adjust(hspace=.5)
        axes[0].hist(self.data, density=True)
        axes[0].set_title('Normed Histogram of Data')
        axes[0].set_ylabel('Density')
        axes[1].plot(x, y)
        axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        # BUG FIX: this previously re-labeled axes[0]; the pdf subplot is axes[1].
        axes[1].set_ylabel('Density')
        plt.show()
        return x, y

    def __add__(self, other):
        """Function to add together two Gaussian distributions

        Args:
            other (Gaussian): Gaussian instance

        Returns:
            Gaussian: Gaussian distribution
        """
        result = Gaussian()
        result.mean = self.mean + other.mean
        # Variances add for independent Gaussians, hence the quadrature sum.
        result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
        return result

    def __repr__(self):
        """Function to output the characteristics of the Gaussian instance

        Args:
            None

        Returns:
            string: characteristics of the Gaussian
        """
        return "mean {}, standard deviation {}".format(self.mean, self.stdev)
|
/savimonty_distributions-0.1.tar.gz/savimonty_distributions-0.1/savimonty_distributions/Gaussiandistribution.py
| 0.688364 | 0.853058 |
Gaussiandistribution.py
|
pypi
|
import contextlib
class APIContext(object):
    """
    Context object passed as the second argument (after self) to Resources and Fields.

    The context object provides a hook into the underlying means to translates
    resources to / from URIs.

    The context has a streaming_response attribute which defaults to False. If
    this is set to True the get method of the resource should not return a
    dict, but an iterable of strings. It is the job of the resource to make
    sure the content_type of the request, ctx.formatter.content_type is
    respected. This can be used as a performance improvement when returning
    large result sets where fragments of them can be pre-computed/cached and
    stitched in to a final result.
    """
    def __init__(self, base_uri, root_resource, formatter, request=None):
        self.base_uri = base_uri
        self.root_resource = root_resource
        self.formatter = formatter
        self.request = request
        self.expiration = None
        self._headers_dict = {}
        # Stack of objects currently being processed; see push/pop/peek/target.
        self.object_stack = []
        self.streaming_response = False

    def resolve_resource_uri(self, uri):
        """
        Resolves the resource that corresponds to the current URI,
        but only within the same resource tree.
        """
        if not uri.startswith(self.base_uri):
            return None
        return self.resolve_resource_path(uri[len(self.base_uri):])

    def resolve_resource_path(self, resource_path):
        """
        Resolves a resource using a resource path (not a full URI),
        but only within the same resource tree.
        """
        resource = self.root_resource
        cur_resource_path = ''
        for path_fragment in _split_resource_path(resource_path):
            cur_resource_path = cur_resource_path + '/' + path_fragment
            resource = resource.get_child_resource(self, path_fragment)
            if not resource:
                return None
            # Record the path on the resource the first time it is resolved.
            if resource.resource_path is None:
                resource.resource_path = cur_resource_path
        return resource

    def build_resource_uri(self, resource):
        """
        Given a Resource with a resource_path, provides the correspond URI.
        Raises a ValueError if the resource_path of the Resource is None.
        """
        resource_path = resource.resource_path
        if resource_path is None:
            raise ValueError(u'unaddressable resource')
        return self.base_uri + resource_path

    def set_header(self, header, value):
        """
        Updates self.header_dict property for processing in the view where the Response headers should be set from
        header_dict
        """
        self._headers_dict[header] = value
        return self._headers_dict

    @property
    def headers(self):
        # Lazily stamp the Expires header from the tracked min expiration.
        if self.expiration:
            self.set_header('Expires', self.expiration.isoformat('T'))
        return self._headers_dict

    def set_expires_header(self, new_expiration):
        """
        Keeps a min expiration in memory and sets it on header request
        """
        self.expiration = new_expiration if not self.expiration else min(self.expiration, new_expiration)

    @contextlib.contextmanager
    def target(self, target):
        """Push ``target`` onto the object stack for the duration of the block."""
        self.push(target)
        try:
            yield
        finally:
            # BUG FIX: pop in a finally block so an exception raised inside
            # the with-body cannot leave a stale object on the stack.
            self.pop()

    def push(self, target):
        self.object_stack.append(target)

    def pop(self):
        return self.object_stack.pop()

    def peek(self, n=1):
        """Return the n-th object from the top of the stack (1-based)."""
        return self.object_stack[-n]
def _split_resource_path(resource_path):
path_fragments = resource_path.split('/')
if path_fragments[-1] == '':
return path_fragments[:-1]
else:
return path_fragments
|
/savory-pie-0.4.11.tar.gz/savory-pie-0.4.11/savory_pie/context.py
| 0.810366 | 0.348091 |
context.py
|
pypi
|
from savory_pie.errors import AuthorizationError
def authorization_adapter(field, ctx, source_dict, target_obj):
    """
    Default adapter works on single field (non iterable)
    """
    name = field._compute_property(ctx)
    # Normalize both the incoming value and the current model value so they
    # compare like-for-like.
    incoming = field.to_python_value(ctx, source_dict[name])
    current = field.to_python_value(ctx, field._get(target_obj))
    return name, incoming, current
def datetime_auth_adapter(field, ctx, source_dict, target_obj):
    """
    Adapter for fields of date/datetime/time
    """
    name = field._compute_property(ctx)
    # Only the incoming value needs conversion; the target is read raw.
    return (
        name,
        field.to_python_value(ctx, source_dict[name]),
        field._get(target_obj),
    )
def subobject_auth_adapter(field, ctx, source_dict, target_obj):
    """
    Adapter for fields of savory_pie.fields.SubObjectResourceField, or subclasses thereof
    """
    name = field._compute_property(ctx)
    raw = source_dict[name]
    source = raw['resourceUri'] if raw is not None else None
    # this is essentially the same logic as in field.get_subresource(), but
    # ignores source_dict as we're only interested in target's resourceUri
    subobject = getattr(target_obj, field.name)
    target = (
        ctx.build_resource_uri(field._resource_class(subobject))
        if subobject is not None
        else None
    )
    return name, source, target
def uri_auth_adapter(field, ctx, source_dict, target_obj):
    """
    Authorization adapter for use in fields representing a 1 to many relationship. Is used when you want to prevent
    unauthorized users from changing the associations of different models.

    Returns a (name, source, target) triple where source/target are the
    resource URI(s) from the incoming dict and from the current model state;
    both are None when either side is empty/missing.
    """
    name = field._compute_property(ctx)
    source_field = source_dict[name]
    target_field = getattr(target_obj, field.name)
    # Imported here, presumably to avoid a circular import with savory_pie.fields.
    from savory_pie.fields import URIResourceField, URIListResourceField, IterableField
    if source_field and target_field:
        if isinstance(field, IterableField):
            # Iterable relation: compare sorted lists of URIs.
            source = [source_field_item.get('resourceUri', None) for source_field_item in source_field]
            target = [ctx.build_resource_uri(field._resource_class(target_item))
                      for target_item in field.get_iterable(target_field)]
            source.sort()
            target.sort()
        elif isinstance(field, URIResourceField):
            # Single URI: compare the values directly.
            source = source_field
            target = ctx.build_resource_uri(field._resource_class(target_field))
        elif isinstance(field, URIListResourceField):
            # List of URIs on the source side; build and sort the target list.
            source = source_field
            target = [ctx.build_resource_uri(field._resource_class(target_item))
                      for target_item in field.get_iterable(target_field)]
            source.sort()
            target.sort()
        else:
            raise TypeError('uri_auth_adapter can only be used with fields of type URIResourceField,' +
                            ' URIListResourceField or IterableField')
    else:
        # Nothing to compare on one side or the other.
        source = None
        target = None
    return name, source, target
class authorization(object):
    """
    Authorization decorator, takes a permission dictionary key and an adapter function

    @auth_adapter: an adapter function that takes ctx, source_dict, target_obj and
    returns ctx, target_obj, source, target parameters

    Use:
        @authorization(adapter)
    """
    def __init__(self, auth_adapter):
        self.auth_adapter = auth_adapter

    def __call__(self, fn):
        """
        If the user does not have an the authorization raise an AuthorizationError
        """
        def inner(field, ctx, source_dict, target_obj):
            permission = field.permission
            if permission:
                # A permission may carry its own adapter; otherwise fall back
                # to the one this decorator was constructed with.
                adapter = getattr(permission, 'auth_adapter', None) or self.auth_adapter
                name, source, target = adapter(field, ctx, source_dict, target_obj)
                if not permission.is_write_authorized(ctx, target_obj, source, target):
                    raise AuthorizationError(name)
            return fn(field, ctx, source_dict, target_obj)
        return inner
|
/savory-pie-0.4.11.tar.gz/savory-pie-0.4.11/savory_pie/auth.py
| 0.681833 | 0.156137 |
auth.py
|
pypi
|
from datetime import datetime
def to_datetime(milliseconds):
    """
    Converts milliseconds in UTC (e.g., from JS `new Date().getTime()` into Python datetime
    """
    try:
        # utcfromtimestamp expects seconds, so scale down from milliseconds.
        return datetime.utcfromtimestamp(int(milliseconds) / 1000)
    except Exception:
        # Anything unparseable is passed through unchanged.
        return milliseconds
def to_list(items):
    """
    Converts comma-delimited string into list of items
    """
    try:
        return items.split(',')
    except Exception:
        # Non-string values are passed through unchanged.
        return items
def get_exception_message(err):
    """
    Python 2 has the 'message' attribute for exceptions, however python 3 does not. This helper method
    gives us the ability to sort out which one to use.

    :param err: Some exception
    :return: The exception message string
    """
    # python 2 exceptions expose .message directly
    if hasattr(err, 'message'):
        return err.message
    # python 3
    return str(err)
class ParamsDict(object):
    """
    Simple class that wraps a dictionary and returns a list.

    This is used because filters support getting a list of values given a parameter,
    so when using a filter within a queryset, the filter (in the format of a dict)
    needs to support list related functions, this class acts a wrapper around the filter.

    Parameters:

        ``params``
            This is a dictionary of parameters
    """
    def __init__(self, params):
        self._params = params

    def keys(self):
        return self._params.keys()

    def __contains__(self, key):
        return key in self._params

    def __getitem__(self, key):
        # EAFP: single lookup instead of the previous contains-then-get, and
        # include the missing key in the error (previously a bare KeyError).
        try:
            return self._params[key]
        except KeyError:
            raise KeyError(key)

    def get(self, key, default=None):
        return self._params.get(key, default)

    def get_as(self, key, type, default=None):
        """Return the value for ``key`` converted via ``type``, or ``default``.

        NOTE: ``type`` shadows the builtin but is part of the public signature.
        """
        value = self._params.get(key, None)
        return default if value is None else type(value)

    def get_list(self, key):
        """Return the value for ``key`` wrapped in a single-element list.

        Raises:
            KeyError: If the key is missing.
        """
        return [self._params[key]]

    def get_list_of(self, key, type):
        """Return each element of the value for ``key`` converted via ``type``.

        Returns an empty list when the key is missing or maps to None.
        """
        values = self._params.get(key, None)
        if values is None:
            return []
        return [type(x) for x in values]
|
/savory-pie-0.4.11.tar.gz/savory-pie-0.4.11/savory_pie/utils.py
| 0.91151 | 0.379034 |
utils.py
|
pypi
|
try:
import ujson as json
except ImportError:
from warnings import warn
warn('Using plain JSON instead of uJSON, performance may be degraded.')
import json
import logging
from haystack import indexes
from haystack import fields as haystack_fields
from savory_pie.context import APIContext
from savory_pie.django.utils import Related
logger = logging.getLogger(__name__)
class ResourceIndex(indexes.SearchIndex):
    """Haystack SearchIndex base that prefetches a resource's related data."""

    def prefetch_related(self, related):
        # Hook for subclasses to register extra prefetches; no-op by default.
        pass

    def get_model(self):
        """Return the model class indexed via the associated resource."""
        return self.resource_class.model_class

    def _prefetch_related(self, qs):
        # Let the resource declare its related objects, then apply them to qs.
        related = Related()
        ctx = APIContext('', None, None)
        self.resource_class.prepare(ctx, related)
        self.prefetch_related(related)
        return related.prepare(qs)

    def index_queryset(self, using=None):
        """Queryset of all model instances, with related data prefetched."""
        return self._prefetch_related(self.get_model().objects.all())

    def build_queryset(self, start_date=None, end_date=None, using=None):
        # Date-bounded rebuilds are not supported; index everything.
        return self.index_queryset(using=using)
class HaystackField(haystack_fields.CharField):
    """
    This field can be used to store the JSON from an API call into a Haystack search database.
    It typically wouldn't be indexed (but could be if you wanted). Typical usage:

        from haystack import indexes, fields
        from savory_pie.django.fields import HaystackField

        class FooIndex(indexes.SearchIndex, indexes.Indexable):
            foo = fields.CharField(...)
            bar = fields.CharField(...)
            api = HaystackField(base_uri='/my/api/path/',
                                formatter=JSONFormatter(),
                                resource=FooResource)
    """
    def __init__(self, *args, **kwargs):
        self._formatter = kwargs.pop('formatter', None)
        self._ctx = APIContext('SAVORY_PIE_HOSTNAME', None, self._formatter)
        self._resource = kwargs.pop('resource', None)
        self.indexed = kwargs.pop('indexed', False)
        # BUG FIX: this previously popped 'indexed' a second time (always
        # yielding the default True, and leaking a 'stored' kwarg through to
        # the base class). Pop 'stored' so the keyword is honored.
        self.stored = kwargs.pop('stored', True)
        super(HaystackField, self).__init__(*args, **kwargs)

    def prepare(self, obj):
        """Render the resource for ``obj`` through the formatter (or plain JSON)."""
        try:
            # python 2
            from cStringIO import StringIO
        except ImportError:
            # python 3
            from io import StringIO
        api_data = self._resource(obj).get(self._ctx, {})
        if self._formatter is None:
            return json.dumps(api_data)
        else:
            output = StringIO()
            self._formatter.write_to(api_data, output)
            return output.getvalue()
|
/savory-pie-0.4.11.tar.gz/savory-pie-0.4.11/savory_pie/django/haystack_field.py
| 0.55254 | 0.201577 |
haystack_field.py
|
pypi
|
import six
class OpenApiException(Exception):
    """The base exception class for all OpenAPI client exceptions."""
class ApiTypeError(OpenApiException, TypeError):
    def __init__(self, msg, path_to_item=None, valid_classes=None,
                 key_type=None):
        """ Raises an exception for TypeErrors

        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (list): a list of keys an indices to get to the
                current_item; None if unset
            valid_classes (tuple): the primitive classes that current item
                should be an instance of; None if unset
            key_type (bool): True if our value is a key in a dict,
                False if it is a value in a dict or an item in a list;
                None if unset
        """
        self.path_to_item = path_to_item
        self.valid_classes = valid_classes
        self.key_type = key_type
        # Append the rendered path so the message pinpoints the bad item.
        if path_to_item:
            msg = "{0} at {1}".format(msg, render_path(path_to_item))
        super(ApiTypeError, self).__init__(msg)
class ApiValueError(OpenApiException, ValueError):
    def __init__(self, msg, path_to_item=None):
        """
        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (list): the path to the exception in the
                received_data dict. None if unset
        """
        self.path_to_item = path_to_item
        # Append the rendered path so the message pinpoints the bad item.
        if path_to_item:
            msg = "{0} at {1}".format(msg, render_path(path_to_item))
        super(ApiValueError, self).__init__(msg)
class ApiAttributeError(OpenApiException, AttributeError):
    def __init__(self, msg, path_to_item=None):
        """
        Raised when an attribute reference or assignment fails.

        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (None/list): the path to the exception in the
                received_data dict
        """
        self.path_to_item = path_to_item
        # Append the rendered path so the message pinpoints the bad item.
        if path_to_item:
            msg = "{0} at {1}".format(msg, render_path(path_to_item))
        super(ApiAttributeError, self).__init__(msg)
class ApiKeyError(OpenApiException, KeyError):
    def __init__(self, msg, path_to_item=None):
        """
        Args:
            msg (str): the exception message

        Keyword Args:
            path_to_item (None/list): the path to the exception in the
                received_data dict
        """
        self.path_to_item = path_to_item
        # Append the rendered path so the message pinpoints the bad item.
        if path_to_item:
            msg = "{0} at {1}".format(msg, render_path(path_to_item))
        super(ApiKeyError, self).__init__(msg)
class ApiException(OpenApiException):
    def __init__(self, status=None, reason=None, http_resp=None):
        """Capture HTTP failure details, preferring the raw response object."""
        if http_resp:
            # Pull everything off the response when one is supplied.
            self.status = http_resp.status
            self.reason = http_resp.reason
            self.body = http_resp.data
            self.headers = http_resp.getheaders()
        else:
            self.status = status
            self.reason = reason
            self.body = None
            self.headers = None

    def __str__(self):
        """Custom error messages for exception"""
        lines = [
            "({0})\n".format(self.status),
            "Reason: {0}\n".format(self.reason),
        ]
        if self.headers:
            lines.append("HTTP response headers: {0}\n".format(self.headers))
        if self.body:
            lines.append("HTTP response body: {0}\n".format(self.body))
        return "".join(lines)
class NotFoundException(ApiException):
    """Exception for 'not found' API responses."""
    def __init__(self, status=None, reason=None, http_resp=None):
        super(NotFoundException, self).__init__(status=status, reason=reason, http_resp=http_resp)


class UnauthorizedException(ApiException):
    """Exception for unauthorized API responses."""
    def __init__(self, status=None, reason=None, http_resp=None):
        super(UnauthorizedException, self).__init__(status=status, reason=reason, http_resp=http_resp)


class ForbiddenException(ApiException):
    """Exception for forbidden API responses."""
    def __init__(self, status=None, reason=None, http_resp=None):
        super(ForbiddenException, self).__init__(status=status, reason=reason, http_resp=http_resp)


class ServiceException(ApiException):
    """Exception for server-side API failures."""
    def __init__(self, status=None, reason=None, http_resp=None):
        super(ServiceException, self).__init__(status=status, reason=reason, http_resp=http_resp)
def render_path(path_to_item):
    """Returns a string representation of a path"""
    parts = []
    for pth in path_to_item:
        # Integers are list indices; everything else is rendered as a quoted key.
        if isinstance(pth, six.integer_types):
            parts.append("[{0}]".format(pth))
        else:
            parts.append("['{0}']".format(pth))
    return "".join(parts)
|
/savvihub-0.1.18.tar.gz/savvihub-0.1.18/openapi_client/exceptions.py
| 0.692226 | 0.268594 |
exceptions.py
|
pypi
|
from pathlib import Path
from typing import Any
from typing import Callable
from typing import TypeVar
import typer
from typing_extensions import Annotated
from .cli_manager import CLIManager
from .cli_manager import ProjectExistsError
cwd = Path.cwd()
app = typer.Typer()
state = {"verbose": False}
FuncT = TypeVar("FuncT", bound=Callable[..., Any])
def typed_command(func: FuncT) -> FuncT:
    """Add type hints to the decorators."""
    # Register with the Typer app while keeping the function's static type.
    registered: FuncT = app.command()(func)
    return registered
def typed_callback(func: FuncT) -> FuncT:
    """Add type hints to the decorators."""
    # Register as the Typer app callback while keeping the static type.
    registered: FuncT = app.callback()(func)
    return registered
@typed_command
def new_project(
    project_name: Annotated[str, typer.Argument(help="The name of the new project.")],
    project_dir: Annotated[
        Path, typer.Option(..., help="The project directory path. Default is the current directory.")
    ] = cwd,
) -> None:
    """Create a new savvy-rest project."""
    # NOTE(review): project_dir defaults to cwd (captured at import time), so
    # this None branch looks unreachable via the CLI — confirm whether the
    # prompt path is still needed for direct (non-typer) calls.
    if project_dir is None:
        current_directory = Path.cwd()
        project_dir = typer.prompt(
            f"Please provide the project directory (press Enter to use the default [{current_directory}])",
            default=current_directory,
            type=Path,
        )
    try:
        result = CLIManager.new_project(project_dir, project_name)
        typer.echo(result)
        # Verbose mode also prints the generated project tree.
        if state["verbose"]:
            CLIManager.generate_project_tree(project_dir, project_name)
    except ProjectExistsError as err:
        typer.echo(f"Error: {err}")
@typed_command
def new_route(name: Annotated[str, typer.Argument(help="The name of the new route.")]) -> None:
    """Create a new route in the project."""
    # Placeholder: route scaffolding is not implemented yet.
    message = f"Creating a new route {name}"
    print(message)
@typed_command
def new_scenario(
    route_name: Annotated[str, typer.Argument(help="The name of the route.")],
    scenario_name: Annotated[str, typer.Argument(help="The name of the new scenario.")],
) -> None:
    """Create a new scenario for a route."""
    # Placeholder: scenario scaffolding is not implemented yet.
    message = f"Creating a new scenario {scenario_name} for a route {route_name}"
    print(message)
@typed_command
def update_config(
    environment_name: Annotated[str, typer.Argument(help="The name of the environment for updating the config.")],
    update: Annotated[
        str,
        typer.Option(
            ...,
            help="The new values for target environment configuration in dictionary format string, ex. {'config_name': 'new_value'}.",  # noqa
        ),
    ] = "",
) -> None:
    """Update the configuration for a specific environment."""
    # Placeholder: echoes the requested update until persistence is implemented.
    message = f"Updating config {update} for a environment {environment_name}"
    print(message)
@typed_command
def run(
    environment_name: Annotated[str, typer.Option(..., help="The environment name to run the tests in.")] = 'stage',
    parallel_count: Annotated[int, typer.Option(..., help="The number of scenarios to run in parallel mode.")] = 0,
    tags: Annotated[str, typer.Option(..., help="Execute scenarios with these comma-separated tags.")] = "",
    routes: Annotated[str, typer.Option(..., help="Execute scenarios belonging to these comma-separated routes.")] = "",
    filters_: Annotated[str, typer.Option(..., help="Execute scenarios matching the specified filter condition.")] = "",
) -> None:
    """Run tests in the specified environment."""
    # Placeholder: echoes the effective configuration until execution lands.
    summary = f"Running scenarios with configuration:: environment_name:{environment_name}, parallel_count:{parallel_count}, tags:{tags}, routes:{routes}, filters:{filters_}"  # noqa
    print(summary)
@typed_callback
def verbose_callback(verbose: bool = False) -> None:
    """Enable verbose output."""
    # Guard clause: nothing to do unless verbosity was requested.
    if not verbose:
        return
    print("Will write verbose output")
    state["verbose"] = True
# Script entry point: launch the Typer CLI application.
if __name__ == "__main__":
    app()
|
/savvy_rest-0.0.1-py3-none-any.whl/savvy_rest/cli/cli.py
| 0.839931 | 0.304481 |
cli.py
|
pypi
|
from __future__ import annotations
import os
import signal
import sys
from distutils.spawn import find_executable
from argo_client.connection import ServerConnection, DynamicSocketProcess, HttpProcess, ManagedProcess
from argo_client.interaction import Interaction, Command
from .commands import *
from .option import *
from typing import Optional, Union, Any, List, TextIO
# FIXME cryptol_path isn't always used...?
def connect(command: Union[str, ServerConnection, None] = None,
            *,
            cryptol_path: Optional[str] = None,
            persist: bool = False,
            url : Optional[str] = None,
            reset_server : bool = False,
            verify : Union[bool, str] = True,
            log_dest : Optional[TextIO] = None) -> SAWConnection:
    """
    Connect to a (possibly new) Saw server process.

    :param command: A command to launch a new Saw server in socket mode (if
    provided).

    :param url: A URL at which to connect to an already running SAW HTTP server.

    :param persist: If ``True``, a server process launched here is left
    running when this connection is closed or garbage collected.

    :param reset_server: If ``True``, the server that is connected to will be
    reset. (This ensures any states from previous server usages have been cleared.)

    If no parameters specifying how to connect to the server are provided, the
    following are attempted in order:

    1. If the environment variable ``SAW_SERVER`` is set and refers to an
       executable, it is assumed to be a SAW server and will be used for a new
       ``socket`` connection.

    2. If the environment variable ``SAW_SERVER_URL`` is set, it is assumed to
       be the URL for a running SAW server in ``http`` mode and will be
       connected to.

    3. If an executable ``saw-remote-api`` is available on the ``PATH`` it is
       assumed to be a SAW server and will be used for a new ``socket``
       connection.
    """
    c = None
    if command is not None:
        if url is not None:
            raise ValueError("A SAW server URL cannot be specified with a command currently.")
        # BUG FIX: `persist` was previously accepted but never forwarded to
        # SAWConnection, so it was silently ignored.
        c = SAWConnection(command, persist=persist, log_dest=log_dest)
    elif url is not None:
        c = SAWConnection(ServerConnection(HttpProcess(url, verify=verify)), persist=persist, log_dest=log_dest)
    elif (command := os.getenv('SAW_SERVER')) is not None and (command := find_executable(command)) is not None:
        c = SAWConnection(command+" socket", persist=persist, log_dest=log_dest)
    elif (url := os.getenv('SAW_SERVER_URL')) is not None:
        c = SAWConnection(ServerConnection(HttpProcess(url, verify=verify)), persist=persist, log_dest=log_dest)
    elif (command := find_executable('saw-remote-api')) is not None:
        c = SAWConnection(command+" socket", persist=persist, log_dest=log_dest)
    else:
        raise ValueError(
            """saw.connection.connect requires one of the following:",
            1) a command to launch a SAW server is the first positional argument,
            2) a URL to connect to a running SAW server is provided via the `url` keyword argument,
            3) the environment variable `SAW_SERVER` must refer to a valid server executable, or
            4) the environment variable `SAW_SERVER_URL` must refer to the URL of a running SAW server.""")
    if reset_server:
        SAWResetServer(c)
    return c
class SAWConnection:
    """A representation of a current user state in a session with SAW.

    Holds the underlying ``ServerConnection`` (plus the ``ManagedProcess``
    handle when this object launched the server process itself) and records
    the most recent ``Interaction`` so that each new command is threaded onto
    the protocol state produced by the previous one.
    """

    most_recent_result: Optional[Interaction]
    server_connection: ServerConnection
    proc: Optional[ManagedProcess]

    def __init__(self,
                 command_or_connection: Union[str, ServerConnection],
                 *,
                 persist: bool = False,
                 log_dest: Optional[TextIO] = None) -> None:
        """:param command_or_connection: Either a shell command used to launch
            a new SAW server (connected over a socket), or an existing
            ``ServerConnection`` to adopt.
        :param persist: If ``True``, a server process launched here is left
            running when this object is disconnected or garbage-collected.
        :param log_dest: If given, log the JSON protocol traffic to this
            stream.
        """
        self.proc = None
        self.most_recent_result = None
        self.persist = persist
        if isinstance(command_or_connection, str):
            self.proc = DynamicSocketProcess(command_or_connection, persist=self.persist)
            self.server_connection = ServerConnection(self.proc)
        else:
            self.server_connection = command_or_connection
        if log_dest:
            self.logging(on=True, dest=log_dest)

    def reset(self) -> None:
        """Resets the connection, causing its unique state on the server to be
        freed (if applicable).

        After a reset a connection may be treated as if it were a fresh
        connection with the server if desired."""
        SAWReset(self)
        self.most_recent_result = None

    def reset_server(self) -> None:
        """Resets the server, causing all states on the server to be freed."""
        SAWResetServer(self)
        self.most_recent_result = None

    def disconnect(self) -> None:
        """Clears the state from the server and closes any underlying
        server/connection process launched by this object."""
        self.reset()
        if not self.persist and self.proc and (pid := self.proc.pid()):
            # Fix: signal the whole process group with os.killpg (as __del__
            # already does) instead of os.kill on the group id, which would
            # only signal the group leader and could leave children of the
            # server process running.
            os.killpg(os.getpgid(pid), signal.SIGKILL)
            self.proc = None

    def logging(self, on: bool, *, dest: TextIO = sys.stderr) -> None:
        """Whether to log received and transmitted JSON."""
        self.server_connection.logging(on=on, dest=dest)

    def __del__(self) -> None:
        # When being deleted, ensure we don't leave a lingering server
        # process behind; the whole process group is killed so children of
        # the launched server are cleaned up too.
        if not self.persist:
            if self.proc and (pid := self.proc.pid()):
                os.killpg(os.getpgid(pid), signal.SIGKILL)

    def pid(self) -> Optional[int]:
        """Return the PID of the running server process."""
        if self.proc is not None:
            return self.proc.pid()
        else:
            return None

    def running(self) -> bool:
        """Return whether the underlying server process is still running."""
        if self.proc is not None:
            return self.proc.running()
        else:
            return False

    def protocol_state(self) -> Any:
        """Return the protocol state of the most recent interaction, or
        ``None`` if no command has been issued yet."""
        if self.most_recent_result is None:
            return None
        else:
            return self.most_recent_result.state()

    # Protocol messages

    def cryptol_load_file(self, filename: str, timeout: Optional[float] = None) -> Command:
        """Create an instance of the `CryptolLoadFile` command for the named
        file."""
        self.most_recent_result = CryptolLoadFile(self, filename, timeout)
        return self.most_recent_result

    def create_ghost_variable(self, name: str, server_name: str, timeout: Optional[float] = None) -> Command:
        """Create an instance of the `CreateGhostVariable` command. Documentation on
        the purpose and use of this command is associated with the top-level
        `create_ghost_variable` function.
        """
        self.most_recent_result = CreateGhostVariable(self, name, server_name, timeout)
        return self.most_recent_result

    def jvm_load_class(self, name: str, class_name: str, timeout: Optional[float] = None) -> Command:
        """Create an instance of the `JVMLoadClass` command. Documentation on the purpose
        and use of this command is associated with the top-level `jvm_load_class`
        function.
        """
        self.most_recent_result = JVMLoadClass(self, name, class_name, timeout)
        return self.most_recent_result

    def jvm_verify(self,
                   class_name: str,
                   method_name: str,
                   lemmas: List[str],
                   check_sat: bool,
                   contract: Any,
                   script: ProofScript,
                   lemma_name: str,
                   timeout: Optional[float] = None) -> Command:
        """Create an instance of the `JVMVerify` command. Documentation on the purpose
        and use of this command is associated with the top-level `jvm_verify`
        function.
        """
        # Fix: the docstring previously pointed at `jvm_assume` by mistake.
        self.most_recent_result = \
            JVMVerify(self, class_name, method_name, lemmas, check_sat, contract, script, lemma_name, timeout)
        return self.most_recent_result

    def jvm_assume(self,
                   class_name: str,
                   method_name: str,
                   contract: Any,
                   lemma_name: str,
                   timeout: Optional[float] = None) -> Command:
        """Create an instance of the `JVMAssume` command. Documentation on the purpose
        and use of this command is associated with the top-level `jvm_assume`
        function.
        """
        self.most_recent_result = \
            JVMAssume(self, class_name, method_name, contract, lemma_name, timeout)
        return self.most_recent_result

    def llvm_load_module(self, name: str, bitcode_file: str, timeout: Optional[float] = None) -> Command:
        """Create an instance of the `LLVMLoadModule` command for the given
        bitcode file."""
        self.most_recent_result = LLVMLoadModule(self, name, bitcode_file, timeout)
        return self.most_recent_result

    def llvm_verify(self,
                    module: str,
                    function: str,
                    lemmas: List[str],
                    check_sat: bool,
                    contract: Any,
                    script: ProofScript,
                    lemma_name: str,
                    timeout: Optional[float] = None) -> Command:
        """Create an instance of the `LLVMVerify` command."""
        self.most_recent_result = \
            LLVMVerify(self, module, function, lemmas, check_sat, contract, script, lemma_name, timeout)
        return self.most_recent_result

    def llvm_assume(self,
                    module: str,
                    function: str,
                    contract: Any,
                    lemma_name: str,
                    timeout: Optional[float] = None) -> Command:
        """Create an instance of the `LLVMAssume` command. Documentation on the purpose
        and use of this command is associated with the top-level `llvm_assume`
        function.
        """
        self.most_recent_result = \
            LLVMAssume(self, module, function, contract, lemma_name, timeout)
        return self.most_recent_result

    def yosys_import(self, name: str, path: str, timeout: Optional[float] = None) -> Command:
        """Create an instance of the `YosysImport` command."""
        self.most_recent_result = YosysImport(self, name, path, timeout)
        return self.most_recent_result

    def yosys_verify(self,
                     imp: str,
                     module: str,
                     preconds: List[str],
                     spec: str,
                     lemmas: List[str],
                     script: ProofScript,
                     lemma_name: str,
                     timeout: Optional[float] = None) -> Command:
        """Create an instance of the `YosysVerify` command."""
        self.most_recent_result = \
            YosysVerify(self, imp, module, preconds, spec, lemmas, script, lemma_name, timeout)
        return self.most_recent_result

    def yosys_import_sequential(self, name: str, path: str, module: str, timeout: Optional[float] = None) -> Command:
        """Create an instance of the `YosysImportSequential` command."""
        self.most_recent_result = YosysImportSequential(self, name, path, module, timeout)
        return self.most_recent_result

    def yosys_extract_sequential(self, name: str, module: str, cycles: int, timeout: Optional[float] = None) -> Command:
        """Create an instance of the `YosysExtractSequential` command."""
        self.most_recent_result = YosysExtractSequential(self, name, module, cycles, timeout)
        return self.most_recent_result

    def prove(self,
              goal: cryptoltypes.CryptolJSON,
              proof_script: ProofScript,
              timeout: Optional[float] = None) -> Command:
        """Create an instance of the `Prove` command. Documentation on the purpose and
        use of this command is associated with the top-level `prove` function.
        """
        self.most_recent_result = Prove(self, goal, proof_script, timeout)
        return self.most_recent_result

    def eval_int(self,
                 expr: cryptoltypes.CryptolJSON,
                 timeout: Optional[float] = None) -> Command:
        """Create an instance of the `EvalInt` command. Documentation on the purpose and
        use of this command is associated with the top-level `eval_int` function.
        """
        self.most_recent_result = EvalInt(self, expr, timeout)
        return self.most_recent_result

    def eval_bool(self,
                  expr: cryptoltypes.CryptolJSON,
                  timeout: Optional[float] = None) -> Command:
        """Create an instance of the `EvalBool` command. Documentation on the purpose and
        use of this command is associated with the top-level `eval_bool` function.
        """
        self.most_recent_result = EvalBool(self, expr, timeout)
        return self.most_recent_result

    def set_option(self,
                   option: SAWOption,
                   value: bool,
                   timeout: Optional[float] = None) -> Command:
        """Set a boolean-valued SAW option."""
        self.most_recent_result = SAWSetOption(self, option, value, timeout)
        return self.most_recent_result
|
/saw_client-1.0.0.tar.gz/saw_client-1.0.0/saw_client/connection.py
| 0.562177 | 0.225907 |
connection.py
|
pypi
|
from abc import ABCMeta, abstractmethod
from typing import Any, List
class Prover(metaclass=ABCMeta):
    """Abstract base class for SAW prover backends; ``to_json`` produces the
    JSON form the SAW remote API expects for a prover choice."""
    @abstractmethod
    def to_json(self) -> Any: pass
class ABC(Prover):
    """Selects the ``abc`` prover backend."""
    def to_json(self) -> Any:
        return dict(name="abc")


class ABC_Verilog(Prover):
    """Selects the ``w4-abc-verilog`` prover backend."""
    def to_json(self) -> Any:
        return dict(name="w4-abc-verilog")


class ABC_SMTLib(Prover):
    """Selects the ``w4-abc-smtlib`` prover backend."""
    def to_json(self) -> Any:
        return dict(name="w4-abc-smtlib")


class ABC_SBV(Prover):
    """Selects the ``sbv-abc`` prover backend."""
    def to_json(self) -> Any:
        return dict(name="sbv-abc")


class Boolector(Prover):
    """Selects the ``boolector`` prover backend."""
    def to_json(self) -> Any:
        return dict(name="boolector")


class Boolector_SBV(Prover):
    """Selects the ``sbv-boolector`` prover backend."""
    def to_json(self) -> Any:
        return dict(name="sbv-boolector")


class RME(Prover):
    """Selects the ``rme`` prover backend."""
    def to_json(self) -> Any:
        return dict(name="rme")
class UnintProver(Prover):
    """A prover invoked with a list of function names to leave
    uninterpreted."""
    def __init__(self, name : str, unints : List[str]) -> None:
        self.name = name
        self.unints = unints

    def to_json(self) -> Any:
        return {
            "name": self.name,
            "uninterpreted functions": self.unints,
        }
class CVC4(UnintProver):
    """CVC4 via the ``w4-cvc4`` backend."""
    def __init__(self, unints : List[str]) -> None:
        super().__init__("w4-cvc4", unints)
class CVC5(UnintProver):
    """CVC5 via the ``w4-cvc5`` backend."""
    def __init__(self, unints : List[str]) -> None:
        super().__init__("w4-cvc5", unints)
class Yices(UnintProver):
    """Yices via the ``w4-yices`` backend."""
    def __init__(self, unints : List[str]) -> None:
        super().__init__("w4-yices", unints)
class Z3(UnintProver):
    """Z3 via the ``w4-z3`` backend."""
    def __init__(self, unints : List[str]) -> None:
        super().__init__("w4-z3", unints)
class CVC4_SBV(UnintProver):
    """CVC4 via the ``sbv-cvc4`` backend."""
    def __init__(self, unints : List[str]) -> None:
        super().__init__("sbv-cvc4", unints)
class CVC5_SBV(UnintProver):
    """CVC5 via the ``sbv-cvc5`` backend."""
    def __init__(self, unints : List[str]) -> None:
        super().__init__("sbv-cvc5", unints)
class Yices_SBV(UnintProver):
    """Yices via the ``sbv-yices`` backend."""
    def __init__(self, unints : List[str]) -> None:
        super().__init__("sbv-yices", unints)
class Z3_SBV(UnintProver):
    """Z3 via the ``sbv-z3`` backend."""
    def __init__(self, unints : List[str]) -> None:
        super().__init__("sbv-z3", unints)
class ProofTactic(metaclass=ABCMeta):
    """Abstract base class for proof tactics; ``to_json`` produces the JSON
    form the SAW remote API expects for one tactic of a proof script."""
    @abstractmethod
    def to_json(self) -> Any: pass
class UseProver(ProofTactic):
    """Tactic serialized as ``use prover``, dispatching the goal to the
    given prover backend."""
    def __init__(self, prover : Prover) -> None:
        self.prover = prover

    def to_json(self) -> Any:
        return {
            "tactic": "use prover",
            "prover": self.prover.to_json(),
        }
class Unfold(ProofTactic):
    """Tactic serialized as ``unfold``, carrying a list of names."""
    def __init__(self, names : List[str]) -> None:
        self.names = names

    def to_json(self) -> Any:
        return dict(tactic="unfold", names=self.names)


class EvaluateGoal(ProofTactic):
    """Tactic serialized as ``evaluate goal``, carrying a list of names."""
    def __init__(self, names : List[str]) -> None:
        self.names = names

    def to_json(self) -> Any:
        return dict(tactic="evaluate goal", names=self.names)
# TODO: add "simplify"
class Admit(ProofTactic):
    """Tactic serialized as ``admit``."""
    def to_json(self) -> Any:
        return dict(tactic="admit")


class BetaReduceGoal(ProofTactic):
    """Tactic serialized as ``beta reduce goal``."""
    def to_json(self) -> Any:
        return dict(tactic="beta reduce goal")


class Trivial(ProofTactic):
    """Tactic serialized as ``trivial``."""
    def to_json(self) -> Any:
        return dict(tactic="trivial")
class ProofScript:
    """An ordered sequence of proof tactics, serialized under the
    ``tactics`` key."""
    def __init__(self, tactics : List[ProofTactic]) -> None:
        self.tactics = tactics

    def to_json(self) -> Any:
        serialized = [tactic.to_json() for tactic in self.tactics]
        return {'tactics': serialized}
# Ready-made ``use prover`` tactics for the argument-free prover backends.
abc = UseProver(ABC())
abc_smtlib = UseProver(ABC_SMTLib())
abc_verilog = UseProver(ABC_Verilog())
rme = UseProver(RME())
boolector = UseProver(Boolector())
def cvc4(unints : List[str]) -> ProofTactic:
    """Tactic using the CVC4 prover, leaving ``unints`` uninterpreted."""
    return UseProver(CVC4(unints))
def cvc5(unints : List[str]) -> ProofTactic:
    """Tactic using the CVC5 prover, leaving ``unints`` uninterpreted."""
    return UseProver(CVC5(unints))
def yices(unints : List[str]) -> ProofTactic:
    """Tactic using the Yices prover, leaving ``unints`` uninterpreted."""
    return UseProver(Yices(unints))
def z3(unints : List[str]) -> ProofTactic:
    """Tactic using the Z3 prover, leaving ``unints`` uninterpreted."""
    return UseProver(Z3(unints))
|
/saw_client-1.0.0.tar.gz/saw_client-1.0.0/saw_client/proofscript.py
| 0.710729 | 0.46557 |
proofscript.py
|
pypi
|
from itertools import chain
from typing import Dict, Any, List, Iterable, Type
from argo_client.interaction import ArgoException
class SAWException(Exception):
    """Base class for errors reported by the SAW server.

    The fields of the server's error payload are available both via the
    ``data`` dictionary and as attributes of the exception itself (see
    ``__getattr__``).
    """
    data: Dict[str, Any]
    code: int
    stdout: str
    stderr: str

    def __init__(self, ae: "ArgoException") -> None:
        super().__init__(ae.message)
        self.data = ae.data
        self.code = ae.code
        self.stdout = ae.stdout
        self.stderr = ae.stderr

    # The exception gets fields for each data field in the ArgoException
    def __getattr__(self, attr: str) -> Any:
        # Guard against infinite recursion when ``data`` itself is absent
        # (e.g. if lookup happens before __init__ has run).
        if attr == "data":
            raise AttributeError(attr)
        # BUG FIX: the original body dropped the ``return``, so every dynamic
        # attribute lookup silently evaluated to None.
        return self.data.get(attr)

    def __dir__(self) -> Iterable[str]:
        # Advertise the dynamic data fields alongside the normal attributes.
        return chain(super().__dir__(), [str(k) for k in self.data.keys()])

    def __str__(self) -> str:
        lines: List[str] = []
        for k, v in self.data.items():
            lines.append(f"{k}: {v}")
        return '\n'.join(lines)
def make_saw_exception(ae: ArgoException) -> SAWException:
    """Convert an ArgoException to its corresponding SAWException, failing with
    the original ArgoException if the code for this ArgoException does not
    correspond to a SAWException.
    """
    exception_class = error_code_table.get(ae.code)
    if exception_class is None:
        raise ae
    return exception_class(ae)
# Server value errors:
class ServerValueError(SAWException):
    """A value stored on the server was missing or of an unexpected kind."""

class NoServerValue(ServerValueError):
    """No server value was found for the request."""

class NotACryptolEnvironment(ServerValueError):
    """The requested server value is not a Cryptol environment."""

class NotAnLLVMModule(ServerValueError):
    """The requested server value is not an LLVM module."""

class NotAnLLVMSetupScript(ServerValueError):
    """The requested server value is not an LLVM setup script."""

class NotAnLLVMSetupValue(ServerValueError):
    """The requested server value is not an LLVM setup value."""

class NotAnLLVMMethodSpecification(ServerValueError):
    """The requested server value is not an LLVM method specification."""

class NotAnLLVMMethodSpecIR(ServerValueError):
    """The requested server value is not LLVM method-spec IR."""

class NotAJVMClass(ServerValueError):
    """The requested server value is not a JVM class."""

class NotAJVMMethodSpecIR(ServerValueError):
    """The requested server value is not JVM method-spec IR."""

class NotASimpset(ServerValueError):
    """The requested server value is not a simpset."""

class NotATerm(ServerValueError):
    """The requested server value is not a term."""

class NotAYosysTheorem(ServerValueError):
    """The requested server value is not a Yosys theorem."""

# Setup errors:
class SetupError(SAWException):
    """A command was issued in a server mode that does not allow it."""

class NotSettingUpCryptol(SetupError):
    """A Cryptol setup command arrived while not setting up Cryptol."""

class NotSettingUpCrucibleLLVM(SetupError):
    """A Crucible-LLVM setup command arrived while not setting it up."""

class NotAtTopLevel(SetupError):
    """A top-level-only command arrived while not at the top level."""

# Loading errors:
class LoadingError(SAWException):
    """Loading a module or file failed."""

class CantLoadLLVMModule(LoadingError):
    """The given LLVM module could not be loaded."""

# Verification errors:
class VerificationError(SAWException):
    """Verification failed on the server."""

# Cryptol errors:
class CryptolError(SAWException):
    """An error was reported by Cryptol."""
# The canonical mapping from Argo error codes to SAW exceptions:
error_code_table : Dict[int, Type[SAWException]] = {
    # Server value errors:
    10000: NoServerValue,
    10010: NotACryptolEnvironment,
    10020: NotAnLLVMModule,
    10030: NotAnLLVMSetupScript,
    10040: NotAnLLVMSetupValue,
    # NOTE(review): duplicate dict key! 10040 appears twice, so the entry
    # below silently overwrites NotAnLLVMSetupValue, which can therefore
    # never be produced by this table. One of the two codes is presumably
    # wrong — confirm against the saw-remote-api server's error-code list.
    10040: NotAnLLVMMethodSpecification,
    10050: NotAnLLVMMethodSpecIR,
    10060: NotASimpset,
    10070: NotATerm,
    10080: NotAJVMClass,
    10090: NotAJVMMethodSpecIR,
    10130: NotAYosysTheorem,
    # Setup errors:
    10100: NotSettingUpCryptol,
    10110: NotSettingUpCrucibleLLVM,
    10120: NotAtTopLevel,
    # Loading errors:
    10200: CantLoadLLVMModule,
    # Verification errors:
    10300: VerificationError,
    # Cryptol errors:
    11000: CryptolError,
}
|
/saw_client-1.0.0.tar.gz/saw_client-1.0.0/saw_client/exceptions.py
| 0.813979 | 0.170923 |
exceptions.py
|
pypi
|
import abc
class Block:
    """Plain-data snapshot of the consensus-relevant fields of a block."""

    def __init__(self, block):
        # Copy each field off the source object in declaration order.
        for field in ("block_id", "previous_id", "signer_id",
                      "block_num", "payload", "summary"):
            setattr(self, field, getattr(block, field))
class Service(metaclass=abc.ABCMeta):
    '''Provides methods that allow the consensus engine to issue commands
    and requests.'''

    # -- P2P --

    @abc.abstractmethod
    def send_to(self, receiver_id, message_type, payload):
        '''Send a consensus message to a specific connected peer.
        Args:
            receiver_id (bytes)
            message_type (str)
            payload (bytes)
        '''

    @abc.abstractmethod
    def broadcast(self, message_type, payload):
        '''Broadcast a message to all connected peers.
        Args:
            message_type (str)
            payload (bytes)
        '''

    # -- Block Creation --

    @abc.abstractmethod
    def initialize_block(self, previous_id):
        '''Initialize a new block with PREVIOUS_ID and begin adding batches to
        it. If no PREVIOUS_ID is specified, the current head will be
        used.
        Args:
            previous_id (bytes or None)
        '''

    @abc.abstractmethod
    def summarize_block(self):
        '''Stop adding batches to the current block and return a summary of its
        contents.
        Return:
            bytes
        '''

    @abc.abstractmethod
    def finalize_block(self, data):
        '''Insert the given consensus data into the block and sign it. If this
        call is successful, the consensus engine will receive the block
        afterwards.
        Args:
            data (bytes)
        Return:
            bytes
        '''

    @abc.abstractmethod
    def cancel_block(self):
        '''Stop adding batches to the current block and abandon it.'''

    # -- Block Directives --

    @abc.abstractmethod
    def check_blocks(self, priority):
        '''Update the prioritization of blocks to check to PRIORITY.
        Args:
            priority (list[bytes])
        '''

    @abc.abstractmethod
    def commit_block(self, block_id):
        '''Update the block that should be committed.
        Args:
            block_id (bytes)
        '''

    @abc.abstractmethod
    def ignore_block(self, block_id):
        '''Signal that this block is no longer being committed.
        Args:
            block_id (bytes)
        '''

    @abc.abstractmethod
    def fail_block(self, block_id):
        '''Mark this block as invalid from the perspective of consensus.
        Args:
            block_id (bytes)
        '''

    # -- Queries --

    @abc.abstractmethod
    def get_blocks(self, block_ids):
        '''Retrieve consensus-related information about blocks.
        Args:
            block_ids (list[bytes])
        Return:
            dict[bytes, block]
        '''

    @abc.abstractmethod
    def get_chain_head(self):
        '''Retrieve consensus-related information about the chain head.
        Return:
            block
        '''

    @abc.abstractmethod
    def get_settings(self, block_id, settings):
        '''Read the value of settings as of the given block.
        Args:
            block_id (bytes)
            settings (list[str])
        Return:
            dict[str, str]
        '''

    @abc.abstractmethod
    def get_state(self, block_id, addresses):
        '''Read values in state as of the given block.
        Args:
            block_id (bytes)
            addresses (list[str])
        Return:
            dict[str, bytes]
        '''
|
/sawtooth-sdk-1.2.5.tar.gz/sawtooth-sdk-1.2.5/sawtooth_sdk/consensus/service.py
| 0.842766 | 0.281943 |
service.py
|
pypi
|
from sawtooth_sdk.protobuf.validator_pb2 import Message
from sawtooth_sdk.protobuf import state_context_pb2
from sawtooth_sdk.protobuf import events_pb2
from sawtooth_sdk.processor.exceptions import InternalError
from sawtooth_sdk.processor.exceptions import AuthorizationException
class Context:
    """
    Context provides an interface for getting, setting, and deleting
    validator state. All validator interactions by a handler should be
    through a Context instance.
    Attributes:
        _stream (sawtooth.client.stream.Stream): client grpc communication
        _context_id (str): the context_id passed in from the validator
    """
    def __init__(self, stream, context_id):
        self._stream = stream
        self._context_id = context_id

    def get_state(self, addresses, timeout=None):
        """
        get_state queries the validator state for data at each of the
        addresses in the given list. The addresses that have been set
        are returned in a list.
        Args:
            addresses (list): the addresses to fetch
            timeout: optional timeout, in seconds
        Returns:
            results (list): a list of Entries (address, data), for the
            addresses that have a value
        Raises:
            AuthorizationException
        """
        request = state_context_pb2.TpStateGetRequest(
            context_id=self._context_id,
            addresses=addresses)
        # Round-trip: serialize the request, block on the validator's reply
        # (up to `timeout`), then parse the raw response bytes.
        response_string = self._stream.send(
            Message.TP_STATE_GET_REQUEST,
            request.SerializeToString()).result(timeout).content
        response = state_context_pb2.TpStateGetResponse()
        response.ParseFromString(response_string)
        if response.status == \
                state_context_pb2.TpStateGetResponse.AUTHORIZATION_ERROR:
            raise AuthorizationException(
                'Tried to get unauthorized address: {}'.format(addresses))
        # NOTE(review): `response` can never be None here (constructed just
        # above), so this guard is purely defensive.
        entries = response.entries if response is not None else []
        # Unset addresses come back with empty data; filter them out.
        results = [e for e in entries if len(e.data) != 0]
        return results

    def set_state(self, entries, timeout=None):
        """
        set_state requests that each address in the provided dictionary be
        set in validator state to its corresponding value. A list is
        returned containing the successfully set addresses.
        Args:
            entries (dict): dictionary where addresses are the keys and data is
                the value.
            timeout: optional timeout, in seconds
        Returns:
            addresses (list): a list of addresses that were set
        Raises:
            AuthorizationException
        """
        state_entries = [
            state_context_pb2.TpStateEntry(address=e, data=entries[e])
            for e in entries
        ]
        request = state_context_pb2.TpStateSetRequest(
            entries=state_entries,
            context_id=self._context_id).SerializeToString()
        response = state_context_pb2.TpStateSetResponse()
        response.ParseFromString(
            self._stream.send(Message.TP_STATE_SET_REQUEST,
                              request).result(timeout).content)
        if response.status == \
                state_context_pb2.TpStateSetResponse.AUTHORIZATION_ERROR:
            addresses = [e.address for e in state_entries]
            raise AuthorizationException(
                'Tried to set unauthorized address: {}'.format(addresses))
        return response.addresses

    def delete_state(self, addresses, timeout=None):
        """
        delete_state requests that each of the provided addresses be unset
        in validator state. A list of successfully deleted addresses
        is returned.
        Args:
            addresses (list): list of addresses to delete
            timeout: optional timeout, in seconds
        Returns:
            addresses (list): a list of addresses that were deleted
        Raises:
            AuthorizationException
        """
        request = state_context_pb2.TpStateDeleteRequest(
            context_id=self._context_id,
            addresses=addresses).SerializeToString()
        response = state_context_pb2.TpStateDeleteResponse()
        response.ParseFromString(
            self._stream.send(Message.TP_STATE_DELETE_REQUEST,
                              request).result(timeout).content)
        if response.status == \
                state_context_pb2.TpStateDeleteResponse.AUTHORIZATION_ERROR:
            raise AuthorizationException(
                'Tried to delete unauthorized address: {}'.format(addresses))
        return response.addresses

    def add_receipt_data(self, data, timeout=None):
        """Add a blob to the execution result for this transaction.
        Args:
            data (bytes): The data to add.
        Raises:
            InternalError: if the validator reports a failure
        """
        request = state_context_pb2.TpReceiptAddDataRequest(
            context_id=self._context_id,
            data=data).SerializeToString()
        response = state_context_pb2.TpReceiptAddDataResponse()
        response.ParseFromString(
            self._stream.send(
                Message.TP_RECEIPT_ADD_DATA_REQUEST,
                request).result(timeout).content)
        if response.status == state_context_pb2.TpReceiptAddDataResponse.ERROR:
            raise InternalError(
                "Failed to add receipt data: {}".format((data)))

    def add_event(self, event_type, attributes=None, data=None, timeout=None):
        """Add a new event to the execution result for this transaction.
        Args:
            event_type (str): This is used to subscribe to events. It should be
                globally unique and describe what, in general, has occurred.
            attributes (list of (str, str) tuples): Additional information
                about the event that is transparent to the validator.
                Attributes can be used by subscribers to filter the type of
                events they receive.
            data (bytes): Additional information about the event that is opaque
                to the validator.
        Raises:
            InternalError: if the validator reports a failure
        """
        if attributes is None:
            attributes = []
        event = events_pb2.Event(
            event_type=event_type,
            attributes=[
                events_pb2.Event.Attribute(key=key, value=value)
                for key, value in attributes
            ],
            data=data,
        )
        request = state_context_pb2.TpEventAddRequest(
            context_id=self._context_id, event=event).SerializeToString()
        response = state_context_pb2.TpEventAddResponse()
        response.ParseFromString(
            self._stream.send(
                Message.TP_EVENT_ADD_REQUEST,
                request).result(timeout).content)
        if response.status == state_context_pb2.TpEventAddResponse.ERROR:
            raise InternalError(
                "Failed to add event: ({}, {}, {})".format(
                    event_type, attributes, data))
|
/sawtooth-sdk-1.2.5.tar.gz/sawtooth-sdk-1.2.5/sawtooth_sdk/processor/context.py
| 0.910647 | 0.440409 |
context.py
|
pypi
|
from abc import ABCMeta
from abc import abstractmethod
# Error hierarchy for the signing package.
class NoSuchAlgorithmError(Exception):
    """Thrown when trying to create an algorithm which does not exist.
    """
class SigningError(Exception):
    """Thrown when an error occurs during the signing process.
    """
class ParseError(Exception):
    """Thrown when an error occurs during deserialization of a Private or
    Public key from various formats.
    """
class PrivateKey(metaclass=ABCMeta):
    """A private key instance.
    The underlying content is dependent on implementation.
    """
    @abstractmethod
    def get_algorithm_name(self):
        """Returns the algorithm name used for this private key (str).
        """
    @abstractmethod
    def as_hex(self):
        """Return the private key encoded as a hex string (str).
        """
    @abstractmethod
    def as_bytes(self):
        """Return the private key bytes.
        """
class PublicKey(metaclass=ABCMeta):
    """A public key instance.
    The underlying content is dependent on implementation.
    """
    @abstractmethod
    def get_algorithm_name(self):
        """Returns the algorithm name used for this public key (str).
        """
    @abstractmethod
    def as_hex(self):
        """Return the public key encoded as a hex string (str).
        """
    @abstractmethod
    def as_bytes(self):
        """Return the public key bytes.
        """
class Context(metaclass=ABCMeta):
    """A context for a cryptographic signing algorithm.
    Concrete algorithm implementations subclass this and implement every
    abstract method below.
    """
    @abstractmethod
    def get_algorithm_name(self):
        """Returns the algorithm name (str).
        """
    @abstractmethod
    def sign(self, message, private_key):
        """Sign a message
        Given a private key for this algorithm, sign the given message bytes
        and return a hex-encoded string of the resulting signature.
        Args:
            message (bytes): the message bytes
            private_key (:obj:`PrivateKey`): the private key
        Returns:
            The signature in a hex-encoded string
        Raises:
            SigningError: if any error occurs during the signing process
        """
    @abstractmethod
    def verify(self, signature, message, public_key):
        """Verifies that a signature of a message was produced with the
        associated public key.
        Args:
            signature (str): the hex-encoded signature
            message (bytes): the message bytes
            public_key (:obj:`PublicKey`): the public key to use for
                verification
        Returns:
            boolean: True if the public key is associated with the signature
            for that method, False otherwise
        """
    @abstractmethod
    def new_random_private_key(self):
        """Generates a new random PrivateKey using this context.
        Returns:
            (:obj:`PrivateKey`): a random private key
        """
    @abstractmethod
    def get_public_key(self, private_key):
        """Produce a public key for the given private key.
        Args:
            private_key (:obj:`PrivateKey`): a private key
        Returns:
            (:obj:`PublicKey`) the public key for the given private key
        """
|
/sawtooth-sdk-1.2.5.tar.gz/sawtooth-sdk-1.2.5/sawtooth_signing/core.py
| 0.936241 | 0.453262 |
core.py
|
pypi
|
import binascii
import warnings
import secp256k1
from sawtooth_signing.core import SigningError
from sawtooth_signing.core import ParseError
from sawtooth_signing.core import PrivateKey
from sawtooth_signing.core import PublicKey
from sawtooth_signing.core import Context
class Secp256k1PrivateKey(PrivateKey):
    """secp256k1 implementation of ``PrivateKey``, wrapping the bindings'
    ``secp256k1.PrivateKey`` object."""

    def __init__(self, secp256k1_private_key):
        self._private_key = secp256k1_private_key

    @staticmethod
    def new_random():
        """Generate a fresh random private key."""
        return Secp256k1PrivateKey(secp256k1.PrivateKey())

    @staticmethod
    def from_bytes(byte_str):
        """Build a private key from raw key bytes."""
        return Secp256k1PrivateKey(secp256k1.PrivateKey(byte_str))

    @staticmethod
    def from_hex(hex_str):
        """Build a private key from a hex string; raises ParseError on
        malformed input."""
        try:
            return Secp256k1PrivateKey.from_bytes(binascii.unhexlify(hex_str))
        except Exception as e:
            raise ParseError('Unable to parse hex private key: {}'.format(
                e)) from e

    def get_algorithm_name(self):
        """Always "secp256k1"."""
        return "secp256k1"

    def as_bytes(self):
        """Raw private key bytes."""
        return bytes(self._private_key.private_key)

    def as_hex(self):
        """Hex-encoded private key bytes."""
        return binascii.hexlify(self.as_bytes()).decode()

    @property
    def secp256k1_private_key(self):
        """The wrapped ``secp256k1.PrivateKey`` object."""
        return self._private_key
class Secp256k1PublicKey(PublicKey):
    """secp256k1 implementation of ``PublicKey``, wrapping the bindings'
    ``secp256k1.PublicKey`` object."""

    def __init__(self, secp256k1_public_key):
        self._public_key = secp256k1_public_key

    @staticmethod
    def from_bytes(byte_str):
        """Build a public key from its serialized bytes."""
        return Secp256k1PublicKey(secp256k1.PublicKey(byte_str, raw=True))

    @staticmethod
    def from_hex(hex_str):
        """Build a public key from a hex string; raises ParseError on
        malformed input."""
        try:
            return Secp256k1PublicKey.from_bytes(binascii.unhexlify(hex_str))
        except Exception as e:
            raise ParseError('Unable to parse hex public key: {}'.format(
                e)) from e

    def get_algorithm_name(self):
        """Always "secp256k1"."""
        return "secp256k1"

    def as_bytes(self):
        """Serialized public key bytes."""
        with warnings.catch_warnings():  # squelch secp256k1 warning
            warnings.simplefilter('ignore')
            return self._public_key.serialize()

    def as_hex(self):
        """Hex-encoded serialized public key."""
        return binascii.hexlify(self.as_bytes()).decode()

    @property
    def secp256k1_public_key(self):
        """The wrapped ``secp256k1.PublicKey`` object."""
        return self._public_key
class Secp256k1Context(Context):
    """Signing context implementing the secp256k1 algorithm."""

    def get_algorithm_name(self):
        """Always "secp256k1"."""
        return "secp256k1"

    def sign(self, message, private_key):
        """ECDSA-sign *message*, returning the compact signature hex-encoded;
        raises SigningError on failure."""
        try:
            key = private_key.secp256k1_private_key
            raw_sig = key.ecdsa_sign(message)
            compact = key.ecdsa_serialize_compact(raw_sig)
            return compact.hex()
        except Exception as e:
            raise SigningError('Unable to sign message: {}'.format(
                str(e))) from e

    def verify(self, signature, message, public_key):
        """Return True iff *signature* (hex str or bytes) verifies against
        *message* under *public_key*; any failure yields False."""
        try:
            sig_bytes = bytes.fromhex(signature) if isinstance(signature, str) else signature
            key = public_key.secp256k1_public_key
            sig = key.ecdsa_deserialize_compact(sig_bytes)
            return key.ecdsa_verify(message, sig)
        # pylint: disable=broad-except
        except Exception:
            return False

    def new_random_private_key(self):
        """Generate a fresh random Secp256k1PrivateKey."""
        return Secp256k1PrivateKey.new_random()

    def get_public_key(self, private_key):
        """Derive the public key of *private_key*."""
        return Secp256k1PublicKey(private_key.secp256k1_private_key.pubkey)
|
/sawtooth-sdk-1.2.5.tar.gz/sawtooth-sdk-1.2.5/sawtooth_signing/secp256k1.py
| 0.639961 | 0.308451 |
secp256k1.py
|
pypi
|
from sawtooth_signing.core import NoSuchAlgorithmError
from sawtooth_signing.core import ParseError
from sawtooth_signing.core import SigningError
from sawtooth_signing.secp256k1 import Secp256k1Context
class Signer:
    """Bundles a signing Context with a PrivateKey for convenient use."""

    def __init__(self, context, private_key):
        """Store the context and key; the public key is derived lazily."""
        self._context = context
        self._private_key = private_key
        self._public_key = None

    def sign(self, message):
        """Sign *message* (bytes) with the held key.
        Returns:
            The signature in a hex-encoded string
        Raises:
            SigningError: if any error occurs during the signing process
        """
        return self._context.sign(message, self._private_key)

    def get_public_key(self):
        """Return the public key for this Signer instance, deriving and
        caching it on first use."""
        if self._public_key is None:
            self._public_key = self._context.get_public_key(self._private_key)
        return self._public_key
class CryptoFactory:
    """Factory that produces Signer objects bound to one signing context."""

    def __init__(self, context):
        self._context = context

    @property
    def context(self):
        """The signing context that backs this factory instance."""
        return self._context

    def new_signer(self, private_key):
        """Create a new Signer for *private_key*.
        Args:
            private_key (:obj:`PrivateKey`): a private key
        Returns:
            (:obj:`Signer`): a signer instance
        """
        return Signer(self._context, private_key)
def create_context(algorithm_name):
    """Returns a signing context instance by algorithm name.
    Args:
        algorithm_name (str): the algorithm name
    Returns:
        (:obj:`Context`): a context instance for the given algorithm
    Raises:
        NoSuchAlgorithmError: if the algorithm is unknown
    """
    if algorithm_name != 'secp256k1':
        raise NoSuchAlgorithmError("no such algorithm: {}".format(algorithm_name))
    return Secp256k1Context()
|
/sawtooth-sdk-1.2.5.tar.gz/sawtooth-sdk-1.2.5/sawtooth_signing/__init__.py
| 0.905284 | 0.461077 |
__init__.py
|
pypi
|
from abc import ABCMeta
from abc import abstractmethod
# Error hierarchy for the signing package.
class NoSuchAlgorithmError(Exception):
    """Thrown when trying to create an algorithm which does not exist.
    """
class SigningError(Exception):
    """Thrown when an error occurs during the signing process.
    """
class ParseError(Exception):
    """Thrown when an error occurs during deserialization of a Private or
    Public key from various formats.
    """
class PrivateKey(metaclass=ABCMeta):
    """A private key instance.
    The underlying content is dependent on implementation.
    """
    @abstractmethod
    def get_algorithm_name(self):
        """Returns the algorithm name used for this private key (str).
        """
    @abstractmethod
    def as_hex(self):
        """Return the private key encoded as a hex string (str).
        """
    @abstractmethod
    def as_bytes(self):
        """Return the private key bytes.
        """
class PublicKey(metaclass=ABCMeta):
    """A public key instance.
    The underlying content is dependent on implementation.
    """
    @abstractmethod
    def get_algorithm_name(self):
        """Returns the algorithm name used for this public key (str).
        """
    @abstractmethod
    def as_hex(self):
        """Return the public key encoded as a hex string (str).
        """
    @abstractmethod
    def as_bytes(self):
        """Return the public key bytes.
        """
class Context(metaclass=ABCMeta):
    """A context for a cryptographic signing algorithm.
    Concrete algorithm implementations subclass this and implement every
    abstract method below.
    """
    @abstractmethod
    def get_algorithm_name(self):
        """Returns the algorithm name (str).
        """
    @abstractmethod
    def sign(self, message, private_key):
        """Sign a message
        Given a private key for this algorithm, sign the given message bytes
        and return a hex-encoded string of the resulting signature.
        Args:
            message (bytes): the message bytes
            private_key (:obj:`PrivateKey`): the private key
        Returns:
            The signature in a hex-encoded string
        Raises:
            SigningError: if any error occurs during the signing process
        """
    @abstractmethod
    def verify(self, signature, message, public_key):
        """Verifies that a signature of a message was produced with the
        associated public key.
        Args:
            signature (str): the hex-encoded signature
            message (bytes): the message bytes
            public_key (:obj:`PublicKey`): the public key to use for
                verification
        Returns:
            boolean: True if the public key is associated with the signature
            for that method, False otherwise
        """
    @abstractmethod
    def new_random_private_key(self):
        """Generates a new random PrivateKey using this context.
        Returns:
            (:obj:`PrivateKey`): a random private key
        """
    @abstractmethod
    def get_public_key(self, private_key):
        """Produce a public key for the given private key.
        Args:
            private_key (:obj:`PrivateKey`): a private key
        Returns:
            (:obj:`PublicKey`) the public key for the given private key
        """
|
/sawtooth-signing-1.1.5.tar.gz/sawtooth-signing-1.1.5/sawtooth_signing/core.py
| 0.936241 | 0.453262 |
core.py
|
pypi
|
import binascii
import warnings
import secp256k1
from sawtooth_signing.core import SigningError
from sawtooth_signing.core import ParseError
from sawtooth_signing.core import PrivateKey
from sawtooth_signing.core import PublicKey
from sawtooth_signing.core import Context
# Single shared secp256k1 context, created once at import time and reused
# by every key/context object in this module.
__CONTEXTBASE__ = secp256k1.Base(ctx=None, flags=secp256k1.ALL_FLAGS)
__CTX__ = __CONTEXTBASE__.ctx
class Secp256k1PrivateKey(PrivateKey):
    """A secp256k1 private key wrapping a `secp256k1.PrivateKey` object."""

    def __init__(self, secp256k1_private_key):
        """Wrap an existing secp256k1.PrivateKey instance.

        Args:
            secp256k1_private_key (secp256k1.PrivateKey): the key to wrap
        """
        self._private_key = secp256k1_private_key

    def get_algorithm_name(self):
        """Returns the algorithm name ('secp256k1')."""
        return "secp256k1"

    def as_hex(self):
        """Return the private key encoded as a hex string."""
        return binascii.hexlify(self.as_bytes()).decode()

    def as_bytes(self):
        """Return the raw private key bytes."""
        return bytes(self._private_key.private_key)

    @property
    def secp256k1_private_key(self):
        """The underlying secp256k1.PrivateKey instance."""
        return self._private_key

    @staticmethod
    def from_bytes(byte_str):
        """Create a Secp256k1PrivateKey from raw key bytes."""
        return Secp256k1PrivateKey(secp256k1.PrivateKey(byte_str, ctx=__CTX__))

    @staticmethod
    def from_hex(hex_str):
        """Create a Secp256k1PrivateKey from a hex-encoded string.

        Raises:
            ParseError: if the string is not a valid hex-encoded key
        """
        try:
            return Secp256k1PrivateKey.from_bytes(binascii.unhexlify(hex_str))
        except Exception as e:
            # Chain the original exception so the root cause stays visible
            # in tracebacks instead of being swallowed.
            raise ParseError(
                'Unable to parse hex private key: {}'.format(e)) from e

    @staticmethod
    def new_random():
        """Generate a new random private key."""
        return Secp256k1PrivateKey(secp256k1.PrivateKey(ctx=__CTX__))
class Secp256k1PublicKey(PublicKey):
    """A secp256k1 public key wrapping a `secp256k1.PublicKey` object."""

    def __init__(self, secp256k1_public_key):
        """Wrap an existing secp256k1.PublicKey instance."""
        self._public_key = secp256k1_public_key

    @property
    def secp256k1_public_key(self):
        """The underlying secp256k1.PublicKey instance."""
        return self._public_key

    def get_algorithm_name(self):
        """Returns the algorithm name ('secp256k1')."""
        return "secp256k1"

    def as_hex(self):
        """Return the public key encoded as a hex string."""
        return binascii.hexlify(self.as_bytes()).decode()

    def as_bytes(self):
        """Return the serialized public key bytes."""
        with warnings.catch_warnings():  # squelch secp256k1 warning
            warnings.simplefilter('ignore')
            return self._public_key.serialize()

    @staticmethod
    def from_bytes(byte_str):
        """Create a Secp256k1PublicKey from serialized key bytes."""
        public_key = secp256k1.PublicKey(byte_str, raw=True, ctx=__CTX__)
        return Secp256k1PublicKey(public_key)

    @staticmethod
    def from_hex(hex_str):
        """Create a Secp256k1PublicKey from a hex-encoded string.

        Raises:
            ParseError: if the string is not a valid hex-encoded key
        """
        try:
            return Secp256k1PublicKey.from_bytes(binascii.unhexlify(hex_str))
        except Exception as e:
            # Chain the original exception so the root cause stays visible.
            raise ParseError(
                'Unable to parse hex public key: {}'.format(e)) from e
class Secp256k1Context(Context):
    """Signing context implementing ECDSA over the secp256k1 curve."""

    def __init__(self):
        # Reuse the module-wide shared library context.
        self._ctx = __CTX__

    def get_algorithm_name(self):
        """Returns the algorithm name ('secp256k1')."""
        return "secp256k1"

    def sign(self, message, private_key):
        """Sign message bytes; returns the compact signature hex-encoded.

        Raises:
            SigningError: if any error occurs during the signing process
        """
        try:
            raw = private_key.secp256k1_private_key.ecdsa_sign(message)
            compact = private_key.secp256k1_private_key \
                .ecdsa_serialize_compact(raw)
            return compact.hex()
        except Exception as e:
            raise SigningError('Unable to sign message: {}'.format(str(e)))

    def verify(self, signature, message, public_key):
        """Return True iff `signature` matches `message` under `public_key`.

        Any failure (bad hex, malformed signature, mismatch) yields False.
        """
        # pylint: disable=broad-except
        try:
            sig_bytes = (bytes.fromhex(signature)
                         if isinstance(signature, str) else signature)
            decoded = public_key.secp256k1_public_key \
                .ecdsa_deserialize_compact(sig_bytes)
            return public_key.secp256k1_public_key.ecdsa_verify(
                message, decoded)
        except Exception:
            return False

    def new_random_private_key(self):
        """Generate a random Secp256k1PrivateKey."""
        return Secp256k1PrivateKey.new_random()

    def get_public_key(self, private_key):
        """Derive the public key for the given private key."""
        return Secp256k1PublicKey(private_key.secp256k1_private_key.pubkey)
|
/sawtooth-signing-1.1.5.tar.gz/sawtooth-signing-1.1.5/sawtooth_signing/secp256k1.py
| 0.619241 | 0.25038 |
secp256k1.py
|
pypi
|
from sawtooth_signing.core import NoSuchAlgorithmError
from sawtooth_signing.core import ParseError
from sawtooth_signing.core import SigningError
from sawtooth_signing.secp256k1 import Secp256k1Context
class Signer:
    """A convenient wrapper of Context and PrivateKey."""

    def __init__(self, context, private_key):
        """Bind a signing context to a private key."""
        self._ctx = context
        self._key = private_key
        self._pub = None  # public key cache, filled on first access

    def sign(self, message):
        """Signs the given message.

        Args:
            message (bytes): the message bytes

        Returns:
            The signature in a hex-encoded string

        Raises:
            SigningError: if any error occurs during the signing process
        """
        return self._ctx.sign(message, self._key)

    def get_public_key(self):
        """Return the public key for this Signer instance.

        The public key is derived lazily on first call and cached.
        """
        if self._pub is None:
            self._pub = self._ctx.get_public_key(self._key)
        return self._pub
class CryptoFactory:
    """Factory for generating Signers."""

    def __init__(self, context):
        """Remember the context used for every Signer this factory makes."""
        self._ctx = context

    @property
    def context(self):
        """Return the context that backs this factory instance."""
        return self._ctx

    def new_signer(self, private_key):
        """Create a new signer for the given private key.

        Args:
            private_key (:obj:`PrivateKey`): a private key

        Returns:
            (:obj:`Signer`): a signer instance
        """
        return Signer(self._ctx, private_key)
def create_context(algorithm_name):
    """Returns an algorithm instance by name.

    Args:
        algorithm_name (str): the algorithm name

    Returns:
        (:obj:`Context`): a context instance for the given algorithm

    Raises:
        NoSuchAlgorithmError: if the algorithm is unknown
    """
    # Only secp256k1 is currently supported.
    if algorithm_name != 'secp256k1':
        raise NoSuchAlgorithmError(
            "no such algorithm: {}".format(algorithm_name))
    return Secp256k1Context()
|
/sawtooth-signing-1.1.5.tar.gz/sawtooth-signing-1.1.5/sawtooth_signing/__init__.py
| 0.905284 | 0.461077 |
__init__.py
|
pypi
|
import datetime, time
import calendar
# Snapshot of "now" taken once at module import time.  NOTE(review):
# anything derived from this snapshot goes stale in long-running processes.
now = datetime.datetime.now()
def get_time(year=None,
             month=None,
             day=None,
             hour=None,
             minute=None,
             second=None,
             week=-1,
             last_day_of_month=False,
             type="time",
             detail=True):
    """Build a datetime/date (or its string form) from the given parts.

    Unspecified parts default to the *current* time at call time.  (The
    previous implementation bound the defaults to the time the module was
    imported, so a long-running process silently kept using a stale date.)

    :param year: year (defaults to the current year)
    :param month: month (defaults to the current month)
    :param day: day (defaults to today)
    :param hour: hour (defaults to the current hour)
    :param minute: minute (defaults to the current minute)
    :param second: second (defaults to the current second)
    :param week: weekday 1-7; when != -1 the `day` parameter selects the
        week and the result is that weekday of the same week
    :param last_day_of_month: use the last day of the month (default False)
    :param type: "time" returns a datetime/date object, anything else
        returns str(...)
    :param detail: include hour/minute/second (datetime) or not (date)
    :return: datetime/date object or string
    """
    current = datetime.datetime.now()
    if year is None:
        year = current.year
    if month is None:
        month = current.month
    if day is None:
        day = current.day
    if hour is None:
        hour = current.hour
    if minute is None:
        minute = current.minute
    if second is None:
        second = current.second
    if week != -1:
        # Walk back to Monday of the week containing the given date, then
        # step forward to the requested weekday.  NOTE: this branch returns
        # a datetime regardless of `type`/`detail` (original behavior kept).
        weekday = datetime.datetime(year, month, day, hour, minute, second)
        one_day = datetime.timedelta(days=1)
        while weekday.weekday() != 0:
            weekday -= one_day
        ret = weekday + datetime.timedelta(days=week - 1)
    else:
        if last_day_of_month:  # replace day with the month's last day
            day = calendar.monthrange(year, month)[1]
        if not detail:
            date = datetime.date(year, month, day)
        else:
            date = datetime.datetime(year, month, day, hour, minute, second)
        ret = date if type == "time" else str(date)
    return ret
def get_timestamp(detail=True):
    """Return the current Unix timestamp.

    :param detail: True -> full float timestamp; False -> integer seconds
        (the part before the decimal point)
    :return: timestamp (float or int)
    """
    stamp = time.time()
    return stamp if detail else int(stamp)
def timestamp_to_str(timestamp, strformat):
    """Format a Unix timestamp as a local-time string.

    :param timestamp: Unix timestamp (seconds)
    :param strformat: strftime format, e.g. %Y-%m-%d %H:%M:%S
    :return: formatted local-time string
    """
    local = time.localtime(timestamp)
    return time.strftime(strformat, local)
def str_to_timestamp(timestr, strformat):
    """Parse a local-time string into an integer Unix timestamp.

    :param timestr: time string
    :param strformat: strptime format, e.g. %Y-%m-%d %H:%M:%S
    :return: integer Unix timestamp (seconds)
    """
    parsed = time.strptime(timestr, strformat)
    return int(time.mktime(parsed))
|
/sawyer-get-time-1.1.0.tar.gz/sawyer-get-time-1.1.0/get_time/__init__.py
| 0.421552 | 0.270757 |
__init__.py
|
pypi
|
import os
import json
import jax
import jax.numpy as jnp
# Directory of this module; weight files live alongside the code.
CURDIR = os.path.abspath(os.path.dirname(__file__))
WEIGHTDIR = os.path.join(CURDIR, "weights")
# NOTE: created eagerly, as a side effect of importing this module.
os.makedirs(WEIGHTDIR, exist_ok=True)
def _load_json_weights(filename, default=None):
if not os.path.exists(filename):
return {} if default is None else dict(default)
with open(filename, "r") as file:
weights = {k: jnp.array(v) for k, v in json.load(file).items()}
return weights
# Input/output normalization constants for the neff models, loaded once at
# import time; falls back to identity normalization when no file exists.
neff_norms = _load_json_weights(
    filename=os.path.join(WEIGHTDIR, "neff_norms.json"),
    default={"x_mean": 0.0, "x_std": 1.0, "y_mean": 0.0, "y_std": 1.0},
)
def save_neff_weights(weights, filename=None):
    """Serialize a weights dict to JSON.

    When no filename is given, a name is derived from the layer count
    (half the number of weight entries) and the hidden size taken from
    the 'win' matrix, and the file is placed in WEIGHTDIR.
    """
    if filename is None:
        num_layers = len(weights) // 2
        hidden = weights["win"].shape[-1]  # number of neurons per layer
        filename = os.path.join(
            WEIGHTDIR,
            f"neff_weights_L={str(num_layers).zfill(2)}"
            f"_H={str(hidden).zfill(3)}.json",
        )
    serializable = {key: value.tolist() for key, value in weights.items()}
    with open(filename, "w") as file:
        json.dump(serializable, file)
def load_neff_weights(L, H):
    """Load stored weights for an L-layer, H-neuron-per-layer network.

    Raises:
        FileNotFoundError: if no weight file exists for this (L, H) pair.
    """
    filename = os.path.join(
        WEIGHTDIR, f"neff_weights_L={str(L).zfill(2)}_H={str(H).zfill(3)}.json"
    )
    if not os.path.exists(filename):
        # Report the actual path (the message previously contained the
        # literal placeholder "(unknown)").
        raise FileNotFoundError(
            f"No weights for neural networks with {L} layers and {H} neurons "
            f"per layers found.\nfile not found: '{filename}'"
        )
    return _load_json_weights(filename)
def load_neff_model(L):
    """Look up the module-level neff model function for L layers.

    Raises:
        NameError: if no `wg_neff_fcnnXX` function exists for this L.
    """
    funcname = f"wg_neff_fcnn{str(L).zfill(2)}"
    namespace = globals()
    if funcname not in namespace:
        raise NameError(f"neff model '{funcname}' not found")
    return namespace[funcname]
def generate_random_weights(L, H, key=42):
    """Create LeCun-normal random weights for an L-layer, H-neuron FCNN.

    Args:
        L: number of layers
        H: neurons per hidden layer
        key: jax PRNG key, or an int seed

    Returns:
        (weights, key): the weights dict and the advanced PRNG key.
    """
    if isinstance(key, int):
        key = jax.random.PRNGKey(key)
    key, *subkeys = jax.random.split(key, 1 + 2 * L)
    init = jax.nn.initializers.lecun_normal()
    weights = {
        "win": init(subkeys[0], (28, H)),  # 28 preprocessed input features
        "bin": init(subkeys[1], (1, H)),
    }
    for layer in range(1, L - 1):
        weights[f"w{str(layer).zfill(2)}"] = init(subkeys[2 * layer], (H, H))
        weights[f"b{str(layer).zfill(2)}"] = init(subkeys[2 * layer + 1], (1, H))
    weights["wout"] = init(subkeys[(L - 1) * 2], (H, 1))
    weights["bout"] = init(subkeys[(L - 1) * 2 + 1], (1, 1))
    return weights, key
def wg_preprocess(wl, wg_width, wg_height, wg_sw_angle):
    """Build the 28-feature input vector for the neff networks.

    All arguments are broadcast against each other; the sidewall angle is
    given in degrees.  The last axis of the result has length 28.
    """
    wl, wg_width, wg_height, wg_sw_angle = jnp.broadcast_arrays(
        wl, wg_width, wg_height, wg_sw_angle
    )
    wg_sw_angle = wg_sw_angle * jnp.pi / 180  # degrees -> radians
    wg_sw_sin = jnp.sin(wg_sw_angle)
    wg_sw_cos = jnp.cos(wg_sw_angle)
    wg_sw_length = wg_height / wg_sw_sin
    wg_width_btm = wg_width + 2 * wg_sw_length * wg_sw_cos
    # Feature order is fixed; the trained networks depend on it.
    features = [
        wg_sw_angle,
        wg_sw_sin,
        wg_sw_cos,
        wl * 1e6,
        wl / wg_width,
        wl / wg_width_btm,
        wl / wg_height,
        wl / wg_sw_length,
        wg_width * 1e6,
        wg_width / wl,
        wg_width / wg_width_btm,
        wg_width / wg_height,
        wg_width / wg_sw_length,
        wg_width_btm * 1e6,
        wg_width_btm / wl,
        wg_width_btm / wg_width,
        wg_width_btm / wg_height,
        wg_width_btm / wg_sw_length,
        wg_height * 1e6,
        wg_height / wl,
        wg_height / wg_width,
        wg_height / wg_width_btm,
        wg_height / wg_sw_length,
        wg_sw_length * 1e6,
        wg_sw_length / wl,
        wg_sw_length / wg_width,
        wg_sw_length / wg_width_btm,
        wg_sw_length / wg_height,
    ]
    return jnp.stack(features, -1)
def wg_neff_fcnn02(
    weights,
    wl,
    wg_width,
    wg_height,
    wg_sw_angle,
):
    """2-layer fully connected neural network predicting wg_strip neff.

    Args:
        weights: the neural network weights dictionary
        wl: the wavelength(s)
        wg_width: waveguide width
        wg_height: waveguide height
        wg_sw_angle: waveguide sidewall angle (degrees)
    """
    features = wg_preprocess(wl, wg_width, wg_height, wg_sw_angle)
    # Normalize inputs with the module-level statistics.
    normed = (features - neff_norms["x_mean"]) / neff_norms["x_std"]
    hidden = jax.nn.leaky_relu(normed @ weights["win"] + weights["bin"])
    out = hidden @ weights["wout"] + weights["bout"]
    # De-normalize and flatten to a 1D array.
    return out.ravel() * neff_norms["y_std"] + neff_norms["y_mean"]
|
/wg_straight/wg_straight.py
| 0.683208 | 0.201381 |
wg_straight.py
|
pypi
|
import json
from base64 import urlsafe_b64decode
from datetime import datetime
from enum import Enum
from re import compile
from time import time
from typing import Any, Dict, List, Optional
from pydantic import AnyHttpUrl, AnyUrl, BaseConfig, BaseModel, ConstrainedStr, Extra, Field, root_validator, validator
# Saxo OpenAPI streaming websocket endpoints, one per environment.
SIM_STREAMING_URL = "wss://streaming.saxobank.com/sim/openapi/streamingws"
LIVE_STREAMING_URL = "wss://streaming.saxobank.com/openapi/streamingws"
class ClientId(ConstrainedStr):
    """OAuth2.0 ClientId: a 32-character lowercase hex string."""
    regex = compile(r"^[a-f0-9]{32}$")
class ClientSecret(ClientId):
    """OAuth2.0 ClientSecret. Same format as ClientId."""
    pass
class HttpsUrl(AnyUrl):
    """HTTPS URL. Overrides AnyUrl to allow only the secure scheme."""
    allowed_schemes = {"https"}
class GrantType(Enum):
    """OAuth grant type. Only supported value is Code."""
    CODE = "Code"
class APIEnvironment(Enum):
    """OpenAPI environment. SIM and LIVE are currently supported."""
    SIM = "SIM"
    LIVE = "LIVE"
class AuthorizationCode(ConstrainedStr):
    """Authorization code: a GUID (8-4-4-4-12 lowercase hex)."""
    regex = compile(r"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$")
class RefreshToken(AuthorizationCode):
    """Refresh token. Same format as an authorization code (GUID)."""
    pass
class AuthorizationType(Enum):
    """Supported auth types: either an auth code or a refresh token."""
    CODE = "authorization_code"
    REFRESH_TOKEN = "refresh_token"
class OpenAPIAppConfig(BaseModel):
    """Dataclass for parsing and validating app config objects.

    Fields are populated from the Saxo-provided app-config JSON via the
    aliases; `streaming_url` and `env` are derived from `auth_endpoint`.
    """

    app_name: str = Field(..., alias="AppName")
    grant_type: GrantType = Field(..., alias="GrantType")
    client_id: ClientId = Field(..., alias="AppKey")
    client_secret: ClientSecret = Field(..., alias="AppSecret")
    auth_endpoint: HttpsUrl = Field(..., alias="AuthorizationEndpoint")
    token_endpoint: HttpsUrl = Field(..., alias="TokenEndpoint")
    api_base_url: HttpsUrl = Field(..., alias="OpenApiBaseUrl")
    streaming_url: Optional[HttpsUrl]  # derived, not read from config
    redirect_urls: List[AnyHttpUrl] = Field(..., alias="RedirectUrls")
    env: Optional[APIEnvironment]  # derived, not read from config

    @root_validator
    def validate_redirect_urls_contains_localhost(cls, values: Dict) -> Dict:
        """Redirect URLs must at least have 1 localhost available."""
        available_hosts = [url.host for url in values["redirect_urls"]]
        assert "localhost" in available_hosts, (
            "at least 1 'localhost' redirect URL required in app config - " f"hosts: {available_hosts}"
        )
        return values

    @root_validator
    def validate_port_configuration_redirect_urls(cls, values: Dict) -> Dict:
        """Port should always be configured for redirect URLs."""
        assert all([url.port for url in values["redirect_urls"]]), (
            "one or more redirect URLs have no port configured, which is required "
            "for grant type 'Code' - ensure a port is configured in the app config "
            "object for each URL (example: http://localhost:23432/redirect) - "
            f"URLs: {[str(url) for url in values['redirect_urls']]}"
        )
        return values

    @root_validator
    def strip_base_url_suffix(cls, values: Dict) -> Dict:
        """Strip forward slash form base URL."""
        values["api_base_url"] = values["api_base_url"].rstrip("/")
        return values

    @root_validator
    def derive_env_fields(cls, values: Dict) -> Dict:
        """Set environment and streaming URL based on environment.

        The environment is recognized by a substring of the auth endpoint.
        """
        if "sim.logonvalidation" in values["auth_endpoint"]:
            values["env"] = APIEnvironment.SIM
            values["streaming_url"] = SIM_STREAMING_URL
        if "live.logonvalidation" in values["auth_endpoint"]:
            values["env"] = APIEnvironment.LIVE
            values["streaming_url"] = LIVE_STREAMING_URL
        return values

    class Config(BaseConfig):
        """No extra config items required."""
        extra = Extra.forbid

    def __str__(self) -> str:
        """Print app config safely for logging without exposing client secret."""
        return str(self.dict(exclude={"client_secret"}))
class TokenData(BaseModel):
    """Dataclass for parsing token data returned by the token endpoint.

    The convenience fields (expiries, client/user keys, session id, write
    permission) are derived from the JWT access token's claims.
    """

    access_token: str
    token_type: str
    expires_in: int
    refresh_token: RefreshToken
    refresh_token_expires_in: int
    base_uri: Optional[HttpsUrl]
    access_token_expiry: Optional[int] = None
    refresh_token_expiry: Optional[int] = None
    client_key: Optional[str] = None
    user_key: Optional[str] = None
    session_id: Optional[str] = None
    write_permission: Optional[bool] = None

    @root_validator(pre=True)
    def set_fields_from_token_payload(cls, values: Dict) -> Dict:
        """Set fields from token claims."""
        token_bytes = values["access_token"].encode("utf-8")
        payload = token_bytes.split(b".")[1]
        # base64url JWT payloads come without padding; pad up to a multiple
        # of 4.  (The previous `len(payload) % 4` formula added the wrong
        # number of '=' and only worked because the decoder tolerates
        # excess padding.)
        padded = payload + b"=" * (-len(payload) % 4)
        decoded = urlsafe_b64decode(padded)
        claims = json.loads(decoded.decode("utf-8"))
        values["access_token_expiry"] = claims["exp"]
        values["refresh_token_expiry"] = int(time()) + values["refresh_token_expires_in"]
        values["client_key"] = claims["cid"]
        values["user_key"] = claims["uid"]
        values["session_id"] = claims["sid"]
        # Per original logic, the "oaa" claim value "77770" marks write
        # permission.
        values["write_permission"] = claims["oaa"] == "77770"
        return values

    def __str__(self) -> str:
        """Print token (claims) data safely for logging without exposing tokens themselves."""
        return str(self.dict(exclude={"access_token", "refresh_token"}))
class StreamingMessage(BaseModel):
    """A single message received on the streaming websocket."""

    msg_id: int
    ref_id: str
    data: Any
    ts: Optional[datetime] = None

    @validator("ts", pre=True, always=True)
    def set_ts_now(cls, v: datetime) -> datetime:
        """Stamp each message with the receive time, ignoring any given value."""
        return datetime.utcnow()
class NotLoggedInError(Exception):
    """Raised when an operation requires a logged-in client."""
    pass
class TokenExpiredError(Exception):
    """Raised when the token has expired and can no longer be used."""
    pass
class APIResponseError(Exception):
    """Raised when an error occurs while executing the OpenAPI request."""
    pass
|
/saxo_apy-0.2.8-py3-none-any.whl/saxo_apy/models.py
| 0.84607 | 0.184125 |
models.py
|
pypi
|
from abc import abstractmethod
from .baseorder import BaseOrder
import saxo_openapi.definitions.orders as OD
from .helper import order_duration_spec
class OnFill(BaseOrder):
    """Base class for onFill (contingent) order requests.

    Subclasses set the concrete OrderType and may narrow ALLOWED_DT.
    """

    # Order duration types valid for on-fill orders.
    ALLOWED_DT = [OD.OrderDurationType.GoodTillCancel,
                  OD.OrderDurationType.GoodTillDate,
                  OD.OrderDurationType.DayOrder]

    @abstractmethod
    def __init__(self,
                 OrderType,
                 ManualOrder=False,
                 OrderDurationType=OD.OrderDurationType.GoodTillCancel,
                 GTDDate=None):
        super(OnFill, self).__init__()
        # Validate against the class attribute instead of a duplicated
        # literal list, so a subclass overriding ALLOWED_DT is honored
        # and the check stays consistent with order_duration_spec below.
        if OrderDurationType not in self.ALLOWED_DT:
            raise ValueError("OrderDurationType: {} invalid".format(
                OrderDurationType))
        self._data.update({"ManualOrder": ManualOrder})
        self._data.update({"OrderType": OrderType})
        self._data.update({"OrderDuration":
                           order_duration_spec(OrderDurationType,
                                               self.ALLOWED_DT,
                                               GTDDate)})
class TakeProfitDetails(OnFill):
    """Representation of the specification for a TakeProfitOrder.

    It is typically used to specify 'take profit details' for the
    'TakeProfitOnFill' parameter of an OrderRequest. From the
    details a Limit order will be created with the specified *price*.
    The order gets placed when the underlying order gets filled.

    The other way to create a TakeProfitOrder is to create it afterwards
    on an existing trade. In that case use TakeProfitOrderRequest on
    the trade.
    """

    def __init__(self,
                 price,
                 ManualOrder=False,
                 OrderDurationType=OD.OrderDurationType.GoodTillCancel,
                 GTDDate=None):
        """Instantiate TakeProfitDetails.

        Parameters
        ----------
        price : float or string (required)
            the price to trigger the take profit order
        OrderDurationType : OrderDurationType (required)
            the duration, default is: OrderDurationType.GoodTillCancel
        ManualOrder: bool (required)
            flag to identify if an order is from an automated origin,
            default: False
        GTDDate : string or datetime (optional)
            GTD-datetime is required if OrderDurationType.GoodTillDate
        """
        # A take-profit on-fill order is always a Limit order.
        OrderType = OD.OrderType.Limit
        super(TakeProfitDetails, self).__init__(
            OrderType=OrderType,
            OrderDurationType=OrderDurationType,
            ManualOrder=ManualOrder,
            GTDDate=GTDDate)
        self._data.update({"OrderPrice": price})
class StopLossDetails(OnFill):
    """Representation of the specification for a StopLossOrder.

    It is typically used to specify 'stop loss details' for the
    'StopLossOnFill' parameter of an OrderRequest. From the
    details a Stop order will be created with the specified *price*.
    The order gets placed when the underlying order gets filled.

    The other way to create a StopLossOrder is to create it afterwards
    on an existing trade. In that case use StopLossOrderRequest on
    the trade.
    """

    def __init__(self,
                 price,
                 ManualOrder=False,
                 OrderDurationType=OD.OrderDurationType.GoodTillCancel,
                 GTDDate=None):
        """Instantiate StopLossDetails.

        Parameters
        ----------
        price : float or string (required)
            the price to trigger the stop loss order
        OrderDurationType : OrderDurationType (required)
            the duration, default is: OrderDurationType.GoodTillCancel
        ManualOrder: bool (required)
            flag to identify if an order is from an automated origin,
            default: False
        GTDDate : string or datetime (optional)
            GTD-datetime is required if OrderDurationType.GoodTillDate
        """
        # A stop-loss on-fill order is always a Stop order.
        OrderType = OD.OrderType.Stop
        super(StopLossDetails, self).__init__(
            OrderDurationType=OrderDurationType,
            ManualOrder=ManualOrder,
            OrderType=OrderType,
            GTDDate=GTDDate)
        self._data.update({"OrderPrice": price})
|
/saxo_openapi-0.6.0.tar.gz/saxo_openapi-0.6.0/saxo_openapi/contrib/orders/onfill.py
| 0.880226 | 0.342654 |
onfill.py
|
pypi
|
from .baseorder import BaseOrder
from .helper import direction_from_amount, order_duration_spec
import saxo_openapi.definitions.orders as OD
from .mixin import OnFillHnd
class LimitOrder(BaseOrder, OnFillHnd):
    """create a LimitOrder.

    LimitOrder is used to build the body for a LimitOrder. The body can be
    used to pass to the Order endpoint.
    """

    # allowed OrderDurationTypes:
    ALLOWED_DT = [OD.OrderDurationType.DayOrder,
                  OD.OrderDurationType.GoodTillDate,
                  OD.OrderDurationType.GoodTillCancel]

    def __init__(self,
                 Uic,
                 Amount,
                 AssetType,
                 OrderPrice,
                 ManualOrder=False,
                 AmountType=OD.AmountType.Quantity,
                 TakeProfitOnFill=None,
                 StopLossOnFill=None,
                 TrailingStopLossOnFill=None,
                 OrderDurationType=OD.OrderDurationType.DayOrder,
                 GTDDate=None):
        """
        Instantiate a LimitOrder.

        Parameters
        ----------
        Uic: int (required)
            the Uic of the instrument to trade
        Amount: decimal (required)
            the number of lots/shares/contracts or a monetary value
            if amountType is set to CashAmount
        OrderPrice: decimal (required)
            the price indicating the limitprice
        AssetType: string (required)
            the assettype for the Uic
        ManualOrder: bool (required)
            flag to identify if an order is from an automated origin,
            default: False
        AmountType: AmountType (optional)
            the amountType, defaults to Quantity, see AmountType for
            other options
        TakeProfitOnFill: TakeProfitDetails instance or dict
            the take-profit order specification
        StopLossOnFill: StopLossDetails instance or dict
            the stoploss order specification
        TrailingStopLossOnFill: TrailingStopLossDetails instance or dict
            the trailing stoploss order specification
        OrderDurationType: string, default DayOrder
            the order duration type, check SAXO Bank specs. for details
        GTDDate: datetime string (required if order duration is GoodTillDate)
            the GTD-datetime

        Example
        -------
        >>> import json
        >>> from saxo_openapi import API
        >>> import saxo_openapi.endpoints.trading as tr
        >>> from saxo_openapi.contrib.orders import LimitOrder
        >>>
        >>> lo = LimitOrder(Uic=21,
        ...                 AssetType=OD.AssetType.FxSpot,
        ...                 Amount=10000,
        ...                 OrderPrice=1.1025)
        >>> print(json.dumps(lo.data, indent=2))
        {
          "Uic": 21,
          "AssetType": "FxSpot",
          "Amount": 10000,
          "OrderPrice": 1.1025,
          "BuySell": "Buy",
          "OrderType": "Limit",
          "ManualOrder": false,
          "AmountType": "Quantity",
          "OrderDuration": {
            "DurationType": "DayOrder"
          }
        }
        >>> # now we have the order specification, create the order request
        >>> r = tr.orders.Order(data=lo.data)
        >>> # perform the request
        >>> rv = client.request(r)
        >>> print(rv)
        >>> print(json.dumps(rv, indent=4))
        {
            "OrderId": "76697286"
        }
        """
        super(LimitOrder, self).__init__()
        # by default for a Limit order
        da = {
            'OrderType': OD.OrderType.Limit,
            'AmountType': AmountType,
        }
        da.update({'OrderDuration': order_duration_spec(OrderDurationType,
                                                        self.ALLOWED_DT,
                                                        GTDDate)})
        # required
        self._data.update({"Uic": Uic})
        self._data.update({"AssetType": AssetType})
        # BuySell is derived from the sign of Amount; the body carries the
        # absolute amount.
        self._data.update({"Amount": abs(Amount)})
        self._data.update({"OrderPrice": OrderPrice})
        self._data.update({"BuySell": direction_from_amount(Amount)})
        self._data.update({"ManualOrder": ManualOrder})
        self._data.update(da)
        # Handle possible onFill orders via the mixin
        self.hndOnFill(TakeProfitOnFill=TakeProfitOnFill,
                       StopLossOnFill=StopLossOnFill,
                       TrailingStopLossOnFill=TrailingStopLossOnFill)

    @property
    def data(self):
        """data property.

        return the JSON body.
        """
        return super(LimitOrder, self).data
class LimitOrderFxSpot(LimitOrder):
    """LimitOrderFxSpot - LimitOrder for FxSpot only.

    The LimitOrderFxSpot lacks the AssetType parameter and only serves
    the AssetType FxSpot.
    """

    def __init__(self,
                 Uic,
                 Amount,
                 OrderPrice,
                 ManualOrder=False,
                 AmountType=OD.AmountType.Quantity,
                 TakeProfitOnFill=None,
                 StopLossOnFill=None,
                 TrailingStopLossOnFill=None,
                 OrderDurationType=OD.OrderDurationType.DayOrder,
                 GTDDate=None):
        """
        Instantiate a LimitOrderFxSpot.

        Parameters
        ----------
        Uic: int (required)
            the Uic of the instrument to trade
        Amount: decimal (required)
            the number of lots/shares/contracts or a monetary value
            if amountType is set to CashAmount
        OrderPrice: decimal (required)
            the price indicating the limitprice
        AmountType: AmountType (optional)
            the amountType, defaults to Quantity, see AmountType for
            other options
        ManualOrder: bool (required)
            flag to identify if an order is from an automated origin,
            default: False
        TakeProfitOnFill: TakeProfitDetails instance or dict
            the take-profit order specification
        StopLossOnFill: StopLossDetails instance or dict
            the stoploss order specification
        TrailingStopLossOnFill: TrailingStopLossDetails instance or dict
            the trailing stoploss order specification
        OrderDurationType: string, default DayOrder
            the order duration type, check SAXO Bank specs. for details
        GTDDate: datetime string (required if order duration is GoodTillDate)
            the GTD-datetime

        Example
        -------
        >>> from saxo_openapi import API
        >>> from saxo_openapi.contrib.orders import (
        ...     tie_account_to_order,
        ...     LimitOrderFxSpot)
        >>> token = "..."
        >>> client = API(access_token=token)
        >>> order = tie_account_to_order(
        ...     AccountKey,
        ...     LimitOrderFxSpot(Uic=21, Amount=25000, OrderPrice=1.1025))
        >>> r = tr.orders.Order(data=order)
        >>> rv = client.request(r)
        >>> print(json.dumps(rv, indent=2))
        {
          "OrderId": "76703544"
        }
        """
        # Delegate to LimitOrder with the AssetType pinned to FxSpot.
        super(LimitOrderFxSpot, self).__init__(
            Uic=Uic,
            Amount=Amount,
            OrderPrice=OrderPrice,
            AmountType=AmountType,
            ManualOrder=ManualOrder,
            AssetType=OD.AssetType.FxSpot,
            OrderDurationType=OrderDurationType,
            TakeProfitOnFill=TakeProfitOnFill,
            StopLossOnFill=StopLossOnFill,
            TrailingStopLossOnFill=TrailingStopLossOnFill,
            GTDDate=GTDDate)
class LimitOrderStock(LimitOrder):
    """LimitOrderStock - LimitOrder for Stock only.

    The LimitOrderStock lacks the AssetType parameter and only serves
    the AssetType Stock.
    """

    def __init__(self,
                 Uic,
                 Amount,
                 OrderPrice,
                 AmountType=OD.AmountType.Quantity,
                 ManualOrder=False,
                 TakeProfitOnFill=None,
                 StopLossOnFill=None,
                 TrailingStopLossOnFill=None,
                 OrderDurationType=OD.OrderDurationType.DayOrder,
                 GTDDate=None):
        """
        Instantiate a LimitOrderStock.

        Parameters
        ----------
        Uic: int (required)
            the Uic of the instrument to trade
        Amount: decimal (required)
            the number of lots/shares/contracts or a monetary value
            if amountType is set to CashAmount
        OrderPrice: decimal (required)
            the price indicating the limitprice
        AmountType: AmountType (optional)
            the amountType, defaults to Quantity, see AmountType for
            other options
        ManualOrder: bool (required)
            flag to identify if an order is from an automated origin,
            default: False
        TakeProfitOnFill: TakeProfitDetails instance or dict
            the take-profit order specification
        StopLossOnFill: StopLossDetails instance or dict
            the stoploss order specification
        TrailingStopLossOnFill: TrailingStopLossDetails instance or dict
            the trailing stoploss order specification
        OrderDurationType: string, default DayOrder
            the order duration type, check SAXO Bank specs. for details
        GTDDate: datetime string (required if order duration is GoodTillDate)
            the GTD-datetime

        Example
        -------
        >>> from saxo_openapi import API
        >>> from saxo_openapi.contrib.orders import (
        ...     tie_account_to_order,
        ...     LimitOrderStock)
        >>> token = "..."
        >>> client = API(access_token=token)
        >>> order = tie_account_to_order(
        ...     AccountKey,
        ...     LimitOrderStock(Uic=16350, Amount=1000, OrderPrice=28.00))
        >>> r = tr.orders.Order(data=order)
        >>> rv = client.request(r)
        >>> print(json.dumps(rv, indent=2))
        {
          "OrderId": "76703539"
        }
        """
        # Delegate to LimitOrder with the AssetType pinned to Stock.
        super(LimitOrderStock, self).__init__(
            Uic=Uic,
            Amount=Amount,
            OrderPrice=OrderPrice,
            AmountType=AmountType,
            ManualOrder=ManualOrder,
            AssetType=OD.AssetType.Stock,
            OrderDurationType=OrderDurationType,
            TakeProfitOnFill=TakeProfitOnFill,
            StopLossOnFill=StopLossOnFill,
            TrailingStopLossOnFill=TrailingStopLossOnFill,
            GTDDate=GTDDate)
|
/saxo_openapi-0.6.0.tar.gz/saxo_openapi-0.6.0/saxo_openapi/contrib/orders/limitorder.py
| 0.863464 | 0.305717 |
limitorder.py
|
pypi
|
from .baseorder import BaseOrder
from .helper import direction_from_amount
import saxo_openapi.definitions.orders as OD
from .mixin import OnFillHnd
class MarketOrder(BaseOrder, OnFillHnd):
    """create a MarketOrder.

    MarketOrder is used to build the body for a MarketOrder. The body can be
    used to pass to the Order endpoint.
    """

    def __init__(self,
                 Uic,
                 Amount,
                 AssetType,
                 ManualOrder=False,
                 AmountType=OD.AmountType.Quantity,
                 TakeProfitOnFill=None,
                 StopLossOnFill=None,
                 TrailingStopLossOnFill=None):
        """
        Instantiate a MarketOrder.

        Parameters
        ----------
        Uic: int (required)
            the Uic of the instrument to trade
        Amount: decimal (required)
            the number of lots/shares/contracts or a monetary value
            if amountType is set to CashAmount. A value > 0 means 'buy',
            a value < 0 means 'sell'
        AssetType: string (required)
            the assettype for the Uic
        ManualOrder: bool (required)
            flag to identify if an order is from an automated origin,
            default: False
        AmountType: AmountType (optional)
            the amountType, defaults to Quantity, see AmountType for
            other options
        TakeProfitOnFill: TakeProfitDetails instance or dict
            the take-profit order specification
        StopLossOnFill: StopLossDetails instance or dict
            the stoploss order specification
        TrailingStopLossOnFill: TrailingStopLossDetails instance or dict
            the trailing stoploss order specification

        Example
        -------
        >>> import json
        >>> from saxo_openapi import API
        >>> import saxo_openapi.endpoints.trading as tr
        >>> from saxo_openapi.contrib.orders import MarketOrder
        >>> # buy 10k EURUSD (Uic=21)
        >>> mo = MarketOrder(Uic=21,
        ...                  AssetType=OD.AssetType.FxSpot,
        ...                  Amount=10000)
        >>> print(json.dumps(mo.data, indent=4))
        {
            "Uic": 21,
            "AssetType": "FxSpot",
            "Amount": 10000,
            "BuySell": "Buy",
            "OrderType": "Market",
            "AmountType": "Quantity",
            "ManualOrder": false,
            "OrderDuration": {
                "DurationType": "DayOrder"
            }
        }
        >>> # now we have the order specification, create the order request
        >>> r = tr.orders.Order(data=mo.data)
        >>> # perform the request
        >>> rv = client.request(r)
        >>> print(rv)
        >>> print(json.dumps(rv, indent=4))
        {
            "OrderId": "76697286"
        }
        """
        super(MarketOrder, self).__init__()
        # by default for a Market order
        da = {
            'OrderType': OD.OrderType.Market,
            'AmountType': AmountType,
            'OrderDuration': {  # the default
                'DurationType': OD.OrderDurationType.DayOrder
            },
        }
        # required
        self._data.update({"Uic": Uic})
        self._data.update({"AssetType": AssetType})
        # BuySell is derived from the sign of Amount; the body carries the
        # absolute amount.
        self._data.update({"Amount": abs(Amount)})
        self._data.update({"BuySell": direction_from_amount(Amount)})
        self._data.update({"ManualOrder": ManualOrder})
        self._data.update(da)
        # Handle possible onFill orders via the mixin
        self.hndOnFill(TakeProfitOnFill=TakeProfitOnFill,
                       StopLossOnFill=StopLossOnFill,
                       TrailingStopLossOnFill=TrailingStopLossOnFill)

    @property
    def data(self):
        """data property.

        return the JSON body.
        """
        return super(MarketOrder, self).data
class MarketOrderFxSpot(MarketOrder):
    """MarketOrderFxSpot - MarketOrder for FxSpot only.

    The MarketOrderFxSpot lacks the AssetType parameter and only serves
    the AssetType FxSpot.
    """

    def __init__(self,
                 Uic,
                 Amount,
                 ManualOrder=False,
                 AmountType=OD.AmountType.Quantity,
                 TakeProfitOnFill=None,
                 StopLossOnFill=None,
                 TrailingStopLossOnFill=None):
        """
        Instantiate a MarketOrderFxSpot.

        Parameters
        ----------
        Uic: int (required)
            the Uic of the instrument to trade
        Amount: decimal (required)
            the number of lots/shares/contracts or a monetary value
            if amountType is set to CashAmount
        AmountType: AmountType (optional)
            the amountType, defaults to Quantity, see AmountType for
            other options
        ManualOrder: bool (required)
            flag to identify if an order is from an automated origin,
            default: False
        TakeProfitOnFill: TakeProfitDetails instance or dict
            the take-profit order specification
        StopLossOnFill: StopLossDetails instance or dict
            the stoploss order specification
        TrailingStopLossOnFill: TrailingStopLossDetails instance or dict
            the trailing stoploss order specification

        Example
        -------
        >>> from saxo_openapi import API
        >>> from saxo_openapi.contrib.orders import (
        ...     tie_account_to_order,
        ...     MarketOrderFxSpot)
        >>> token = "..."
        >>> client = API(access_token=token)
        >>> order = tie_account_to_order(
        ...     AccountKey,
        ...     MarketOrderFxSpot(Uic=21, Amount=25000))
        >>> r = tr.orders.Order(data=order)
        >>> rv = client.request(r)
        >>> print(json.dumps(rv, indent=2))
        {
          "OrderId": "76703544"
        }
        """
        # Delegate to MarketOrder with the AssetType pinned to FxSpot.
        super(MarketOrderFxSpot, self).__init__(
            Uic=Uic,
            Amount=Amount,
            AmountType=AmountType,
            ManualOrder=ManualOrder,
            AssetType=OD.AssetType.FxSpot,
            TakeProfitOnFill=TakeProfitOnFill,
            StopLossOnFill=StopLossOnFill,
            TrailingStopLossOnFill=TrailingStopLossOnFill)
class MarketOrderStock(MarketOrder):
    """MarketOrderStock - a MarketOrder preset for Stock.

    Convenience subclass of MarketOrder without the AssetType
    parameter: it always places orders with AssetType Stock.
    """

    def __init__(self,
                 Uic,
                 Amount,
                 ManualOrder=False,
                 AmountType=OD.AmountType.Quantity,
                 TakeProfitOnFill=None,
                 StopLossOnFill=None,
                 TrailingStopLossOnFill=None):
        """
        Instantiate a MarketOrderStock.

        Parameters
        ----------
        Uic: int (required)
            the Uic of the instrument to trade

        Amount: decimal (required)
            the number of lots/shares/contracts, or a monetary value
            if AmountType is set to CashAmount

        AmountType: AmountType (optional)
            the amountType, defaults to Quantity, see AmountType for
            other options

        ManualOrder: bool (optional)
            flag to identify if an order is from an automated origin,
            default: False

        TakeProfitOnFill: TakeProfitDetails instance or dict (optional)
            the take-profit order specification

        StopLossOnFill: StopLossDetails instance or dict (optional)
            the stop-loss order specification

        TrailingStopLossOnFill: TrailingStopLossDetails instance or dict
            (optional) the trailing-stop-loss order specification

        Example
        -------
        >>> from saxo_openapi import API
        >>> from saxo_openapi.contrib.orders import (
        ...     tie_account_to_order,
        ...     MarketOrderStock)
        >>> token = "..."
        >>> client = API(access_token=token)
        >>> order = tie_account_to_order(
        ...     AccountKey,
        ...     MarketOrderStock(Uic=16350, Amount=1000))
        >>> r = tr.orders.Order(data=order)
        >>> rv = client.request(r)
        >>> print(json.dumps(rv, indent=2))
        {
          "OrderId": "76703539"
        }
        """
        # Pin the AssetType to Stock and forward everything else
        # unchanged to MarketOrder.
        kwargs = dict(
            Uic=Uic,
            Amount=Amount,
            AmountType=AmountType,
            ManualOrder=ManualOrder,
            AssetType=OD.AssetType.Stock,
            TakeProfitOnFill=TakeProfitOnFill,
            StopLossOnFill=StopLossOnFill,
            TrailingStopLossOnFill=TrailingStopLossOnFill,
        )
        super(MarketOrderStock, self).__init__(**kwargs)
|
/saxo_openapi-0.6.0.tar.gz/saxo_openapi-0.6.0/saxo_openapi/contrib/orders/marketorder.py
| 0.836788 | 0.295078 |
marketorder.py
|
pypi
|
from datetime import datetime
import saxo_openapi.definitions.orders as OD
def direction_from_amount(Amount):
    """direction_from_amount - derive the trade direction from an amount.

    A positive Amount maps to Buy; any other amount maps to Sell.
    """
    if Amount > 0:
        return OD.Direction.Buy
    return OD.Direction.Sell
def direction_invert(direction):
    """direction_invert - return the opposite direction: Buy <-> Sell.

    Raises a ValueError for any value other than Buy or Sell.
    """
    if direction == OD.Direction.Buy:
        return OD.Direction.Sell
    if direction == OD.Direction.Sell:
        return OD.Direction.Buy
    raise ValueError("wrong value for direction: {}".format(direction))
def tie_account_to_order(AccountKey, order):
    """tie_account_to_order - inject the AccountKey in the orderbody.

    An order specification is 'anonymous'. To apply it to an account it
    needs the AccountKey of that account. The caller's input is left
    unmodified: a copy of the body is returned with the key injected,
    and any related orders under 'Orders' are copied as well before the
    key is added to them.

    Parameters
    ----------
    AccountKey: string (required)
        the accountkey

    order: dict representing an orderbody or <...>Order instance
        the details of the order.
    """
    _r = order.copy() if isinstance(order, dict) else order.data.copy()
    # add the key to the orderbody, but ONLY if this is not a
    # positionclose body
    if "PositionId" not in _r:
        _r.update({'AccountKey': AccountKey})
    # and add it to related orders in Orders (if any); each related
    # order is copied first: the top-level .copy() above is shallow, so
    # updating the nested dicts in place would mutate the caller's
    # input as well
    if 'Orders' in _r:
        _r['Orders'] = [dict(o, AccountKey=AccountKey)
                        for o in _r['Orders']]
    return _r
def order_duration_spec(OrderDurationType, allowedDT, GTDDate=None):
    """order_duration_spec - create a SAXO order duration specification.

    Return a dict containing the definition of the duration. For a
    GoodTillDate duration the GTDDate is mandatory and the definition
    is extended with the expiration timestamp.

    Parameters
    ----------
    OrderDurationType: OrderDurationType (required)
        the duration type, for instance GoodTillCancel or GoodTillDate

    allowedDT: container of OrderDurationType values (required)
        the duration types acceptable in the calling context; a
        ValueError is raised when OrderDurationType is not one of them

    GTDDate: string or datetime (required if DurationType == GTD)
        the GTD-datetime, as "%Y-%m-%d", "%Y-%m-%dT%H:%M" or a
        datetime instance

    Raises
    ------
    ValueError
        when OrderDurationType is not in allowedDT, when GTDDate is
        missing for a GoodTillDate duration, or when a GTDDate string
        matches neither supported format.

    Examples
    --------
    >>> allowed = [OD.OrderDurationType.GoodTillDate,
    ...            OD.OrderDurationType.GoodTillCancel]
    >>> duration = OD.OrderDurationType.GoodTillDate
    >>> d = order_duration_spec(duration, allowed, "2017-12-12")
    >>> print(json.dumps(d, indent=2))
    {
       "DurationType": "GoodTillDate",
       "ExpirationDateContainsTime": true,
       "ExpirationDateTime": "2017-12-12T00:00"
    }

    # Or by using datetime ...
    >>> d = order_duration_spec(duration, allowed, datetime(2017, 12, 12))
    >>> print(json.dumps(d, indent=2))
    {
       "DurationType": "GoodTillDate",
       "ExpirationDateContainsTime": true,
       "ExpirationDateTime": "2017-12-12T00:00"
    }

    >>> duration = OD.OrderDurationType.GoodTillCancel
    >>> d = order_duration_spec(duration, allowed)
    >>> print(json.dumps(d, indent=2))
    {
       "DurationType": "GoodTillCancel"
    }
    """
    # validate before building anything
    if OrderDurationType not in allowedDT:
        raise ValueError("OrderDurationType: {} is not supported".format(
            OrderDurationType))

    odspec = dict({'DurationType': OrderDurationType})

    if OrderDurationType == OD.OrderDurationType.GoodTillDate:
        if not GTDDate:
            raise ValueError("Missing GTDDate")

        _gtdtime = GTDDate
        if isinstance(GTDDate, str):
            try:
                _gtdtime = datetime.strptime(GTDDate, "%Y-%m-%d")
            except ValueError:
                # a ValueError is raised in case of wrong format; retry
                # with the variant that includes a time component
                _gtdtime = datetime.strptime(GTDDate, "%Y-%m-%dT%H:%M")

        # normalize both datetime input and parsed strings to the
        # wire format expected by the API
        _gtdtime = _gtdtime.strftime("%Y-%m-%dT%H:%M")
        odspec.update({'ExpirationDateContainsTime': True,
                       'ExpirationDateTime': _gtdtime})

    return odspec
|
/saxo_openapi-0.6.0.tar.gz/saxo_openapi-0.6.0/saxo_openapi/contrib/orders/helper.py
| 0.77768 | 0.4436 |
helper.py
|
pypi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.