| column | type | min | max | nulls |
|---|---|---|---|---|
| hexsha | string | 40 | 40 | |
| size | int64 | 5 | 2.06M | |
| ext | string (10 classes) | | | |
| lang | string (1 class) | | | |
| max_stars_repo_path | string | 3 | 248 | |
| max_stars_repo_name | string | 5 | 125 | |
| max_stars_repo_head_hexsha | string | 40 | 78 | |
| max_stars_repo_licenses | list | 1 | 10 | |
| max_stars_count | int64 | 1 | 191k | ⌀ |
| max_stars_repo_stars_event_min_datetime | string | 24 | 24 | ⌀ |
| max_stars_repo_stars_event_max_datetime | string | 24 | 24 | ⌀ |
| max_issues_repo_path | string | 3 | 248 | |
| max_issues_repo_name | string | 5 | 125 | |
| max_issues_repo_head_hexsha | string | 40 | 78 | |
| max_issues_repo_licenses | list | 1 | 10 | |
| max_issues_count | int64 | 1 | 67k | ⌀ |
| max_issues_repo_issues_event_min_datetime | string | 24 | 24 | ⌀ |
| max_issues_repo_issues_event_max_datetime | string | 24 | 24 | ⌀ |
| max_forks_repo_path | string | 3 | 248 | |
| max_forks_repo_name | string | 5 | 125 | |
| max_forks_repo_head_hexsha | string | 40 | 78 | |
| max_forks_repo_licenses | list | 1 | 10 | |
| max_forks_count | int64 | 1 | 105k | ⌀ |
| max_forks_repo_forks_event_min_datetime | string | 24 | 24 | ⌀ |
| max_forks_repo_forks_event_max_datetime | string | 24 | 24 | ⌀ |
| content | string | 5 | 2.06M | |
| avg_line_length | float64 | 1 | 1.02M | |
| max_line_length | int64 | 3 | 1.03M | |
| alphanum_fraction | float64 | 0 | 1 | |
| count_classes | int64 | 0 | 1.6M | |
| score_classes | float64 | 0 | 1 | |
| count_generators | int64 | 0 | 651k | |
| score_generators | float64 | 0 | 1 | |
| count_decorators | int64 | 0 | 990k | |
| score_decorators | float64 | 0 | 1 | |
| count_async_functions | int64 | 0 | 235k | |
| score_async_functions | float64 | 0 | 1 | |
| count_documentation | int64 | 0 | 1.04M | |
| score_documentation | float64 | 0 | 1 | |
6fcc2a2fdacd2a22f891e4c2ee4f20a75d5a6130 | 1,398 | py | Python | sa/profiles/Cisco/SCOS/profile.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | ["BSD-3-Clause"] | 84 | 2017-10-22T11:01:39.000Z | 2022-02-27T03:43:48.000Z | sa/profiles/Cisco/SCOS/profile.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | ["BSD-3-Clause"] | 22 | 2017-12-11T07:21:56.000Z | 2021-09-23T02:53:50.000Z | sa/profiles/Cisco/SCOS/profile.py | prorevizor/noc | 37e44b8afc64318b10699c06a1138eee9e7d6a4e | ["BSD-3-Clause"] | 23 | 2017-12-06T06:59:52.000Z | 2022-02-24T00:02:25.000Z |
# ---------------------------------------------------------------------
# Vendor: Cisco
# OS: SCOS
# ---------------------------------------------------------------------
# Copyright (C) 2007-2012 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# NOC modules
from noc.core.profile.base import BaseProfile
class Profile(BaseProfile):
name = "Cisco.SCOS"
pattern_more = [(r"--More--", " "), (r"\?\s*\[confirm\]", "\n")]
pattern_unprivileged_prompt = r"^\S+?>"
pattern_syntax_error = r"% invalid input |% Ambiguous command:|% Incomplete command."
# command_disable_pager = "terminal length 0"
command_super = "enable"
command_enter_config = "configure"
command_leave_config = "exit"
command_save_config = "copy running-config startup-config\n"
pattern_prompt = r"^(?P<hostname>[a-zA-Z0-9]\S*?)(?:-\d+)?(?:\(config[^\)]*\))?#"
requires_netmask_conversion = True
convert_mac = BaseProfile.convert_mac_to_cisco
    def convert_interface_name(self, interface):
        # Strip the long IOS-style prefix and keep the port designator:
        # len("FastEthernet") == 12, len("GigabitEthernet") == 15,
        # len("TenGigabitEthernet") == 18.
        if interface.startswith("Fast"):
            return "Fa " + interface[12:].strip()
        elif interface.startswith("Giga"):
            return "Gi " + interface[15:].strip()
        elif interface.startswith("Ten"):
            return "Te " + interface[18:].strip()
        else:
            return interface
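# Hypothetical illustration (not part of the original profile) of the mapping
# performed by convert_interface_name above:
#   "FastEthernet0/1"        -> "Fa 0/1"
#   "GigabitEthernet1/0/24"  -> "Gi 1/0/24"
#   "TenGigabitEthernet0/1"  -> "Te 0/1"
#   anything else            -> returned unchanged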
| 38.833333 | 89 | 0.537196 | 1,019 | 0.728898 | 0 | 0 | 0 | 0 | 0 | 0 | 648 | 0.463519 |
6fccd64c6d5968278f8f06dd0c3bc69ffe2d9072 | 4,825 | py | Python | airflow/providers/amazon/aws/example_dags/example_hive_to_dynamodb.py | npodewitz/airflow | 511ea702d5f732582d018dad79754b54d5e53f9d | ["Apache-2.0"] | 8,092 | 2016-04-27T20:32:29.000Z | 2019-01-05T07:39:33.000Z | airflow/providers/amazon/aws/example_dags/example_hive_to_dynamodb.py | npodewitz/airflow | 511ea702d5f732582d018dad79754b54d5e53f9d | ["Apache-2.0"] | 2,961 | 2016-05-05T07:16:16.000Z | 2019-01-05T08:47:59.000Z | airflow/providers/amazon/aws/example_dags/example_hive_to_dynamodb.py | npodewitz/airflow | 511ea702d5f732582d018dad79754b54d5e53f9d | ["Apache-2.0"] | 3,546 | 2016-05-04T20:33:16.000Z | 2019-01-05T05:14:26.000Z |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This DAG will not work unless you create an Amazon EMR cluster running
Apache Hive and copy data into it following steps 1-4 (inclusive) here:
https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/EMRforDynamoDB.Tutorial.html
"""
import os
from datetime import datetime
from airflow import DAG
from airflow.decorators import task
from airflow.models import Connection
from airflow.providers.amazon.aws.hooks.dynamodb import DynamoDBHook
from airflow.providers.amazon.aws.transfers.hive_to_dynamodb import HiveToDynamoDBOperator
from airflow.utils import db
DYNAMODB_TABLE_NAME = 'example_hive_to_dynamodb_table'
HIVE_CONNECTION_ID = os.getenv('HIVE_CONNECTION_ID', 'hive_on_emr')
HIVE_HOSTNAME = os.getenv('HIVE_HOSTNAME', 'ec2-123-45-67-890.compute-1.amazonaws.com')
# These values assume you set up the Hive data source following the link above.
DYNAMODB_TABLE_HASH_KEY = 'feature_id'
HIVE_SQL = 'SELECT feature_id, feature_name, feature_class, state_alpha FROM hive_features'
@task
def create_dynamodb_table():
client = DynamoDBHook(client_type='dynamodb').conn
client.create_table(
TableName=DYNAMODB_TABLE_NAME,
KeySchema=[
{'AttributeName': DYNAMODB_TABLE_HASH_KEY, 'KeyType': 'HASH'},
],
AttributeDefinitions=[
{'AttributeName': DYNAMODB_TABLE_HASH_KEY, 'AttributeType': 'N'},
],
ProvisionedThroughput={'ReadCapacityUnits': 20, 'WriteCapacityUnits': 20},
)
# DynamoDB table creation is nearly, but not quite, instantaneous.
# Wait for the table to be active to avoid race conditions writing to it.
waiter = client.get_waiter('table_exists')
waiter.wait(TableName=DYNAMODB_TABLE_NAME, WaiterConfig={'Delay': 1})
@task
def get_dynamodb_item_count():
"""
A DynamoDB table has an ItemCount value, but it is only updated every six hours.
To verify this DAG worked, we will scan the table and count the items manually.
"""
table = DynamoDBHook(resource_type='dynamodb').conn.Table(DYNAMODB_TABLE_NAME)
response = table.scan(Select='COUNT')
item_count = response['Count']
while 'LastEvaluatedKey' in response:
response = table.scan(Select='COUNT', ExclusiveStartKey=response['LastEvaluatedKey'])
item_count += response['Count']
print(f'DynamoDB table contains {item_count} items.')
# Included for sample purposes only; in production you wouldn't delete
# the table you just backed your data up to. Using 'all_done' so even
# if an intermediate step fails, the DAG will clean up after itself.
@task(trigger_rule='all_done')
def delete_dynamodb_table():
DynamoDBHook(client_type='dynamodb').conn.delete_table(TableName=DYNAMODB_TABLE_NAME)
# Included for sample purposes only; in production this should
# be configured in the environment and not be part of the DAG.
# Note: The 'hiveserver2_default' connection will not work if Hive
# is hosted on EMR. You must set the host name of the connection
# to match your EMR cluster's hostname.
@task
def configure_hive_connection():
db.merge_conn(
Connection(
conn_id=HIVE_CONNECTION_ID,
conn_type='hiveserver2',
host=HIVE_HOSTNAME,
port=10000,
)
)
with DAG(
dag_id='example_hive_to_dynamodb',
schedule_interval=None,
start_date=datetime(2021, 1, 1),
tags=['example'],
catchup=False,
) as dag:
# Add the prerequisites docstring to the DAG in the UI.
dag.doc_md = __doc__
# [START howto_transfer_hive_to_dynamodb]
backup_to_dynamodb = HiveToDynamoDBOperator(
task_id='backup_to_dynamodb',
hiveserver2_conn_id=HIVE_CONNECTION_ID,
sql=HIVE_SQL,
table_name=DYNAMODB_TABLE_NAME,
table_keys=[DYNAMODB_TABLE_HASH_KEY],
)
# [END howto_transfer_hive_to_dynamodb]
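    # The ``>>`` chain below declares task ordering: configure the Hive
    # connection first, then create the DynamoDB table, run the transfer,
    # verify the item count, and finally drop the table (the cleanup task
    # uses trigger_rule='all_done', so it runs even if an earlier step fails).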
(
configure_hive_connection()
>> create_dynamodb_table()
>> backup_to_dynamodb
>> get_dynamodb_item_count()
>> delete_dynamodb_table()
)
| 36.278195 | 96 | 0.732228 | 0 | 0 | 0 | 0 | 1,732 | 0.358964 | 0 | 0 | 2,613 | 0.541554 |
6fcd19005d9f0c8dc04e8be2bcd4a496b3ee5923 | 74 | py | Python | tle_download/__init__.py | cognitive-space/tle-download | 86e9859eed1e87bf93f33471a665ad2567ebccca | ["MIT"] | null | null | null | tle_download/__init__.py | cognitive-space/tle-download | 86e9859eed1e87bf93f33471a665ad2567ebccca | ["MIT"] | null | null | null | tle_download/__init__.py | cognitive-space/tle-download | 86e9859eed1e87bf93f33471a665ad2567ebccca | ["MIT"] | null | null | null |
__version__ = "0.1.0"
from tle_download.main import get_tles, write_tles
| 18.5 | 50 | 0.783784 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 | 0.094595 |
6fcf8180fcc5543d1eb052a5cecb72e8028c0a9e | 4,125 | py | Python | bmi203_hw3/methods.py | cjmathy/bmi203_hw3 | 7d785e12be048a2870b90d704f18d2391f210aec | ["Apache-2.0"] | null | null | null | bmi203_hw3/methods.py | cjmathy/bmi203_hw3 | 7d785e12be048a2870b90d704f18d2391f210aec | ["Apache-2.0"] | null | null | null | bmi203_hw3/methods.py | cjmathy/bmi203_hw3 | 7d785e12be048a2870b90d704f18d2391f210aec | ["Apache-2.0"] | null | null | null |
import numpy as np
import random
def align(pair,sequences,scoring_matrix,penalties,aa_dict):
seq1, seq2 = sequences[pair[0]], sequences[pair[1]]
    # Create matrix F (maximum alignment scores up to cell F(i,j)) and tracking
    # matrix P (a flag recording whether each cell was reached by a match or a
    # gap, or marks the end of a local alignment).
F = np.zeros((len(seq1),len(seq2)))
P = np.zeros((len(seq1),len(seq2)))
F, P = fill_scoring_matrix(F,P,seq1,seq2,scoring_matrix,aa_dict,penalties)
return traceback(F,P,seq1,seq2)
def fill_scoring_matrix(F,P,seq1,seq2,scoring_matrix,aa_dict,penalties):
d,e = penalties
it = np.nditer(F, flags=['multi_index'])
while not it.finished:
i,j = it.multi_index
aa1_index = aa_dict[seq1[i]]
aa2_index = aa_dict[seq2[j]]
        # Set penalties: opening a gap costs d, extending a gap costs e. A gap
        # may not be opened in one sequence immediately after a gap occurred in
        # the other, since that would effectively allow jumps from one local
        # alignment to another.
#If the cell directly above was arrived at by introducing a gap in seq1, then do not allow a gap in seq2
if P[i-1,j] == 1:
gap_in_seq1,gap_in_seq2 = e,float("inf")
#If the cell directly to the left was arrived at by introducing a gap in seq2, then do not allow a gap in seq1
elif P[i,j-1] == -1:
gap_in_seq1,gap_in_seq2 = float("inf"),e
else:
gap_in_seq1,gap_in_seq2 = d,d
possible_scores = [0,
F[i-1,j-1]+scoring_matrix[aa1_index][aa2_index],
F[i,j-1]-gap_in_seq1,
F[i-1,j]-gap_in_seq2
]
#Find maximum of possible_scores to decide on move
F[i,j] = max(possible_scores)
if F[i,j] == possible_scores[1]: P[i,j] = 0 #If match, set tracking flag to 0
elif F[i,j] == possible_scores[2]: P[i,j] = 1 #If gap in seq1, set tracking flag to 1
else : P[i,j] = -1 #If gap in seq2 (or if max was 0, and alignment ended), set tracking flag to -1
it.iternext()
F[:,0] = 0
F[0,:] = 0
return F,P
def traceback(F,P,seq1,seq2):
#Find max value
max_index = F.argmax()
i,j = np.unravel_index(max_index, F.shape)
score = F[i,j]
alignment = ('','')
    while F[i,j] > 0 and i != 0 and j != 0:  # 'is not' on ints is unreliable; compare values
if P[i,j] == 0: #There must have been a match of the amino acid at seq1[i] and seq2[j]
alignment = seq1[i]+alignment[0],seq2[j]+alignment[1]
i -= 1
j -= 1
elif P[i,j] == 1: #There must have been a gap in seq1
alignment = "_"+alignment[0],seq2[j]+alignment[1]
j -= 1
elif P[i,j] == -1: #There must have been a gap in seq2
alignment = seq1[i]+alignment[0],"_"+alignment[1]
i -= 1
return alignment, score
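# A minimal usage sketch (hypothetical, not part of the assignment): align two
# toy sequences over a two-letter alphabet with match=+2, mismatch=-1,
# gap-open penalty d=2 and gap-extension penalty e=1. aa_dict maps each
# residue to its row/column index in the scoring matrix.
#
#   toy_matrix = np.array([[2, -1], [-1, 2]])
#   toy_dict = {"A": 0, "B": 1}
#   seqs = {"s1": "ABBA", "s2": "ABA"}
#   alignment, score = align(("s1", "s2"), seqs, toy_matrix, (2, 1), toy_dict)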
def get_fp_rate(pos_scores,neg_scores):
    # Set the threshold to the positive score that is greater than or equal to
    # exactly 30% of the positive scores: sort pos_scores and take the 15th of
    # the 50 elements (index 14).
pos_scores.sort()
threshold = pos_scores[14]
fp = 0
for score in neg_scores:
if score > threshold:
fp+=1
fp_rate = fp/50.0
return threshold, fp_rate
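# Worked example (hypothetical scores): with 50 positives, threshold =
# pos_scores[14] leaves the top 35/50 positives above it (a fixed true-positive
# rate of ~0.7); get_fp_rate then reports the fraction of the 50 negatives that
# still exceed that threshold, i.e. the false-positive rate at TPR ~= 0.7.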
def get_tp_rate(pos_scores,neg_scores,fpr):
neg_scores.sort()
index = int(50*(1-fpr))-1
threshold = neg_scores[index]
tp = 0
for score in pos_scores:
if score > threshold:
tp+=1
tp_rate = tp/50.0
return tp_rate
def calc_score(alignments,scoremat,penalties,aa_dict):
d,e = penalties
scores = []
for alignment in alignments:
        score = 0
        ext_flag = False  # True while inside a gap run; without this the flag is read before assignment
for index in range(len(alignment[0])):
score_flag = False
aa1 = alignment[0][index]
aa2 = alignment[1][index]
if aa1 == '_':
if ext_flag == True:
score -= e
score_flag = True
else:
score -= d
ext_flag = True
score_flag = True
else: i = aa_dict[aa1]
if aa2 == '_':
if ext_flag == True:
score -= e
score_flag = True
else:
score -= d
ext_flag = True
score_flag = True
else: j = aa_dict[aa2]
if (score_flag == False):
score += scoremat[i,j]
ext_flag = False
scores.append(score)
return scores
| 28.061224 | 270 | 0.667394 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,371 | 0.332364 |
6fcfd483955455d5e0edc10b34fa44d33f3eefa6 | 4,346 | py | Python | custom_components/netatmo/select.py | mauriziosacca/netatmo_custom | 149a211d7cd6b87db012c5dabd12f34db302f066 | ["MIT"] | 5 | 2020-08-07T11:35:49.000Z | 2020-09-19T03:27:47.000Z | custom_components/netatmo/select.py | mauriziosacca/netatmo_custom | 149a211d7cd6b87db012c5dabd12f34db302f066 | ["MIT"] | 4 | 2020-06-14T06:11:05.000Z | 2020-07-22T10:15:39.000Z | custom_components/netatmo/select.py | mauriziosacca/netatmo_custom | 149a211d7cd6b87db012c5dabd12f34db302f066 | ["MIT"] | 2 | 2020-06-13T23:04:41.000Z | 2020-07-05T14:13:49.000Z |
"""Support for the Netatmo climate schedule selector."""
from __future__ import annotations
import logging
from homeassistant.components.select import SelectEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
CONF_URL_ENERGY,
DATA_SCHEDULES,
DOMAIN,
EVENT_TYPE_SCHEDULE,
MANUFACTURER,
NETATMO_CREATE_SELECT,
)
from .data_handler import HOME, SIGNAL_NAME, NetatmoHome
from .netatmo_entity_base import NetatmoBase
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the Netatmo energy platform schedule selector."""
print("Setup of select platform")
@callback
def _create_entity(netatmo_home: NetatmoHome) -> None:
entity = NetatmoScheduleSelect(netatmo_home)
_LOGGER.debug("Adding climate select %s", entity)
async_add_entities([entity])
entry.async_on_unload(
async_dispatcher_connect(hass, NETATMO_CREATE_SELECT, _create_entity)
)
class NetatmoScheduleSelect(NetatmoBase, SelectEntity):
"""Representation a Netatmo thermostat schedule selector."""
def __init__(
self,
netatmo_home: NetatmoHome,
) -> None:
"""Initialize the select entity."""
print("NetatmoScheduleSelect", netatmo_home.signal_name)
SelectEntity.__init__(self)
super().__init__(netatmo_home.data_handler)
self._home = netatmo_home.home
self._home_id = self._home.entity_id
self._signal_name = netatmo_home.signal_name
self._publishers.extend(
[
{
"name": HOME,
"home_id": self._home.entity_id,
SIGNAL_NAME: self._signal_name,
},
]
)
self._device_name = self._home.name
self._attr_name = f"{MANUFACTURER} {self._device_name}"
self._model: str = "NATherm1"
self._config_url = CONF_URL_ENERGY
self._attr_unique_id = f"{self._home_id}-schedule-select"
self._attr_current_option = getattr(self._home.get_selected_schedule(), "name")
self._attr_options = [
schedule.name for schedule in self._home.schedules.values()
]
async def async_added_to_hass(self) -> None:
"""Entity created."""
await super().async_added_to_hass()
self.data_handler.config_entry.async_on_unload(
async_dispatcher_connect(
self.hass,
f"signal-{DOMAIN}-webhook-{EVENT_TYPE_SCHEDULE}",
self.handle_event,
)
)
@callback
def handle_event(self, event: dict) -> None:
"""Handle webhook events."""
data = event["data"]
if self._home_id != data["home_id"]:
return
if data["event_type"] == EVENT_TYPE_SCHEDULE and "schedule_id" in data:
self._attr_current_option = getattr(
self.hass.data[DOMAIN][DATA_SCHEDULES][self._home_id].get(
data["schedule_id"]
),
"name",
)
self.async_write_ha_state()
async def async_select_option(self, option: str) -> None:
"""Change the selected option."""
for sid, schedule in self.hass.data[DOMAIN][DATA_SCHEDULES][
self._home_id
].items():
if schedule.name != option:
continue
_LOGGER.debug(
"Setting %s schedule to %s (%s)",
self._home_id,
option,
sid,
)
await self._home.async_switch_schedule(schedule_id=sid)
break
@callback
def async_update_callback(self) -> None:
"""Update the entity's state."""
self._attr_current_option = getattr(self._home.get_selected_schedule(), "name")
self.hass.data[DOMAIN][DATA_SCHEDULES][self._home_id] = self._home.schedules
self._attr_options = [
schedule.name for schedule in self._home.schedules.values()
]
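# Note: Home Assistant calls ``async_select_option`` when a user picks a
# schedule in the UI, while ``async_update_callback`` and the webhook handler
# keep the displayed option and option list in sync with changes made on the
# Netatmo side.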
| 32.192593 | 87 | 0.631615 | 3,078 | 0.708237 | 0 | 0 | 1,128 | 0.259549 | 1,456 | 0.335021 | 646 | 0.148642 |
6fcfd615a77be7f31719c843dfcd485b0a7a9fe7 | 349 | py | Python | Olympiad Solutions/URI/1943.py | Ashwanigupta9125/code-DS-ALGO | 49f6cf7d0c682da669db23619aef3f80697b352b | ["MIT"] | 36 | 2019-12-27T08:23:08.000Z | 2022-01-24T20:35:47.000Z | Olympiad Solutions/URI/1943.py | Ashwanigupta9125/code-DS-ALGO | 49f6cf7d0c682da669db23619aef3f80697b352b | ["MIT"] | 10 | 2019-11-13T02:55:18.000Z | 2021-10-13T23:28:09.000Z | Olympiad Solutions/URI/1943.py | Ashwanigupta9125/code-DS-ALGO | 49f6cf7d0c682da669db23619aef3f80697b352b | ["MIT"] | 53 | 2020-08-15T11:08:40.000Z | 2021-10-09T15:51:38.000Z |
# Ivan Carvalho
# Solution to https://www.urionlinejudge.com.br/judge/problems/view/1943
#!/usr/bin/env python2.7
# encoding : utf-8
e = int(raw_input())
if e == 1:
print "Top 1"
elif e <= 3:
print "Top 3"
elif e <= 5:
print "Top 5"
elif e <= 10:
print "Top 10"
elif e <= 25:
print "Top 25"
elif e <= 50:
print "Top 50"
else:
print "Top 100"
| 17.45 | 72 | 0.633238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 183 | 0.524355 |
6fd077e82e7625e41bd526705fa44002dd980b86 | 21,889 | py | Python | skbot/ignition/sdformat/bindings/v15/physics.py | FirefoxMetzger/ropy | c1bcebda223f3af0b6d35e3f4c26d8fd9d26577a | ["Apache-2.0"] | 6 | 2021-03-24T05:54:45.000Z | 2021-07-20T21:03:21.000Z | skbot/ignition/sdformat/bindings/v15/physics.py | FirefoxMetzger/scikit-bot | ee6f1d3451a3c61a6fa122cc42efc4dd67afc9c9 | ["Apache-2.0"] | 31 | 2021-08-12T08:12:58.000Z | 2022-03-21T23:16:36.000Z | skbot/ignition/sdformat/bindings/v15/physics.py | FirefoxMetzger/scikit-bot | ee6f1d3451a3c61a6fa122cc42efc4dd67afc9c9 | ["Apache-2.0"] | 1 | 2021-07-20T20:13:49.000Z | 2021-07-20T20:13:49.000Z |
from dataclasses import dataclass, field
from typing import Optional
__NAMESPACE__ = "sdformat/v1.5/physics.xsd"
@dataclass
class Physics:
"""
The physics tag specifies the type and properties of the dynamics engine.
Parameters
----------
max_step_size: Maximum time step size at which every system in
simulation can interact with the states of the world. (was
physics.sdf's dt).
real_time_factor: target simulation speedup factor, defined by ratio
of simulation time to real-time.
real_time_update_rate: Rate at which to update the physics engine
(UpdatePhysics calls per real-time second). (was physics.sdf's
update_rate).
max_contacts: Maximum number of contacts allowed between two
entities. This value can be over ridden by a max_contacts
element in a collision element.
gravity: The gravity vector in m/s^2, expressed in a coordinate
frame defined by the spherical_coordinates tag.
magnetic_field: The magnetic vector in Tesla, expressed in a
coordinate frame defined by the spherical_coordinates tag.
simbody: Simbody specific physics properties
bullet: Bullet specific physics properties
ode: ODE specific physics properties
name: The name of this set of physics parameters.
default: If true, this physics element is set as the default physics
profile for the world. If multiple default physics elements
exist, the first element marked as default is chosen. If no
default physics element exists, the first physics element is
chosen.
type: The type of the dynamics engine. Current options are ode,
bullet, simbody and rtql8. Defaults to ode if left unspecified.
"""
class Meta:
name = "physics"
max_step_size: float = field(
default=0.001,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
real_time_factor: float = field(
default=1.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
real_time_update_rate: float = field(
default=1000.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
max_contacts: int = field(
default=20,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
gravity: str = field(
default="0 0 -9.8",
metadata={
"type": "Element",
"namespace": "",
"required": True,
"pattern": r"(\s*(-|\+)?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s+){2}((-|\+)?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+))\s*",
},
)
magnetic_field: str = field(
default="5.5645e-6 22.8758e-6 -42.3884e-6",
metadata={
"type": "Element",
"namespace": "",
"required": True,
"pattern": r"(\s*(-|\+)?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+)\s+){2}((-|\+)?(\d+(\.\d*)?|\.\d+|\d+\.\d+[eE][-\+]?[0-9]+))\s*",
},
)
simbody: Optional["Physics.Simbody"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
},
)
bullet: Optional["Physics.Bullet"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
},
)
ode: Optional["Physics.Ode"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
},
)
name: str = field(
default="default_physics",
metadata={
"type": "Attribute",
},
)
default: bool = field(
default=False,
metadata={
"type": "Attribute",
},
)
type: Optional[str] = field(
default=None,
metadata={
"type": "Attribute",
"required": True,
},
)
@dataclass
class Simbody:
"""
Simbody specific physics properties.
Parameters
----------
min_step_size: (Currently not used in simbody) The time duration
which advances with each iteration of the dynamics engine,
this has to be no bigger than max_step_size under physics
block. If left unspecified, min_step_size defaults to
max_step_size.
accuracy: Roughly the relative error of the system.
-LOG(accuracy) is roughly the number of significant digits.
max_transient_velocity: Tolerable "slip" velocity allowed by the
solver when static friction is supposed to hold
object in place.
        contact: Relationship among dissipation, coefficient of
            restitution, etc.
            d = dissipation coefficient (1/velocity)
            vc = capture velocity (velocity where e=e_max)
            vp = plastic velocity (smallest v where e=e_min) > vc
            Assume real COR=1 when v=0.
            e_min = given minimum COR, at v >= vp
            (a.k.a. plastic_coef_restitution)
            d = slope = (1-e_min)/vp, or equivalently e_min = 1 - d*vp
            e_max = maximum COR = 1-d*vc, reached at v=vc
            e = 0,        v <= vc
              = 1 - d*v,  vc < v < vp
              = e_min,    v >= vp
            dissipation factor = d*min(v,vp)  [compliant]
            cor = e  [rigid]
            Combining rule: e = 0 if e1==e2==0,
            else e = 2*e1*e2/(e1+e2)
"""
min_step_size: float = field(
default=0.0001,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
accuracy: float = field(
default=0.001,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
max_transient_velocity: float = field(
default=0.01,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
contact: Optional["Physics.Simbody.Contact"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
},
)
@dataclass
class Contact:
"""Relationship among dissipation, coef.
restitution, etc. d = dissipation coefficient (1/velocity) vc = capture velocity (velocity where e=e_max) vp = plastic velocity (smallest v where e=e_min) &gt; vc Assume real COR=1 when v=0. e_min = given minimum COR, at v &gt;= vp (a.k.a. plastic_coef_restitution) d = slope = (1-e_min)/vp OR, e_min = 1 - d*vp e_max = maximum COR = 1-d*vc, reached at v=vc e = 0, v &lt;= vc = 1 - d*v, vc &lt; v &lt; vp = e_min, v &gt;= vp dissipation factor = d*min(v,vp) [compliant] cor = e [rigid] Combining rule e = 0, e1==e2==0 = 2*e1*e2/(e1+e2), otherwise
Parameters
----------
stiffness: Default contact material stiffness
(force/dist or torque/radian).
dissipation: dissipation coefficient to be used in compliant
contact; if not given it is
(1-min_cor)/plastic_impact_velocity
plastic_coef_restitution: this is the COR to be used at high
velocities for rigid impacts; if not given it is 1 -
dissipation*plastic_impact_velocity
plastic_impact_velocity: smallest impact velocity at which
min COR is reached; set to zero if you want the
min COR always to be used
static_friction: static friction (mu_s) as described by this
plot:
http://gazebosim.org/wiki/File:Stribeck_friction.png
dynamic_friction: dynamic friction (mu_d) as described by
this plot:
http://gazebosim.org/wiki/File:Stribeck_friction.png
viscous_friction: viscous friction (mu_v) with units of
(1/velocity) as described by this plot:
http://gazebosim.org/wiki/File:Stribeck_friction.png
override_impact_capture_velocity: for rigid impacts only,
impact velocity at which COR is set to zero;
normally inherited from global default but can
be overridden here. Combining rule: use larger velocity
override_stiction_transition_velocity: This is the largest
slip velocity at which we'll consider a
transition to stiction. Normally inherited
from a global default setting. For a continuous friction
model this is the velocity at which the max
static friction force is reached. Combining
rule: use larger velocity
"""
stiffness: float = field(
default=100000000.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
dissipation: float = field(
default=100.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
plastic_coef_restitution: float = field(
default=0.5,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
plastic_impact_velocity: float = field(
default=0.5,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
static_friction: float = field(
default=0.9,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
dynamic_friction: float = field(
default=0.9,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
viscous_friction: float = field(
default=0.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
override_impact_capture_velocity: float = field(
default=0.001,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
override_stiction_transition_velocity: float = field(
default=0.001,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
@dataclass
class Bullet:
"""
Bullet specific physics properties.
Parameters
----------
solver:
constraints: Bullet constraint parameters.
"""
solver: Optional["Physics.Bullet.Solver"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
constraints: Optional["Physics.Bullet.Constraints"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
@dataclass
class Solver:
"""
Parameters
----------
type: One of the following types: sequential_impulse only.
min_step_size: The time duration which advances with each
iteration of the dynamics engine, this has to be no
bigger than max_step_size under physics block. If left
unspecified, min_step_size defaults to max_step_size.
iters: Number of iterations for each step. A higher number
produces greater accuracy at a performance cost.
sor: Set the successive over-relaxation parameter.
"""
type: str = field(
default="sequential_impulse",
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
min_step_size: float = field(
default=0.0001,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
iters: int = field(
default=50,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
sor: float = field(
default=1.3,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
@dataclass
class Constraints:
"""
Bullet constraint parameters.
Parameters
----------
cfm: Constraint force mixing parameter. See the ODE page for
more information.
erp: Error reduction parameter. See the ODE page for more
information.
contact_surface_layer: The depth of the surface layer around
all geometry objects. Contacts are allowed to sink into
the surface layer up to the given depth before coming to
rest. The default value is zero. Increasing this to some
small value (e.g. 0.001) can help prevent jittering
problems due to contacts being repeatedly made and
broken.
split_impulse: Similar to ODE's max_vel implementation. See
http://web.archive.org/web/20120430155635/http://bulletphysics.org/mediawiki-1.5.8/index.php/BtContactSolverInfo#Split_Impulse
for more information.
split_impulse_penetration_threshold: Similar to ODE's
max_vel implementation. See
http://web.archive.org/web/20120430155635/http://bulletphysics.org/mediawiki-1.5.8/index.php/BtContactSolverInfo#Split_Impulse
for more information.
"""
cfm: float = field(
default=0.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
erp: float = field(
default=0.2,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
contact_surface_layer: float = field(
default=0.001,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
split_impulse: bool = field(
default=True,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
split_impulse_penetration_threshold: float = field(
default=-0.01,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
@dataclass
class Ode:
"""
ODE specific physics properties.
Parameters
----------
solver:
constraints: ODE constraint parameters.
"""
solver: Optional["Physics.Ode.Solver"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
constraints: Optional["Physics.Ode.Constraints"] = field(
default=None,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
@dataclass
class Solver:
"""
Parameters
----------
type: One of the following types: world, quick
min_step_size: The time duration which advances with each
iteration of the dynamics engine, this has to be no
bigger than max_step_size under physics block. If left
unspecified, min_step_size defaults to max_step_size.
iters: Number of iterations for each step. A higher number
produces greater accuracy at a performance cost.
precon_iters: Experimental parameter.
sor: Set the successive over-relaxation parameter.
use_dynamic_moi_rescaling: Flag to enable dynamic rescaling
of moment of inertia in constrained directions.
See gazebo pull request 1114 for the implementation of
this feature. https://osrf-
migration.github.io/gazebo-gh-pages/#!/osrf/gazebo/pull-
request/1114
"""
type: str = field(
default="quick",
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
min_step_size: float = field(
default=0.0001,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
iters: int = field(
default=50,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
precon_iters: int = field(
default=0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
sor: float = field(
default=1.3,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
use_dynamic_moi_rescaling: bool = field(
default=False,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
@dataclass
class Constraints:
"""
ODE constraint parameters.
Parameters
----------
cfm: Constraint force mixing parameter. See the ODE page for
more information.
erp: Error reduction parameter. See the ODE page for more
information.
contact_max_correcting_vel: The maximum correcting
velocities allowed when resolving contacts.
contact_surface_layer: The depth of the surface layer around
all geometry objects. Contacts are allowed to sink into
the surface layer up to the given depth before coming to
rest. The default value is zero. Increasing this to some
small value (e.g. 0.001) can help prevent jittering
problems due to contacts being repeatedly made and
broken.
"""
cfm: float = field(
default=0.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
erp: float = field(
default=0.2,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
contact_max_correcting_vel: float = field(
default=100.0,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
contact_surface_layer: float = field(
default=0.001,
metadata={
"type": "Element",
"namespace": "",
"required": True,
},
)
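# A minimal usage sketch (not part of the original module). These bindings
# appear to be xsdata-generated, so, assuming xsdata is available, a Physics
# element could be built and rendered to SDFormat XML roughly like this:
#
#   from xsdata.formats.dataclass.serializers import XmlSerializer
#
#   physics = Physics(
#       type="ode",
#       max_step_size=0.002,
#       ode=Physics.Ode(solver=Physics.Ode.Solver(type="world", iters=100)),
#   )
#   print(XmlSerializer().render(physics))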
| 35.707993 | 822 | 0.457673 | 21,761 | 0.994152 | 0 | 0 | 21,772 | 0.994655 | 0 | 0 | 12,510 | 0.57152 |
6fd0b30bf860cf4cb13764228ffc837786f5279e | 4,826 | py | Python | lib/surface/help.py | bshaffer/google-cloud-sdk | f587382fd112f238c0d6d5ca3dab8f52d2b5c5f9 | ["Apache-2.0"] | null | null | null | lib/surface/help.py | bshaffer/google-cloud-sdk | f587382fd112f238c0d6d5ca3dab8f52d2b5c5f9 | ["Apache-2.0"] | null | null | null | lib/surface/help.py | bshaffer/google-cloud-sdk | f587382fd112f238c0d6d5ca3dab8f52d2b5c5f9 | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*- #
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A calliope command that prints help for another calliope command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import argparse
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.help_search import search
from googlecloudsdk.command_lib.help_search import search_util
from googlecloudsdk.core import log
_DEFAULT_LIMIT = 5
class Help(base.ListCommand):
"""Search gcloud help text.
If a full gcloud command is specified after the ``help'' operand, {command}
prints a detailed help message for that command.
Otherwise, {command} runs a search for all commands with help text matching
the given argument or arguments. It prints the command name and a summary of
the help text for any command that it finds as a result.
To run a search directly, you can use remainder arguments, following a `--`.
By default, command results are displayed in a table that shows the name
of the command and snippets of the help text that relate to your search terms.
By default, search results are sorted from most to least relevant by default,
using a localized rating based on several heuristics. These heuristics may
change in future runs of this command.
## EXAMPLES
To get the help for the command `gcloud projects describe`, run:
$ {command} projects describe
To search for all commands whose help text contains the word `project`, run:
$ {command} -- project
To search for commands whose help text contains the word `project` and the
string `--foo`, run:
$ {command} -- project --foo
To search and receive more than the default limit of 5 search results, run:
$ {command} --limit=20 -- project
To search for a term and sort the results by a different characteristic, such
as command name, run:
$ {command} --sort-by=name -- project
"""
category = base.GCLOUD_SDK_TOOLS_CATEGORY
@staticmethod
def Args(parser):
parser.display_info.AddTransforms(search_util.GetTransforms())
parser.display_info.AddFormat("""
table[all-box,pager](
commandpath():label='COMMAND',
summary():wrap)
""")
base.URI_FLAG.RemoveFromParser(parser)
base.LIMIT_FLAG.SetDefault(parser, _DEFAULT_LIMIT)
base.SORT_BY_FLAG.SetDefault(parser, '~relevance')
parser.add_argument(
'command',
nargs='*',
help="""\
Sequence of names representing a gcloud group or command name.
If the arguments provide the name of a gcloud command, the full help
text of that command will be displayed. Otherwise, all arguments will
be considered search terms and used to search through all of gcloud's
help text.
""")
parser.add_argument(
'search_terms',
nargs=argparse.REMAINDER,
help="""\
Search terms. The command will return a list of gcloud commands that are
relevant to the searched term. If this argument is provided, the command
will always return a list of search results rather than displaying help
text of a single command.
For example, to search for commands that relate to the term `project` or
`folder`, run:
$ {command} -- project folder
""")
def Run(self, args):
if not args.search_terms:
try:
# --document=style=help to signal the metrics.Help() 'help' label in
# actions.RenderDocumentAction().Action().
self.ExecuteCommandDoNotUse(args.command + ['--document=style=help'])
return None
except Exception: # pylint: disable=broad-except
# In this case, we will treat the arguments as search terms.
pass
results = search.RunSearch(
args.command + (args.search_terms or []),
self._cli_power_users_only)
self._resources_found = len(results)
self._resources_displayed = min(len(results), args.limit)
return results
def Epilog(self, resources_were_displayed):
if not self._resources_found:
return
if resources_were_displayed:
log.status.Print(
'Listed {} of {} items.'.format(self._resources_displayed,
self._resources_found))
else:
log.status.Print('Listed 0 items.')
| 33.513889 | 80 | 0.715292 | 3,780 | 0.783257 | 0 | 0 | 1,248 | 0.258599 | 0 | 0 | 3,205 | 0.664111 |
6fd10f3af083decdb2efd072cc80a08a18e55c62 | 66 | py | Python | algocoin/__init__.py | dendisuhubdy/algo-coin | e324f971cc0db5ebc29d04224d2fdeee13a31ac1 | ["Apache-2.0"] | 252 | 2017-09-01T21:36:08.000Z | 2022-01-08T15:48:31.000Z | algocoin/__init__.py | dendisuhubdy/algo-coin | e324f971cc0db5ebc29d04224d2fdeee13a31ac1 | ["Apache-2.0"] | 75 | 2017-10-10T01:01:19.000Z | 2020-05-04T11:03:20.000Z | algocoin/__init__.py | dendisuhubdy/algo-coin | e324f971cc0db5ebc29d04224d2fdeee13a31ac1 | ["Apache-2.0"] | 61 | 2017-08-31T07:22:25.000Z | 2022-01-08T15:48:38.000Z |
from .main import main as run  # noqa: F401
__version__ = '0.0.3'
| 22 | 43 | 0.681818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 19 | 0.287879 |
6fd18ba88e62fdb096046d7f14533a962dedc716 | 283 | py | Python | mmrazor/models/architectures/components/backbones/__init__.py | HIT-cwh/mmrazor | 2dad24044d7f1dad88f20221f8fc071dd40fdd4f | ["Apache-2.0"] | 553 | 2021-12-23T11:43:35.000Z | 2022-03-31T01:04:20.000Z | mmrazor/models/architectures/components/backbones/__init__.py | HIT-cwh/mmrazor | 2dad24044d7f1dad88f20221f8fc071dd40fdd4f | ["Apache-2.0"] | 113 | 2021-12-23T12:09:06.000Z | 2022-03-30T10:13:42.000Z | mmrazor/models/architectures/components/backbones/__init__.py | HIT-cwh/mmrazor | 2dad24044d7f1dad88f20221f8fc071dd40fdd4f | ["Apache-2.0"] | 76 | 2021-12-23T11:48:39.000Z | 2022-03-29T11:24:35.000Z |
# Copyright (c) OpenMMLab. All rights reserved.
from .darts_backbone import DartsBackbone
from .searchable_mobilenet import SearchableMobileNet
from .searchable_shufflenet_v2 import SearchableShuffleNetV2
__all__ = ['DartsBackbone', 'SearchableShuffleNetV2', 'SearchableMobileNet']
| 40.428571 | 76 | 0.844523 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 107 | 0.378092 |
6fd19f63f1a65c7260c9a3e5a8928272b7a43f33 | 2,013 | py | Python | tools/ideabuck/scripts/generate_grammar_kit.py | thelvis4/buck | dd55ad3373c1dc01d83bc3780dfc205a923c8088 | ["Apache-2.0"] | 1 | 2021-06-14T22:35:29.000Z | 2021-06-14T22:35:29.000Z | tools/ideabuck/scripts/generate_grammar_kit.py | thelvis4/buck | dd55ad3373c1dc01d83bc3780dfc205a923c8088 | ["Apache-2.0"] | null | null | null | tools/ideabuck/scripts/generate_grammar_kit.py | thelvis4/buck | dd55ad3373c1dc01d83bc3780dfc205a923c8088 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
# Copyright 2018-present Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import shutil
import subprocess
# The location of the generate grammar kit script
DIR = os.path.dirname(__file__)
# The location of the plugin directory
PLUGIN_PATH = os.path.abspath(os.path.join(DIR, ".."))
# The location of the grammar-kit directory
GRAMMAR_KIT = os.path.abspath(
os.path.join(DIR, "../../../third-party/java/grammar-kit/")
)
OUT_DIR = os.path.join(PLUGIN_PATH, "gen")
FLEX_OUT_DIR = os.path.join(OUT_DIR, "com/facebook/buck/intellij/ideabuck/lang")
GRAMMAR_KIT_JAR = os.path.join(GRAMMAR_KIT, "grammar-kit.jar")
GRAMMAR_KIT_JFLEX_JAR = os.path.join(GRAMMAR_KIT, "JFlex.jar")
JFLEX_SKELETON = os.path.join(PLUGIN_PATH, "resources/idea-flex.skeleton")
FLEX_FILE = os.path.join(
PLUGIN_PATH, "src/com/facebook/buck/intellij/ideabuck/lang/Buck.flex"
)
BNF_FILE = os.path.join(
PLUGIN_PATH, "src/com/facebook/buck/intellij/ideabuck/lang/Buck.bnf"
)
def subprocess_call(cmd):
print("Running: %s" % (" ".join(cmd)))
subprocess.call(cmd)
# Start from a clean output directory.
shutil.rmtree(OUT_DIR, ignore_errors=True)
# Generate the parser and PSI classes from the BNF grammar with Grammar-Kit.
subprocess_call(["java", "-jar", GRAMMAR_KIT_JAR, OUT_DIR, BNF_FILE])
# Generate the lexer from the .flex spec with JFlex, using the IDEA skeleton.
subprocess_call(
    [
        "java",
        "-jar",
        GRAMMAR_KIT_JFLEX_JAR,
        "-sliceandcharat",
        "-skel",
        JFLEX_SKELETON,
        "-d",
        FLEX_OUT_DIR,
        FLEX_FILE,
    ]
)
| 29.602941 | 82 | 0.718331 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,052 | 0.522603 |
6fd2040d5d7a06ef6b0a4c0bd8f53d00185458f8 | 5,639 | py | Python | d_parser/d_spider_24int.py | Holovin/D_GrabDemo | 6adb03fb42ae03e7896eb2eacb342cf9660feb92 | ["MIT"] | null | null | null | d_parser/d_spider_24int.py | Holovin/D_GrabDemo | 6adb03fb42ae03e7896eb2eacb342cf9660feb92 | ["MIT"] | 2 | 2018-03-28T19:47:46.000Z | 2021-12-13T20:56:31.000Z | d_parser/d_spider_24int.py | Holovin/D_GrabDemo | 6adb03fb42ae03e7896eb2eacb342cf9660feb92 | ["MIT"] | null | null | null |
from lxml import html
from d_parser.d_spider_common import DSpiderCommon
from d_parser.helpers.re_set import Ree
from helpers.url_generator import UrlGenerator
from d_parser.helpers.stat_counter import StatCounter as SC
VERSION = 29
# Warn: Don't remove task argument even if not use it (it's break grab and spider crashed)
# Warn: noinspection PyUnusedLocal
class DSpider(DSpiderCommon):
def __init__(self, thread_number, try_limit=0):
super().__init__(thread_number, try_limit)
# parse categories
def task_initial(self, grab, task):
try:
if self.check_body_errors(grab, task):
yield self.check_errors(task)
return
links = grab.doc.select('//div[@id="main-subitems"]//a')
for link in links:
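                # onpage=99999 forces the site to return the whole category on
                # one page, so pagination does not have to be crawled.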
url = UrlGenerator.get_page_params(self.domain, link.attr('href'), {'onpage': 99999})
yield self.do_task('parse_page', url, DSpider.get_next_task_priority(task))
except Exception as e:
self.process_error(grab, task, e)
finally:
self.process_finally(task)
# parse page
def task_parse_page(self, grab, task):
try:
if self.check_body_errors(grab, task):
yield self.check_errors(task)
# parse items links
items_links = grab.doc.select('//div[@id="catalog-list"]//div[@class="catalog-items"]//a[@property="name"]')
for row in items_links:
link = row.attr('href')
link = UrlGenerator.get_page_params(self.domain, link, {})
yield self.do_task('parse_item', link, DSpider.get_next_task_priority(task))
except Exception as e:
self.process_error(grab, task, e)
finally:
self.process_finally(task)
# parse single item
def task_parse_item(self, grab, task):
try:
if self.check_body_errors(grab, task):
yield self.check_errors(task)
# common block with info
product_info = grab.doc.select('//div[@id="product-info"]')
# parse fields
# A = name
product_name = product_info.select('.//h1').text()
# B = [const]
# C = [const]
# D = [const]
product_count_string = product_info.select('.//div[@class="product-data-storehouse"]').text(default='[not found]')
product_count = '-1'
product_status = '0'
            product_unit = 'ед.'  # 'ед.' means "units"/"pcs"; kept as scraped from the Russian site
            if product_count_string != 'в наличии':  # 'в наличии' means "in stock"
self.log.warning(task, 'Skip item, cuz wrong count {}'.format(product_count_string))
return
# E = price
# if E = "запросить цену и наличие" => zapros
# else => float
product_price = product_info.select('.//span[@itemprop="price"]').text().replace(' ', '')
            if product_price == 'Уточняйте':  # 'Уточняйте' means "please inquire" (price on request)
product_price = '-1'
else:
# E = price (float)
# check if correct price
if not Ree.float.match(product_price):
self.log_warn(SC.MSG_UNKNOWN_PRICE, f'Skip item, cuz wrong price {product_price}', task)
return
# F = vendor code
product_vendor_code = product_info.select('.//div[@class="product-data-articul"]').text()
# G = vendor
product_vendor = product_info.select('.//div[@class="product-data-producer"]').text()
# H = photo url
product_photo_url_raw = product_info.select('.//div[@id="product-images-list"]/div[1]/img[@itemprop="contentUrl"]').attr('src')
product_photo_url = UrlGenerator.get_page_params(self.domain, product_photo_url_raw, {})
# pre I
product_description_part_raw = product_info.select('.//div[@class="product-description description"]/following-sibling::node()[2]')\
.text(default='')\
.replace('$(".description").html(\'', '')\
.replace('\');', '')
# I = description
# this part insert pure html with js, so we need clear all html tags and &-symbols
product_description_part_list = html.fromstring(f'<div>{product_description_part_raw}</div>').xpath('string()')
product_description_part = ''
for row in product_description_part_list:
product_description_part += row
            product_description = {'Описание': product_description_part}  # 'Описание' means "Description"
table = product_info.select('.//div[@class="product-description table"]/div')
for row in table:
key = row.select('./text()').text()
value = row.select('./span').text()
if key:
product_description[key] = value
# ID
product_id = product_info.select('.//div[@class="product-add-but"]').attr('data-id', '')
# save
self.result.add({
'name': product_name,
'quantity': product_count,
'delivery': product_status,
'measure': product_unit,
'price': product_price,
'sku': product_vendor_code,
'manufacture': product_vendor,
'photo': product_photo_url,
'id': product_id,
'properties': product_description
})
except Exception as e:
self.process_error(grab, task, e)
finally:
self.process_finally(task)
| 36.380645 | 144 | 0.563575 | 5,322 | 0.935819 | 5,108 | 0.898189 | 0 | 0 | 0 | 0 | 1,521 | 0.267452 |
6fd40b146434a9e17e7620f9be3907c90f0f31db | 2,007 | py | Python | agents/displays/mcts_display.py | johink/willsmith | a6bdfff2e3b12770100811002867bf3ed64ad6d3 | ["MIT"] | null | null | null | agents/displays/mcts_display.py | johink/willsmith | a6bdfff2e3b12770100811002867bf3ed64ad6d3 | ["MIT"] | null | null | null | agents/displays/mcts_display.py | johink/willsmith | a6bdfff2e3b12770100811002867bf3ed64ad6d3 | ["MIT"] | null | null | null |
from tkinter import Label, GROOVE
from willsmith.gui_display_controller import GUIDisplayController
class MCTSDisplay(GUIDisplayController):
"""
The display controller for MCTSAgent.
Creates a Tkinter GUI that displays some stats about the agent's latest
moves.
"""
WINDOW_TITLE = "MCTS Agent"
LABEL_FONT = ("Courier New", 14)
LABEL_WIDTH = 25
LABEL_BORDER_WIDTH = 1
LABEL_RELIEF = GROOVE
def __init__(self):
super().__init__()
self.playouts_label = None
self.action_label = None
self.win_pct_label = None
self.depth_label = None
def _initialize_widgets(self):
self.playouts_label = self._create_label()
self.action_label = self._create_label()
self.win_pct_label = self._create_label()
self.depth_label = self._create_label()
    def _create_label(self):
        # Use the class constant rather than the raw GROOVE import.
        return Label(self.root, font = self.LABEL_FONT,
                     width = self.LABEL_WIDTH,
                     bd = self.LABEL_BORDER_WIDTH, relief = self.LABEL_RELIEF)
def _place_widgets(self):
self.playouts_label.grid(row = 0, column = 0)
self.action_label.grid(row = 1, column = 0)
self.win_pct_label.grid(row = 2, column = 0)
self.depth_label.grid(row = 3, column = 0)
def _update_display(self, agent, action):
self._update_labels_from_agent(agent, action)
def _reset_display(self, agent):
self._update_labels_from_agent(agent, None)
def _update_labels_from_agent(self, agent, action):
self.action_label["text"] = "Latest action:\n{}".format(action)
self.playouts_label["text"] = "Latest playout count:\n{}".format(agent.playout_total)
win_pct = 0
if agent.action_node is not None:
win_pct = agent.action_node.value_estimate()
self.win_pct_label["text"] = "Node sim win rate:\n{:.2%}".format(win_pct)
self.depth_label["text"] = "Node tree depth:\n{}".format(agent.root.depth())
| 33.45 | 93 | 0.652217 | 1,903 | 0.948181 | 0 | 0 | 0 | 0 | 0 | 0 | 288 | 0.143498 |
6fd4802c9f5de417a2846d954a84224c9c4296eb | 9,461 | py | Python | mrgcn.py | bretthannigan/relational-gcn | 61219cdee2c244682680ac0d7371758dcb9cea96 | ["MIT"] | null | null | null | mrgcn.py | bretthannigan/relational-gcn | 61219cdee2c244682680ac0d7371758dcb9cea96 | ["MIT"] | null | null | null | mrgcn.py | bretthannigan/relational-gcn | 61219cdee2c244682680ac0d7371758dcb9cea96 | ["MIT"] | null | null | null |
#!/usr/bin/python3
import logging
import argparse
from time import time
import toml
from data.io.knowledge_graph import KnowledgeGraph
from data.io.tarball import Tarball
from data.io.tsv import TSV
from data.utils import is_readable, is_writable
from embeddings import graph_structure
from tasks.node_classification import build_dataset, build_model, evaluate_model
from tasks.utils import mksplits, init_fold, mkfolds, sample_mask, set_seed, strip_graph
def single_run(A, X, Y, X_node_map, tsv_writer, config):
tsv_writer.writerow(["epoch", "training_loss", "training_accurary",
"validation_loss", "validation_accuracy",
"test_loss", "test_accuracy"])
# create splits
dataset = mksplits(X, Y, X_node_map,
config['task']['dataset_ratio'])
# compile model computation graph
model = build_model(X, Y, A, config)
# train model
nepoch = config['model']['epoch']
batch_size = X.shape[0] # number of nodes
sample_weights = sample_mask(dataset['train']['X_idx'],
Y.shape[0])
for epoch in train_model(A, model, dataset, sample_weights, batch_size, nepoch):
# log metrics
tsv_writer.writerow([str(epoch[0]),
str(epoch[1]),
str(epoch[2]),
str(epoch[3]),
str(epoch[4]),
"-1", "-1"])
# test model
test_loss, test_acc = test_model(A, model, dataset, batch_size)
# log metrics
tsv_writer.writerow(["-1", "-1", "-1", "-1", "-1",
str(test_loss[0]), str(test_acc[0])])
return (test_loss[0], test_acc[0])
def kfold_crossvalidation(A, X, Y, X_node_map, k, tsv_writer, config):
tsv_writer.writerow(["fold", "epoch",
"training_loss", "training_accurary",
"validation_loss", "validation_accuracy",
"test_loss", "test_accuracy"])
# generate fold indices
folds_idx = mkfolds(X_node_map.shape[0], k)
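    # mkfolds (imported from tasks.utils) partitions the labelled-node indices
    # into k disjoint folds. Hypothetical example: with k=10 and 1000 labelled
    # nodes, each fold holds ~100 held-out test nodes, and init_fold splits the
    # remaining nodes into train/validation sets per the configured
    # dataset_ratio.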
results = []
logger.info("Starting {}-fold cross validation".format(k))
for fold in range(1, k+1):
logger.info("Fold {} / {}".format(fold, k))
# compile model computation graph
model = build_model(X, Y, A, config)
# initialize fold
dataset = init_fold(X, Y, X_node_map, folds_idx[fold-1],
config['task']['dataset_ratio'])
# train model
nepoch = config['model']['epoch']
batch_size = X.shape[0] # number of nodes
sample_weights = sample_mask(dataset['train']['X_idx'],
Y.shape[0])
for epoch in train_model(A, model, dataset, sample_weights, batch_size, nepoch):
# log metrics
tsv_writer.writerow([str(fold),
str(epoch[0]),
str(epoch[1]),
str(epoch[2]),
str(epoch[3]),
str(epoch[4]),
"-1", "-1"])
# test model
test_loss, test_acc = test_model(A, model, dataset, batch_size)
results.append((test_loss[0], test_acc[0]))
# log metrics
tsv_writer.writerow([str(fold),
"-1", "-1", "-1", "-1", "-1",
str(test_loss[0]), str(test_acc[0])])
mean_loss, mean_acc = tuple(sum(e)/len(e) for e in zip(*results))
tsv_writer.writerow(["-1", "-1", "-1", "-1", "-1", "-1",
str(mean_loss), str(mean_acc)])
return (mean_loss, mean_acc)
def train_model(A, model, dataset, sample_weights, batch_size, nepoch):
logging.info("Training for {} epoch".format(nepoch))
# Log wall-clock time
t0 = time()
for epoch in range(1, nepoch+1):
# Single training iteration
model.fit(x=[dataset['train']['X']] + A,
y=dataset['train']['Y'],
batch_size=batch_size,
epochs=1,
shuffle=False,
sample_weight=sample_weights,
validation_data=([dataset['val']['X']] + A,
dataset['val']['Y']),
callbacks=[],
verbose=0)
# Predict on full dataset
Y_hat = model.predict(x=[dataset['train']['X']] + A,
batch_size=batch_size,
verbose=0)
# Train / validation scores
train_val_loss, train_val_acc = evaluate_model(Y_hat,
[dataset['train']['Y'],
dataset['val']['Y']],
[dataset['train']['X_idx'],
dataset['val']['X_idx']])
logging.info("{:04d} ".format(epoch) \
+ "| train loss {:.4f} / acc {:.4f} ".format(train_val_loss[0],
train_val_acc[0])
+ "| val loss {:.4f} / acc {:.4f}".format(train_val_loss[1],
train_val_acc[1]))
yield (epoch,
train_val_loss[0], train_val_acc[0],
train_val_loss[1], train_val_acc[1])
logging.info("training time: {:.2f}s".format(time()-t0))
def test_model(A, model, dataset, batch_size):
# Predict on full dataset
Y_hat = model.predict(x=[dataset['train']['X']] + A,
batch_size=batch_size,
verbose=0)
test_loss, test_acc = evaluate_model(Y_hat,
[dataset['test']['Y']],
[dataset['test']['X_idx']])
logging.info("Performance on test set: loss {:.4f} / accuracy {:.4f}".format(
test_loss[0],
test_acc[0]))
return (test_loss, test_acc)
def run(args, tsv_writer, config):
set_seed(config['task']['seed'])
# prep data
if args.input is None:
logging.debug("No tarball supplied - building task prequisites")
with KnowledgeGraph(path=config['graph']['file']) as kg:
targets = strip_graph(kg, config)
A = graph_structure.generate(kg, config)
X, Y, X_node_map = build_dataset(kg, targets, config)
else:
assert is_readable(args.input)
logging.debug("Importing prepared tarball")
with Tarball(args.input, 'r') as tb:
A = tb.get('A')
X = tb.get('X')
Y = tb.get('Y')
X_node_map = tb.get('X_node_map')
if config['task']['kfolds'] < 0:
loss, accuracy = single_run(A, X, Y, X_node_map, tsv_writer, config)
else:
loss, accuracy = kfold_crossvalidation(A, X, Y, X_node_map,
config['task']['kfolds'],
tsv_writer, config)
logging.info("Mean performance: loss {:.4f} / accuracy {:.4f}".format(
loss,
accuracy))
if args.verbose < 1:
print("Mean performance: loss {:.4f} / accuracy {:.4f}".format(
loss,
accuracy))
def init_logger(filename, verbose=0):
logging.basicConfig(filename=filename,
format='[%(asctime)s] %(module)s/%(funcName)s | %(levelname)s: %(message)s',
level=logging.DEBUG)
if verbose > 0:
stream_handler = logging.StreamHandler()
level = logging.INFO
if verbose >= 2:
level = logging.DEBUG
stream_handler.setLevel(level)
logging.getLogger().addHandler(stream_handler)
if __name__ == "__main__":
timestamp = int(time())
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", help="Configuration file (toml)", required=True, default=None)
parser.add_argument("-i", "--input", help="Optional prepared input file (tar)", default=None)
parser.add_argument("-o", "--output", help="Output directory", default="/tmp/")
parser.add_argument("-v", "--verbose", help="Increase output verbosity", action='count', default=0)
args = parser.parse_args()
# load configuration
assert is_readable(args.config)
config = toml.load(args.config)
# set output base filename
baseFilename = "{}{}{}".format(args.output, config['name'], timestamp) if args.output.endswith("/") \
else "{}/{}{}".format(args.output, config['name'], timestamp)
assert is_writable(baseFilename)
init_logger(baseFilename+'.log', args.verbose)
logger = logging.getLogger(__name__)
tsv_writer = TSV(baseFilename+'.tsv', 'w')
# log parameters
logger.debug("Arguments:\n{}".format(
"\n".join(["\t{}: {}".format(arg, getattr(args, arg)) for arg in vars(args)])))
logger.debug("Configuration:\n{}".format(
"\n".join(["\t{}: {}".format(k,v) for k,v in config.items()])))
# run training
run(args, tsv_writer, config)
logging.shutdown()
| 38.45935 | 105 | 0.51559 | 0 | 0 | 1,860 | 0.196597 | 0 | 0 | 0 | 0 | 1,851 | 0.195645 |
6fd6455ab81b55e483148e73eb93742490544d5f | 9,343 | py | Python | core.py | aminPial/Overwatch-Streaming-Automation | 5d382e979980e99cebadd802eef999601833dc48 | [
"MIT"
]
| 1 | 2020-03-21T04:32:14.000Z | 2020-03-21T04:32:14.000Z | core.py | aminPial/Overwatch-Streaming-Automation | 5d382e979980e99cebadd802eef999601833dc48 | [
"MIT"
]
| null | null | null | core.py | aminPial/Overwatch-Streaming-Automation | 5d382e979980e99cebadd802eef999601833dc48 | [
"MIT"
]
| null | null | null | import argparse, time, os, cv2, shutil, datetime, math, subprocess, pickle, multiprocessing
from actn import *
ap = argparse.ArgumentParser()
# for help -> python core.py --help
ap.add_argument("-f", "--file", required=True,
help="name of the file")
ap.add_argument("-o", "--output", required=True,
help="specifiy the folder path of output")
ap.add_argument("-b", "--before", required=True,
help="seconds to cut before", type=int)
ap.add_argument("-a", "--after", required=True,
help="seconds to cut after", type=int)
args = vars(ap.parse_args())
class core_overwatch():
def __init__(self, file_name, output_folder, before, after):
self.file_name = file_name
self.output_folder = output_folder
self.before = before
self.after = after
if not os.path.exists(str(self.output_folder)):
print("The File Path Doesn't Exist!")
print("[++++++]Creating the Folder in Path {0}".format(output_folder))
os.makedirs("{0}".format(self.output_folder))
print("[++++++]Finished Making The Folder in Path {0}".format(output_folder))
        try:
            fh = open('{0}'.format(self.file_name), 'r')
            fh.close()
        except FileNotFoundError:
            print("[+++++++]The Video File Not Found In Path. Please Try Again")
            raise SystemExit(1)
        # \( and \) are doubled for Python so the shell hands sed a literal \( \)
        cmd1 = "ffmpeg -i {0} 2>&1 | sed -n \"s/.*, \\(.*\\) fp.*/\\1/p\"".format(self.file_name)
        os.system(cmd1 + ' > tmp1')
        # the frame rate can be fractional (e.g. 29.97), so parse it as a float
        self.fps = float(open('tmp1', 'r').read().strip())
os.system(
""" ffprobe -v error -select_streams v:0 -show_entries stream=nb_frames -of default=nokey=1:noprint_wrappers=1 {0} > tmp2 """.format(
self.file_name
))
self.frame_count = int(open('tmp2', 'r').read())
print('[++++++]fps', self.fps)
print('[++++++]frame count', self.frame_count)
# get imp vid inf
def build_folder(self):
folder_names = ['./raw_calc', './raw_calc/frame_db_temp']
for directory in folder_names:
if not os.path.exists(str(directory)):
os.makedirs(str(directory))
# if exists then delete all the files in that dir tree
def which_frame_formula(self):
second_length = 1
chunk_size = round(self.fps * second_length) # fps*second_length
assert type(chunk_size) is int, "Chunk Size must have to be Integer Type"
# upto which frame the ops will execute(for loop to extract one frame from chunk size )
n = round(round(self.frame_count) / chunk_size)
start_frame = round(self.fps / 2)
common_diff = round(self.fps * second_length) # * second length,taking 1F/60
return start_frame, n, common_diff
def select_frame(self, a, n, d):
# arithmetic series y=a+(p-1)*d
which_frame_list = [a + (p - 1) * d for p in range(1, n + 1)]
return which_frame_list
def read_save_frame(self):
os.system("ffmpeg -hide_banner -loglevel panic -i {video_Fname} -vf fps=1 {f_name}/%d.png".format(
f_name='./raw_calc/frame_db_temp', video_Fname=str(self.file_name)
))
def get_action_process_multithreaded_cmd_run_commands(self):
img_list = ['./raw_calc/frame_db_temp/{0}'.format(x) for x in os.listdir('./raw_calc/frame_db_temp')]
img_list.sort(key=lambda fx: int(''.join(filter(str.isdigit, fx))))
az = return_text(img_list)
return az
# utils function start here -3
def _dbl(self, time):
if time < 10:
return '0' + str(time)
else:
return str(time)
def time_cut(self, input_in_sec):
hours = input_in_sec // 3600
minutes = (input_in_sec % 3600) // 60
seconds = (input_in_sec % 3600) % 60
return "{}:{}:{}".format(core_overwatch._dbl(self, hours), core_overwatch._dbl(self, minutes),
core_overwatch._dbl(self, seconds))
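    # Worked example (illustrative): time_cut(3725) -> "01:02:05",
    # since 3725 s = 1 h 2 min 5 s, zero-padded by _dbl() above.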
def findIndices(self, sequence, _str, extra=0): # 0011
assert len(sequence) < len(_str), "Sequence is Greater Than the Main String"
indices = []
for i in range(len(_str) - len(sequence) + 1):
temp = _str[i:i + len(sequence)]
if (sequence == temp):
indices.append(i + 2 - extra)
return indices
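    # Worked example (illustrative): findIndices('01', '00110') -> [3]
    # (the match at i=1 is reported as i + 2 - extra); with extra=1 -> [2],
    # which is how action_index_find() below maps a 0->1 transition to an index.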
# utils fx ends here
def action_index_find(self, raw_list, which_frame):
raw_str_hashed = ''
for j in raw_list:
raw_str_hashed += str(j)
assert type(raw_str_hashed) is str, " The parameter to find Indices Type must have to be a String"
result_list = core_overwatch.findIndices(self, '01', raw_str_hashed, extra=1)
final_result = []
for yx in result_list:
final_result.append(int(which_frame[yx]))
return final_result
def build_frame_range_to_cut(self, action_result):
# print(action_result)
# input will be taken ->cp from raw code
frames = round(self.frame_count)
fps = round(self.fps)
bef = int(self.before) * fps # count frm
aft = int(self.after) * fps
# frame range (tuple ds) contained list
frame_range = []
# build condition for after and before trimming
for ucv in action_result:
            if int(ucv) < bef and ucv + aft < frames:
                frame_range.append((0, ucv + aft))
            elif int(ucv) < bef and ucv + aft >= frames:
                frame_range.append((0, frames))
            elif int(ucv) >= bef and ucv + aft < frames:
                frame_range.append((ucv - bef, ucv + aft))
            else:  # clip starts inside the video but would run past its end
                frame_range.append((ucv - bef, frames))
# (temp) test
return frame_range
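    # Worked example (illustrative): with fps=30, before=2 s and after=3 s,
    # an action at frame 300 of a 1000-frame video gives bef=60, aft=90 and
    # the clip range (300 - 60, 300 + 90) == (240, 390).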
def build_output(self, start, end, video_name, file_name, end1):
os.system(
'ffmpeg -hide_banner -loglevel panic -ss {st} -i {ivfname} -to {ed} -c copy {ovfname}'.format(st=start,
ed=end1,
ivfname=self.file_name,
ovfname=video_name))
file_ = open('{}'.format(file_name), 'w')
file_.write('Start at : {sec} \n End at : {sec1} '.format(sec=start, sec1=end))
file_.close()
def send_frame_signal(self, frame_range):
# frame range is like [(0,21),(4,198)]
assert type(frame_range) is list, "Frame range must have to be a list"
fps = round(self.fps)
# build video file path name
ax = str(datetime.datetime.now())
tm = ax[0:10] + '_' + ax[11:]
file_n_ = str(self.output_folder + '/' + str(tm))
os.makedirs(file_n_)
video_type = os.path.splitext(os.path.basename(str(self.file_name)))[1] # output e.g as .mp4
for ux in range(len(frame_range)):
start = core_overwatch.time_cut(self, input_in_sec=math.ceil(frame_range[ux][0] / fps))
end = core_overwatch.time_cut(self, input_in_sec=math.ceil(frame_range[ux][1] / fps))
end1 = core_overwatch.time_cut(self, input_in_sec=math.ceil(
(frame_range[ux][1] / fps) - (frame_range[ux][0] / fps)))
print('[++++++]Start at {0} End at {1}'.format(start, end))
core_overwatch.build_output(self, start=str(start),
end=str(end),
video_name=file_n_ + '/output{vid_number}{type_v}'.format(vid_number=ux,
type_v=video_type),
file_name=file_n_ + '/output{0}.txt'.format(ux),
end1=end1
)
print("Total {0} Videos have been cut from Main Video".format(len(os.listdir(file_n_))/2))
if __name__ == "__main__":
a = core_overwatch(file_name=str(args['file']), output_folder=str(args['output']), before=int(args['before']),
after=int(args['after']))
a.build_folder()
start_frame, n, common_diff = a.which_frame_formula() # returns a,n,d
c = a.select_frame(start_frame, n, common_diff) # returns which_frame_list
st = time.time()
print("[+++++]Reading Frames....")
a.read_save_frame()
print("[+++++++]Finished Reading Frames")
print("[+++++++]Image Processing Rolling....")
d = a.get_action_process_multithreaded_cmd_run_commands()
print("[++++++++]Finished Processing Images")
f = a.action_index_find(raw_list=d, which_frame=c) # return list to start aft and bef(action first observed)
g = a.build_frame_range_to_cut(f)
a.send_frame_signal(frame_range=g)
print('[++++++]Time req to run The Engine is {0}m'.format((time.time() - st) / 60))
print('Deleting temp folders..')
shutil.rmtree('./raw_calc/frame_db_temp')
os.remove('./tmp1')
os.remove('./tmp2')
| 33.851449 | 146 | 0.558172 | 7,644 | 0.818153 | 0 | 0 | 0 | 0 | 0 | 0 | 2,230 | 0.238681 |
6fd71b1757e46f6f90fe0e76c1023674c1eb24fe | 2,168 | py | Python | reports/ipam-reports/ip-primary-missing.py | ryanmerolle/reports | 9f69eb088884033c4cc85ce43c528e964b5a8b41 | [
"MIT"
]
| null | null | null | reports/ipam-reports/ip-primary-missing.py | ryanmerolle/reports | 9f69eb088884033c4cc85ce43c528e964b5a8b41 | [
"MIT"
]
| 3 | 2022-01-30T17:51:00.000Z | 2022-01-30T17:52:16.000Z | reports/ipam-reports/ip-primary-missing.py | ryanmerolle/reports | 9f69eb088884033c4cc85ce43c528e964b5a8b41 | [
"MIT"
]
| null | null | null | from dcim.choices import DeviceStatusChoices
from dcim.models import Device
from extras.reports import Report
class DeviceIPReport(Report):
description = (
"Check that every device has either an IPv4 or IPv6 primary address assigned"
)
def test_primary_ip4(self):
for device in Device.objects.filter(status=DeviceStatusChoices.STATUS_ACTIVE):
intcount = 0
for interface in device.interfaces.all():
if not interface.mgmt_only:
intcount += 1
# There may be dumb devices with no interfaces so no IP addresses, that's OK
if intcount == 0:
if device.primary_ip4_id is not None:
if device.primary_ip6_id is not None:
self.log_failure(
device,
"Device has primary IPv4 and IPv6 address but no interfaces",
)
else:
self.log_warning(
device,
"Device has missing primary IPv4 addresses but no interfaces",
)
else:
self.log_success(device)
elif device.primary_ip4_id is None:
if device.device_type.is_child_device is True:
self.log_success(device)
else:
if device.primary_ip6_id is None:
self.log_failure(
device, "Device is missing primary IPv4 and IPv6 address"
)
else:
self.log_warning(
device, "Device is missing primary IPv4 addresses"
)
else:
if device.device_type.is_child_device is True:
self.log_success(device)
else:
if device.primary_ip6_id is None:
self.log_info(device, "Device is missing primary IPv6 address")
else:
self.log_success(device)
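# Minimal usage sketch (an assumption, not part of the original report): NetBox
# normally discovers and runs Report subclasses itself, but from a NetBox shell
# the check can be exercised roughly like
#   report = DeviceIPReport()
#   report.test_primary_ip4()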
| 41.692308 | 90 | 0.497694 | 2,055 | 0.947878 | 0 | 0 | 0 | 0 | 0 | 0 | 405 | 0.186808 |
6fd82d73cc91d48c575559a3f1137c753ea8bb0d | 3,940 | py | Python | runtime/bots/irc/main.py | AKhilRaghav0/dovenetwork | accf19fc4942d5e177d1f4d1302c40c9f979c391 | [
"MIT"
]
| null | null | null | runtime/bots/irc/main.py | AKhilRaghav0/dovenetwork | accf19fc4942d5e177d1f4d1302c40c9f979c391 | [
"MIT"
]
| null | null | null | runtime/bots/irc/main.py | AKhilRaghav0/dovenetwork | accf19fc4942d5e177d1f4d1302c40c9f979c391 | [
"MIT"
]
| null | null | null | import socket
import random
import os
import requests
import re
import github
import minecraft
import string
import sys
HOST = "xeroxirc.net"
PORT = 6667
NICK = "ak_sus"
#PASSWORD = os.getenv("PASSWORD")
CHANNEL = "#BlockySurvival"
SERVER = ""
readbuffer = ""
def send(message):
s.send(message)
print(message)
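# Illustrative note (not from the original bot): raw IRC lines are CRLF-
# terminated byte strings, so a hand-written message would look like
#   send(bytes("PRIVMSG {} :hi\r\n".format(CHANNEL), "UTF-8"))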
s = socket.socket()
s.connect((HOST, PORT))
send(bytes("NICK %s\r\n" % NICK, "UTF-8"))
send(bytes("USER %s %s %s :%s\r\n" % (NICK, NICK, NICK, NICK), "UTF-8"))
#s.send(bytes("PRIVMSG NickServ regain {} {}\r\n".format(NICK, PASSWORD), "UTF-8"))
#s.send(bytes("PRIVMSG NickServ identify {} {}\r\n".format(NICK, PASSWORD), "UTF-8"))
send(bytes("JOIN {}\r\n".format(CHANNEL), "UTF-8"))
#s.send(bytes("PRIVMSG NickServ :identify {}\r\n".format(PASSWORD), "UTF-8"))
readbuffer = readbuffer + s.recv(1024).decode("UTF-8")
temp = str.split(readbuffer, "\n")
readbuffer = temp.pop()
for line in temp:
SERVER = str.rstrip(line)[1:].split()[0]
print(str.rstrip(line))
while 1:
readbuffer = readbuffer + s.recv(1024).decode("UTF-8")
temp = str.split(readbuffer, "\n")
readbuffer = temp.pop()
for line in temp:
print(str.rstrip(line))
message = str.rstrip(line).split(" PRIVMSG {} :".format(CHANNEL))
if "PING" in line: send("PONG :{}\r\n".format(SERVER).encode("utf-8"))
msg = message[-1]
tokens = msg.split()
if msg == "$hello": send("PRIVMSG {} :Hello!\r\n".format(CHANNEL).encode("utf-8"))
if msg == "$ping": send("PRIVMSG {} :Pong!\r\n".format(CHANNEL).encode("utf-8"))
if msg == "$random": send("PRIVMSG {} :{}\r\n".format(CHANNEL, random.randint(0, 100)).encode("utf-8"))
if msg.startswith("$youtube "):
            html = requests.get("https://www.youtube.com/results?search_query=" + " ".join(msg.split()[1:])).content
            video_ids = re.findall(r"watch\?v=(\S{11})", html.decode())
            if video_ids: send("PRIVMSG {} :https://www.youtube.com/watch?v={}\r\n".format(CHANNEL, video_ids[0]).encode("utf-8"))
#if msg.startswith("$google "): send("PRIVMSG {} :{}\r\n".format(CHANNEL, googlesearch.search(" ".join(msg.split()[1:]))[0]).encode("utf-8"))
#if msg.startswith("$wolfram "): send("PRIVMSG {} :{}\r\n".format(CHANNEL, wolfram.get(" ".join(msg.split()[1:]))).encode("utf-8"))
if msg.startswith("$github "):
if tokens[1] == "url": send("PRIVMSG {} :https://github.com/{}/{}\r\n".format(CHANNEL, tokens[2], tokens[3]).encode("utf-8"))
if tokens[1] == "issues": send("PRIVMSG {} :#{}: {}\r\n".format(CHANNEL, tokens[4], github.get_issue_title(tokens[2], tokens[3], tokens[4])).encode("utf-8"))
if msg == "$server": send("PRIVMSG {} :{}\r\n".format(CHANNEL, minecraft.get()).encode("utf-8"))
if msg == "$help": send("PRIVMSG {} :Avalible commands: $hello, $ping, $youtube, $google, $github, $wolfram.\r\n".format(CHANNEL).encode("utf-8"))
if msg.startswith("$help "):
if tokens[1] == "hello": send("PRIVMSG {} :Syntax: $hello Action: Says \"Hello!\".\r\n".format(CHANNEL).encode("utf-8"))
if tokens[1] == "ping":send("PRIVMSG {} :Syntax: $ping Action: Says \"Ping!\".\r\n".format(CHANNEL).encode("utf-8"))
if tokens[1] == "youtube": send("PRIVMSG {} :Syntax: $youtube <keyword> Action: Sends the URL of a YouTube video matching the keyword given.\r\n".format(CHANNEL).encode("utf-8"))
#if tokens[1] == "google": send("PRIVMSG {} :Syntax: $google <keyword> Action: Sends the URL of a google search with the keyword given\r\n".format(CHANNEL).encode("utf-8"))
if tokens[1] == "github": send("PRIVMSG {} :Syntax: $github <topic> <user> <repo> <number> Action: Returns data about a github repo.\r\n".format(CHANNEL).encode("utf-8"))
#if tokens[1] == "wolfram": send("PRIVMSG {} :Syntax: $wolfram <query> Action: Asks Wolfram|Alpha the query given.\r\n".format(CHANNEL).encode("utf-8"))
| 60.615385 | 190 | 0.612437 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,945 | 0.493655 |
6fd8ba3787eb3badc7089b8d33f9dd05d0100188 | 3,456 | py | Python | evaluate.py | PhilippMarquardt/Amazing-Semantic-Segmentation | 9ca17b71a7bd9e4f4d433fe3bb50105bad564df4 | [
"Apache-2.0"
]
| null | null | null | evaluate.py | PhilippMarquardt/Amazing-Semantic-Segmentation | 9ca17b71a7bd9e4f4d433fe3bb50105bad564df4 | [
"Apache-2.0"
]
| null | null | null | evaluate.py | PhilippMarquardt/Amazing-Semantic-Segmentation | 9ca17b71a7bd9e4f4d433fe3bb50105bad564df4 | [
"Apache-2.0"
]
| null | null | null | """
The file defines the evaluate process on target dataset.
@Author: Yang Lu
@Github: https://github.com/luyanger1799
@Project: https://github.com/luyanger1799/amazing-semantic-segmentation
"""
from sklearn.metrics import multilabel_confusion_matrix
from amazingutils.helpers import *
from amazingutils.utils import load_image
import numpy as np
import argparse
import sys
import cv2
import os
def str2bool(v):
if v.lower() in ('yes', 'true', 't', 'y', '1'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0'):
return False
else:
raise argparse.ArgumentTypeError('Boolean value expected.')
parser = argparse.ArgumentParser()
parser.add_argument('--dataset', help='The path of the dataset.', type=str, default='CamVid')
parser.add_argument('--crop_height', help='The height to crop the image.', type=int, default=256)
parser.add_argument('--crop_width', help='The width to crop the image.', type=int, default=256)
parser.add_argument('--predictions', help='The path of predicted image.', type=str, required=True)
args = parser.parse_args()
# check related paths
paths = check_related_path(os.getcwd())
# get image and label file names for training and validation
_, _, _, _, _, test_label_names = get_dataset_info(args.dataset)
# get color info
csv_file = os.path.join(args.dataset, 'class_dict.csv')
class_names, _ = get_colored_info(csv_file)
# get the prediction file name list
if not os.path.exists(args.predictions):
    raise ValueError('the path of predictions does not exist.')
prediction_names = []
for file in sorted(os.listdir(args.predictions)):
prediction_names.append(os.path.join(args.predictions, file))
# evaluated classes
evaluated_classes = get_evaluated_classes(os.path.join(args.dataset, 'evaluated_classes.txt'))
num_classes = len(class_names)
class_iou = dict()
for name in evaluated_classes:
class_iou[name] = list()
class_idx = dict(zip(class_names, range(num_classes)))
# begin evaluate
assert len(test_label_names) == len(prediction_names)
for i, (name1, name2) in enumerate(zip(test_label_names, prediction_names)):
sys.stdout.write('\rRunning test image %d / %d' % (i + 1, len(test_label_names)))
sys.stdout.flush()
label = np.array(cv2.resize(load_image(name1),
dsize=(args.crop_width, args.crop_height), interpolation=cv2.INTER_NEAREST))
pred = np.array(cv2.resize(load_image(name2),
dsize=(args.crop_width, args.crop_height), interpolation=cv2.INTER_NEAREST))
confusion_matrix = multilabel_confusion_matrix(label.flatten(), pred.flatten(), labels=list(class_idx.values()))
for eval_cls in evaluated_classes:
eval_idx = class_idx[eval_cls]
(tn, fp), (fn, tp) = confusion_matrix[eval_idx]
if tp + fn > 0:
class_iou[eval_cls].append(tp / (tp + fp + fn))
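# Illustrative arithmetic for the per-class IoU above: IoU = TP / (TP + FP + FN),
# so e.g. TP=8, FP=2, FN=2 gives 8 / 12 = 0.6667 for that image and class.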
print('\n****************************************')
print('* The IoU of each class is as follows: *')
print('****************************************')
for eval_cls in evaluated_classes:
class_iou[eval_cls] = np.mean(class_iou[eval_cls])
print('{cls:}: {iou:.4f}'.format(cls=eval_cls, iou=class_iou[eval_cls]))
print('\n**********************************************')
print('* The Mean IoU of all classes is as follows: *')
print('**********************************************')
print('Mean IoU: {mean_iou:.4f}'.format(mean_iou=np.mean(list(class_iou.values()))))
| 36 | 116 | 0.672743 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,036 | 0.299769 |
6fd965866bb7ccf36c6dbb4a056de13696d6bd75 | 6,524 | py | Python | examples/pso_trail/demo_gpso.py | gmjustforfun/code | 7551909edf61bddfeafdff223c2a3390661dc62f | [
"MIT"
]
| null | null | null | examples/pso_trail/demo_gpso.py | gmjustforfun/code | 7551909edf61bddfeafdff223c2a3390661dc62f | [
"MIT"
]
| null | null | null | examples/pso_trail/demo_gpso.py | gmjustforfun/code | 7551909edf61bddfeafdff223c2a3390661dc62f | [
"MIT"
]
| null | null | null | from pso.GPSO import GPSO
import numpy as np
import time
import pandas as pd
np.random.seed(42)
# f1 done
def Sphere(p):
    # Sphere function
out_put = 0
for i in p:
out_put += i ** 2
return out_put
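# Illustrative check: Sphere(np.array([1.0, 2.0])) returns 1 + 4 = 5.0.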
# f2 done
def Sch222(x):
out_put = 0
out_put01 = 1
for i in x:
out_put += abs(i)
out_put01 = abs(i)*out_put01
out_put = out_put01+out_put
return out_put
# f3 done
def Quadric(x):
output = 0
# print(x.shape[0])
for i in range(x.shape[0]):
output += (np.sum(x[0:i+1])) ** 2
# print(np.square(np.sum(x[0:i+1])))
return output
# f4 done
def Schl(x):
# print(np.max(np.abs(x)))
return np.max(np.abs(x))
# f5 done
def Step(x):
output = 0
for i in x:
output += (np.floor(i+0.5))**2
return output
# f6 done
def Noise(x):
output = 0
cnt = 1
for i in x:
output = cnt * (i**4) + output
cnt += 1
output += np.random.rand()
return output
# f7 done
def Rosenbrock(p):
    '''
    -2.048 <= xi <= 2.048
    The global optimum lies inside a smooth, long and narrow parabolic valley,
    which makes the search direction hard to identify and the optimum very
    difficult to locate.
    The minimum value 0 is attained at (1, ..., 1).
    :param p:
    :return:
    '''
n_dim = len(p)
res = 0
for i in range(n_dim - 1):
res += 100 * np.square(np.square(p[i]) - p[i + 1]) + np.square(p[i] - 1)
return res
# f8 is problematic and is skipped; this is f8 from APSO
def Schewel(x):
out_put = 0
for i in x:
out_put += -i*np.sin(np.sqrt(abs(i)))
return out_put
# f9 done
def Rastrigin(p):
    '''
    A multi-peak function, also a typical nonlinear multimodal benchmark.
    -5.12 <= xi <= 5.12
    It has roughly 10*n local minima in this range, with sharply fluctuating
    peaks, so the global optimum is hard to find.
    has a global minimum at x = 0 where f(x) = 0
    '''
return np.sum([np.square(x) - 10 * np.cos(2 * np.pi * x) + 10 for x in p])
# f10
def Ackley(x):
part1 = 0
part2 = 0
for i in x:
part1 += (i**2)
part2 += np.cos(2 * np.pi * i)
left = 20 * np.exp(-0.2 * ((part1 / x.shape[0]) ** .5))
right = np.exp(part2 / x.shape[0])
return -left - right + 20 + np.e
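# Illustrative check: Ackley(np.zeros(30)) evaluates to 0.0, its global minimum
# at the origin (left = 20, right = e, so -20 - e + 20 + e == 0).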
# f11 ok
def Griewank(p):
    '''
    It has many local minima, and their number depends on the problem dimension.
    A typical nonlinear multimodal function with a broad search space; a complex
    multimodal problem that optimization algorithms find hard to handle.
    The global minimum 0 is attained at (0, ..., 0).
    -600 <= xi <= 600
    '''
part1 = [np.square(x) / 4000 for x in p]
part2 = [np.cos(x / np.sqrt(i + 1)) for i, x in enumerate(p)]
return np.sum(part1) - np.prod(part2) + 1
g = 10000
times = 30
table = np.zeros((2, 10))
gBest = np.zeros((10, 30))  # best values over the 30 runs for each of the 10 functions
for i in range(times):
optimizer = GPSO(func=Sphere, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-100), ub=np.ones(30) * 100,
w=0.9, c1=2, c2=2, acceptance=0.01)
start = time.time()
optimizer.run()
end = time.time()
print('Sphere:', optimizer.gbest_y)
table[0, 0] += optimizer.gbest_y
table[1, 0] += end - start
gBest[0, i] = optimizer.gbest_y
optimizer = GPSO(func=Sch222, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-10), ub=np.ones(30) * 10,
w=0.9, c1=2, c2=2, acceptance=0.01)
start = time.time()
optimizer.run()
end = time.time()
print('Sch222:', optimizer.gbest_y)
table[0, 1] += optimizer.gbest_y
table[1, 1] += end - start
gBest[1, i] = optimizer.gbest_y
optimizer = GPSO(func=Quadric, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-100), ub=np.ones(30) * 100,
w=0.9, c1=2, c2=2, acceptance=100)
start = time.time()
optimizer.run()
end = time.time()
print('Quadric:', optimizer.gbest_y)
table[0, 2] += optimizer.gbest_y
table[1, 2] += end - start
gBest[2, i] = optimizer.gbest_y
optimizer = GPSO(func=Rosenbrock, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-10), ub=np.ones(30) * 10,
w=0.9, c1=2, c2=2, acceptance=100)
start = time.time()
optimizer.run()
end = time.time()
print('Rosenbrock:', optimizer.gbest_y)
table[0, 3] += optimizer.gbest_y
table[1, 3] += end - start
gBest[3, i] = optimizer.gbest_y
optimizer = GPSO(func=Step, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-100), ub=np.ones(30) * 100,
w=0.9, c1=2, c2=2, acceptance=0)
start = time.time()
optimizer.run()
end = time.time()
print('Step:', optimizer.gbest_y)
table[0, 4] += optimizer.gbest_y
table[1, 4] += end - start
gBest[4, i] = optimizer.gbest_y
optimizer = GPSO(func=Noise, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-1.28), ub=np.ones(30) * 1.28,
w=0.9, c1=2, c2=2, acceptance=0.01)
start = time.time()
optimizer.run()
end = time.time()
print('Noise:', optimizer.gbest_y)
table[0, 5] += optimizer.gbest_y
table[1, 5] += end - start
gBest[5, i] = optimizer.gbest_y
optimizer = GPSO(func=Schewel, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-500), ub=np.ones(30) * 500,
w=0.9, c1=2, c2=2, acceptance=-10000)
start = time.time()
optimizer.run()
end = time.time()
print('Schewel:', optimizer.gbest_y)
table[0, 6] += optimizer.gbest_y
table[1, 6] += end - start
gBest[6, i] = optimizer.gbest_y
optimizer = GPSO(func=Rastrigin, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-5.12), ub=np.ones(30) * 5.12,
w=0.9, c1=2, c2=2, acceptance=50)
start = time.time()
optimizer.run()
end = time.time()
print('Rastrigin:', optimizer.gbest_y)
table[0, 7] += optimizer.gbest_y
table[1, 7] += end - start
gBest[7, i] = optimizer.gbest_y
optimizer = GPSO(func=Ackley, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-32), ub=np.ones(30) * 32,
w=0.9, c1=2, c2=2, acceptance=0.01)
start = time.time()
optimizer.run()
end = time.time()
print('Ackley:', optimizer.gbest_y)
table[0, 8] += optimizer.gbest_y
table[1, 8] += end - start
gBest[8, i] = optimizer.gbest_y
optimizer = GPSO(func=Griewank, dim=30, pop=20, max_iter=g, lb=np.ones(30) * (-600), ub=np.ones(30) * 600,
w=0.9, c1=2, c2=2, acceptance=0.01)
start = time.time()
optimizer.run()
end = time.time()
print('Griewank:', optimizer.gbest_y)
table[0, 9] += optimizer.gbest_y
table[1, 9] += end - start
gBest[9, i] = optimizer.gbest_y
table = table / times
table = pd.DataFrame(table)
table.columns = ['Sphere', 'Schwefel_P222', 'Quadric', 'Rosenbrock', 'Step', 'Quadric_Noise', 'Schwefel',
'Rastrigin', 'Ackley', 'Griewank']
table.index = ['mean score', 'mean time']
print(table)
print('std of the 30 runs for each of the 10 test functions:', np.std(gBest, axis=1))
print('best of the 30 runs for each of the 10 test functions:', np.min(gBest, axis=1)) | 28.995556 | 113 | 0.566524 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,330 | 0.190381 |
6fdb320f11ce21ba2207772e25516617a4f09f64 | 310 | py | Python | setup.py | Juniper/contrail-server-manager | 61a586495b4819904887b5dccb9288b9cf3d2ad5 | [
"Apache-2.0"
]
| 12 | 2015-07-28T15:31:51.000Z | 2019-03-03T23:39:10.000Z | setup.py | Juniper/contrail-server-manager | 61a586495b4819904887b5dccb9288b9cf3d2ad5 | [
"Apache-2.0"
]
| 4 | 2017-01-25T05:24:17.000Z | 2019-04-03T00:25:13.000Z | setup.py | Juniper/contrail-server-manager | 61a586495b4819904887b5dccb9288b9cf3d2ad5 | [
"Apache-2.0"
]
| 33 | 2015-01-07T10:01:28.000Z | 2020-07-26T08:22:53.000Z | #
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
from setuptools import setup
import setuptools
setup(
name='contrail-server-manager',
version='0.1dev',
packages=setuptools.find_packages(exclude=["*.pyc"]),
zip_safe=False,
long_description="Server Manager package",
)
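# Usage sketch (an assumption, not part of the original file): with setuptools
# this package is typically built or installed via e.g.
#   python setup.py sdist
#   pip install .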
| 20.666667 | 64 | 0.716129 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 130 | 0.419355 |
6fdc3aa267ad82108937792e25090869d2290abd | 6,272 | py | Python | Modules/Loadable/Markups/Testing/Python/MarkupsSceneViewRestoreTestManyLists.py | TheInterventionCentre/NorMIT-Plan-App | 765ed9a5dccc1cc134b65ccabe93fc132baeb2ea | [
"MIT"
]
| null | null | null | Modules/Loadable/Markups/Testing/Python/MarkupsSceneViewRestoreTestManyLists.py | TheInterventionCentre/NorMIT-Plan-App | 765ed9a5dccc1cc134b65ccabe93fc132baeb2ea | [
"MIT"
]
| null | null | null | Modules/Loadable/Markups/Testing/Python/MarkupsSceneViewRestoreTestManyLists.py | TheInterventionCentre/NorMIT-Plan-App | 765ed9a5dccc1cc134b65ccabe93fc132baeb2ea | [
"MIT"
]
| null | null | null |
# Test restoring a scene with multiple lists with different number
# of fiducials
# first fiducial list
displayNode1 = slicer.vtkMRMLMarkupsDisplayNode()
slicer.mrmlScene.AddNode(displayNode1)
fidNode1 = slicer.vtkMRMLMarkupsFiducialNode()
fidNode1.SetName("FidNode1")
slicer.mrmlScene.AddNode(fidNode1)
fidNode1.SetAndObserveDisplayNodeID(displayNode1.GetID())
coords = [0.0, 0.0, 0.0]
numFidsInList1 = 5
for i in range(numFidsInList1):
fidNode1.AddFiducialFromArray(coords)
coords[0] += 1.0
coords[1] += 2.0
coords[2] += 1.0
# second fiducial list
displayNode2 = slicer.vtkMRMLMarkupsDisplayNode()
slicer.mrmlScene.AddNode(displayNode2)
fidNode2 = slicer.vtkMRMLMarkupsFiducialNode()
fidNode2.SetName("FidNode2")
slicer.mrmlScene.AddNode(fidNode2)
fidNode2.SetAndObserveDisplayNodeID(displayNode2.GetID())
numFidsInList2 = 10
for i in range(numFidsInList2):
fidNode2.AddFiducialFromArray(coords)
coords[0] += 1.0
coords[1] += 1.0
coords[2] += 3.0
sv = slicer.mrmlScene.AddNode(slicer.vtkMRMLSceneViewNode())
numFidNodesBeforeStore = slicer.mrmlScene.GetNumberOfNodesByClass('vtkMRMLMarkupsFiducialNode')
sv.StoreScene()
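# (Scene views snapshot the MRML scene: StoreScene() records the nodes present
# now, and the RestoreScene() call below returns the scene to that snapshot,
# discarding nodes added in between -- which is exactly what this test checks.)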
# add a third list that will get removed on restore
# third fiducial list
displayNode3 = slicer.vtkMRMLMarkupsDisplayNode()
slicer.mrmlScene.AddNode(displayNode3)
fidNode3 = slicer.vtkMRMLMarkupsFiducialNode()
fidNode3.SetName("FidNode3")
slicer.mrmlScene.AddNode(fidNode3)
fidNode3.SetAndObserveDisplayNodeID(displayNode3.GetID())
numFidsInList3 = 2
for i in range(numFidsInList3):
fidNode3.AddFiducialFromArray(coords)
coords[0] += 1.0
coords[1] += 2.0
coords[2] += 3.0
sv.RestoreScene()
numFidNodesAfterRestore = slicer.mrmlScene.GetNumberOfNodesByClass('vtkMRMLMarkupsFiducialNode')
if numFidNodesAfterRestore != numFidNodesBeforeStore:
print "After restoring the scene, expected ", numFidNodesBeforeStore, " fiducial nodes, but have ", numFidNodesAfterRestore
exceptionMessage = "After restoring the scene, expected " + str(numFidNodesBeforeStore) + " fiducial nodes, but have " + str(numFidNodesAfterRestore)
raise Exception(exceptionMessage)
#fid1AfterRestore = slicer.mrmlScene.GetNodeByID("vtkMRMLMarkupsFiducialNode1")
fid1AfterRestore = slicer.mrmlScene.GetFirstNodeByName("FidNode1")
numFidsInList1AfterRestore = fid1AfterRestore.GetNumberOfMarkups()
print "After restore, list with name FidNode1 has id ", fid1AfterRestore.GetID(), " and num fids = ", numFidsInList1AfterRestore
if numFidsInList1AfterRestore != numFidsInList1:
exceptionMessage = "After restoring list 1, id = " + fid1AfterRestore.GetID()
exceptionMessage += ", expected " + str(numFidsInList1) + " but got "
exceptionMessage += str(numFidsInList1AfterRestore)
raise Exception(exceptionMessage)
# fid2AfterRestore = slicer.mrmlScene.GetNodeByID("vtkMRMLMarkupsFiducialNode2")
fid2AfterRestore = slicer.mrmlScene.GetFirstNodeByName("FidNode2")
numFidsInList2AfterRestore = fid2AfterRestore.GetNumberOfMarkups()
print "After restore, list with name FidNode2 has id ", fid2AfterRestore.GetID(), " and num fids = ", numFidsInList2AfterRestore
if numFidsInList2AfterRestore != numFidsInList2:
exceptionMessage = "After restoring list 2, id = " + fid2AfterRestore.GetID()
exceptionMessage += ", expected " + str(numFidsInList2) + " but got "
exceptionMessage += str(numFidsInList2AfterRestore)
raise Exception(exceptionMessage)
# check the displayable manager for the right number of widgets/seeds
lm = slicer.app.layoutManager()
td = lm.threeDWidget(0)
ms = vtk.vtkCollection()
td.getDisplayableManagers(ms)
fidManagerIndex = -1
for i in range(ms.GetNumberOfItems()):
m = ms.GetItemAsObject(i)
if m.GetClassName() == "vtkMRMLMarkupsFiducialDisplayableManager3D":
        fidManagerIndex = i
        print(m.GetClassName(), fidManagerIndex)
if fidManagerIndex == -1:
exceptionMessage = "Failed to find markups fiducial displayable manager 3d!"
raise Exception(exceptionMessage)
mfm = ms.GetItemAsObject(fidManagerIndex)
h = mfm.GetHelper()
print('Helper = ', h)
seedWidget1 = h.GetWidget(fid1AfterRestore)
rep1 = seedWidget1.GetRepresentation()
print "Seed widget 1 has number of seeds = ",rep1.GetNumberOfSeeds()
if rep1.GetNumberOfSeeds() != numFidsInList1AfterRestore:
exceptionMessage = "After restoring list 1, expected seed widget to have "
exceptionMessage += str(numFidsInList1AfterRestore) + " seeds, but it has "
exceptionMessage += str(rep1.GetNumberOfSeeds())
raise Exception(exceptionMessage)
# check positions
for s in range(numFidsInList1AfterRestore):
    seed = seedWidget1.GetSeed(s)
    handleRep = seed.GetHandleRepresentation()
    worldPos = handleRep.GetWorldPosition()
    print("seed ", s, " world position = ", worldPos)
    fidPos = [0.0, 0.0, 0.0]
    fid1AfterRestore.GetNthFiducialPosition(s, fidPos)
    xdiff = fidPos[0] - worldPos[0]
    ydiff = fidPos[1] - worldPos[1]
    zdiff = fidPos[2] - worldPos[2]
    # use absolute differences so positive and negative errors cannot cancel out
    diffTotal = abs(xdiff) + abs(ydiff) + abs(zdiff)
    if diffTotal > 0.1:
        exceptionMessage = "List1: Difference between seed position " + str(s)
        exceptionMessage += " and fiducial position totals = " + str(diffTotal)
        raise Exception(exceptionMessage)
seedWidget2 = h.GetWidget(fid2AfterRestore)
rep2 = seedWidget2.GetRepresentation()
print "Seed widget 2 has number of seeds = ",rep2.GetNumberOfSeeds()
if rep2.GetNumberOfSeeds() != numFidsInList2AfterRestore:
exceptionMessage = "After restoring fid list 2, expected seed widget to have "
exceptionMessage += str(numFidsInList2AfterRestore) + " seeds, but it has "
exceptionMessage += str(rep2.GetNumberOfSeeds())
raise Exception(exceptionMessage)
# check positions
for s in range(numFidsInList2AfterRestore):
    seed = seedWidget2.GetSeed(s)
    handleRep = seed.GetHandleRepresentation()
    worldPos = handleRep.GetWorldPosition()
    print("seed ", s, " world position = ", worldPos)
    fidPos = [0.0, 0.0, 0.0]
    fid2AfterRestore.GetNthFiducialPosition(s, fidPos)
    xdiff = fidPos[0] - worldPos[0]
    ydiff = fidPos[1] - worldPos[1]
    zdiff = fidPos[2] - worldPos[2]
    # use absolute differences so positive and negative errors cannot cancel out
    diffTotal = abs(xdiff) + abs(ydiff) + abs(zdiff)
    if diffTotal > 0.1:
        exceptionMessage = "List2: Difference between seed position " + str(s)
        exceptionMessage += " and fiducial position totals = " + str(diffTotal)
        raise Exception(exceptionMessage)
ms.RemoveAllItems()
| 37.333333 | 151 | 0.772162 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,489 | 0.237404 |
6fdd8bc73e2b49aa962aeebacd2ae774e4162d17 | 1,013 | py | Python | segmentfault/apps/msg/consumer.py | Yookyiss/segmentfault | 8fb7890c8b650ac34541a8fb14c3cd9bef98d120 | [
"MIT"
]
| null | null | null | segmentfault/apps/msg/consumer.py | Yookyiss/segmentfault | 8fb7890c8b650ac34541a8fb14c3cd9bef98d120 | [
"MIT"
]
| 12 | 2020-02-12T01:14:42.000Z | 2022-03-11T23:54:43.000Z | segmentfault/apps/msg/consumer.py | Yookyiss/segmentfault | 8fb7890c8b650ac34541a8fb14c3cd9bef98d120 | [
"MIT"
]
| null | null | null | # -*- coding:utf-8 -*-
# @Time : 2019/7/21 12:35 PM
# @Author : __wutonghe__
# docs https://channels.readthedocs.io/en/latest/tutorial/part_3.html#rewrite-the-consumer-to-be-asynchronous
from channels.generic.websocket import AsyncWebsocketConsumer
import json
class MessageConsumer(AsyncWebsocketConsumer):
"""
    Private-message websocket; uses asynchronous communication for higher concurrency.
"""
async def connect(self):
"""当 websocket 一链接上以后触发该函数"""
if self.scope['user'].is_anonymous:
await self.close()
else:
            await self.channel_layer.group_add(self.scope['user'].username + '-message',self.channel_name) # join this user's chat-room group
await self.accept()
async def receive(self, text_data=None, bytes_data=None):
"""将答复交回给websocket"""
        await self.send(text_data=json.dumps(text_data)) # send the message to the front end
async def disconnect(self, code):
"""断开链接时触发该函数"""
        await self.channel_layer.group_discard(self.scope['user'].username + '-message',self.channel_name) # remove this connection from the chat-room group
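# Usage sketch (an assumption, not from the original app): other code can push a
# message to this consumer's group through the channel layer, e.g.
#   from asgiref.sync import async_to_sync
#   from channels.layers import get_channel_layer
#   async_to_sync(get_channel_layer().group_send)(
#       username + '-message', {'type': 'chat.message', 'message': '...'})
# which would also require a matching chat_message() handler on the consumer.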
| 30.69697 | 119 | 0.669299 | 870 | 0.763828 | 0 | 0 | 0 | 0 | 736 | 0.646181 | 484 | 0.424934 |
6fe090a4e22c0963ebcb0f7db477cda0fa848e0e | 2,618 | py | Python | tests/utils/test_interpolator.py | JelleAalbers/hypney | 3e38e21743fc9babe0ed47af299d08242a9b6d32 | [
"MIT"
]
| null | null | null | tests/utils/test_interpolator.py | JelleAalbers/hypney | 3e38e21743fc9babe0ed47af299d08242a9b6d32 | [
"MIT"
]
| null | null | null | tests/utils/test_interpolator.py | JelleAalbers/hypney | 3e38e21743fc9babe0ed47af299d08242a9b6d32 | [
"MIT"
]
| null | null | null | import eagerpy as ep
import numpy as np
from scipy.interpolate import RegularGridInterpolator
import hypney
tl = ep.numpy
def test_regular_grid_interpolator():
"""Adapted from
https://github.com/sbarratt/torch_interpolations/blob/master/tests/test_grid_interpolator.py
"""
points = [tl.arange(-0.5, 2.5, 0.1) * 1.0, tl.arange(-0.5, 2.5, 0.2) * 1.0]
values = (
hypney.utils.eagerpy.sin(points[0])[:, None]
+ 2 * hypney.utils.eagerpy.cos(points[1])[None, :]
+ hypney.utils.eagerpy.sin(5 * points[0][:, None] @ points[1][None, :])
)
X, Y = ep.meshgrid(tl.arange(-0.5, 2, 0.1), tl.arange(-0.5, 2, 0.1))
points_to_interp = ep.stack([X.flatten(), Y.flatten()]).T
gi = hypney.utils.interpolation.RegularGridInterpolator(points, values)
fx = gi(points_to_interp)
rgi = RegularGridInterpolator(
[p.numpy() for p in points], [x.numpy() for x in values], bounds_error=False
)
rfx = rgi(points_to_interp.numpy())
np.testing.assert_allclose(rfx, fx.numpy(), atol=1e-6)
# TODO: port derivative test to eagerpy
# note that points_to_interp has to be transposed
#
# def test_regular_grid_interpolator_derivative():
# points = [torch.arange(-.5, 2.5, .5) * 1., torch.arange(-.5, 2.5, .5) * 1.]
# values = torch.sin(points[0])[:, None] + 2 * torch.cos(points[1])[None, :] + torch.sin(5 * points[0][:, None] @ points[1][None, :])
# values.requires_grad_(True)
#
# X, Y = np.meshgrid(np.arange(-.5, 2, .19), np.arange(-.5, 2, .19))
# points_to_interp = [torch.from_numpy(
# X.flatten()).float(), torch.from_numpy(Y.flatten()).float()]
#
# def f(values):
# return torch_interpolations.RegularGridInterpolator(
# points, values)(points_to_interp)
#
# torch.autograd.gradcheck(f, (values,), eps=1e-5, atol=1e-1, rtol=1e-1)
def test_interpolator_builder():
itp = hypney.utils.interpolation.InterpolatorBuilder([(-1, 0, 1)])
def scalar_f(z):
return z[0]
z = ep.astensor(np.array([1, 0, -1, 0, 1, 1, -1]))
scalar_itp = itp.make_interpolator(scalar_f)
np.testing.assert_array_equal(scalar_itp(z).numpy(), z.numpy())
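    # Illustrative: scalar_f is the identity on its first coordinate, so the
    # interpolator anchored at (-1, 0, 1) reproduces z exactly in this check.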
def matrix_f(z):
return ep.astensor(np.ones((2, 2)) * z[0])
matrix_itp = itp.make_interpolator(matrix_f)
np.testing.assert_array_equal(
matrix_itp(z).numpy(), z[:, None, None].numpy() * np.ones((1, 2, 2))
)
# What happened here? Does the test not make sense or did the API change?
# np.testing.assert_array_equal(
# matrix_itp(ep.numpy.array([0, 0, 0])).numpy(),
# np.ones((2, 2)))
| 33.564103 | 137 | 0.632544 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,082 | 0.413293 |
6fe12a816ae34998a3fcf2329f909ed39bda660d | 8,451 | py | Python | python/database.py | bvmeggelen/routino | b6bcc47be6ba4a90353a5b140ca9996aaa17d2b8 | [
"X11",
"MIT"
]
| 1 | 2016-02-12T20:26:31.000Z | 2016-02-12T20:26:31.000Z | python/database.py | bvmeggelen/routino | b6bcc47be6ba4a90353a5b140ca9996aaa17d2b8 | [
"X11",
"MIT"
]
| 2 | 2019-01-16T10:00:19.000Z | 2019-02-03T10:53:32.000Z | python/database.py | bvmeggelen/routino | b6bcc47be6ba4a90353a5b140ca9996aaa17d2b8 | [
"X11",
"MIT"
]
| null | null | null | #!/usr/bin/python3
##########################################
# Routino database access from Python.
#
# Part of the Routino routing software.
##########################################
# This file Copyright 2018 Andrew M. Bishop
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##########################################
import routino.database
# Database, access all attributes
database = routino.database.LoadDatabase("../../src/test/fat", "turns")
if database is None:
database = routino.database.LoadDatabase("../src/test/fat", "turns")
if database is None:
print("Failed to load database")
exit(1)
print(database)
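# (From the calls above: LoadDatabase takes a database directory and a file-name
# prefix -- here the "turns" test database -- and returns None on failure, hence
# the fallback to a second relative path.)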
database_attrs = ['nnodes', 'nsegments', 'nways', 'nrelations']
for attr in database_attrs:
print(" Attribute: " + attr + " =", getattr(database, attr))
print("")
# A single node, access all attributes and all functions
node=database.GetNode(0)
print("1st node =", node)
node_attrs = ['id', 'firstsegment', 'latitude', 'longitude', 'allow', 'flags']
node_infos = ['', '', 'degrees', 'degrees', '[note 1]', '[note 2]']
for attr,info in zip(node_attrs,node_infos):
print(" Attribute: " + attr + " =", getattr(node, attr), info)
segments = node.Segments()
print(" Function: " + "Segments()" + " = [" + ", ".join([str(segments[x]) for x in range(len(segments))]) + "]")
print("")
# A single segment, access all attributes and all functions
segment=database.GetSegment(0)
print("1st segment =", segment)
segment_attrs = ['id', 'node1', 'node2', 'next2', 'way', 'distance', 'flags']
segment_infos = ['', '', '', '', '', 'km', '[note 3]']
for attr,info in zip(segment_attrs,segment_infos):
print(" Attribute: " + attr + " =", getattr(segment, attr), info)
print(" Function: " + "Node1()" + " = " + str(segment.Node1()))
print(" Function: " + "Node2()" + " = " + str(segment.Node2()))
print(" Function: " + "Way()" + " = " + str(segment.Way()))
print("")
# A single way, access all attributes and all functions
way=database.GetWay(0)
print("1st way =", way)
way_attrs = ['id', 'name', 'allow', 'type', 'props', 'speed', 'weight', 'height', 'width', 'length']
way_infos = ['', '', '[note 1]', '[note 4]', '[note 5]', 'km/hr [note 6]', 'tonnes [note 6]', 'metres [note 6]', 'metres [note 6]', 'metres [note 6]']
for attr,info in zip(way_attrs,way_infos):
print(" Attribute: " + attr + " =", getattr(way, attr), info)
print("")
# A single relation, access all attributes and all functions
relation=database.GetRelation(0)
print("1st relation =", relation)
relation_attrs = ['id', 'from_seg', 'via_node', 'to_seg', 'from_way', 'to_way', 'from_node', 'to_node', 'except_transport']
relation_infos = ['', '', '', '', '', '', '', '', '[note 7]']
for attr,info in zip(relation_attrs,relation_infos):
print(" Attribute: " + attr + " =", getattr(relation, attr), info)
print(" Function: " + "FromSegment()" + " = " + str(relation.FromSegment()))
print(" Function: " + "ViaNode()" + " = " + str(relation.ViaNode()))
print(" Function: " + "ToSegment()" + " = " + str(relation.ToSegment()))
print(" Function: " + "FromWay()" + " = " + str(relation.FromWay()))
print(" Function: " + "ToWay()" + " = " + str(relation.ToWay()))
print(" Function: " + "FromNode()" + " = " + str(relation.FromNode()))
print(" Function: " + "ToNode()" + " = " + str(relation.ToNode()))
print("")
# The list of nodes as a list and an iterable (just the first 4)
nodes=database.Nodes()
print("len(database.Nodes()) = " + str(len(nodes)))
print("database.Nodes() = [" + ", ".join([str(nodes[x]) for x in range(4)]) + ", ...]")
for node in nodes:
if node.id == 4:
break
print(node)
print("")
# The list of segments as a list and an iterable (just the first 4)
segments=database.Segments()
print("len(database.Segments()) = " + str(len(segments)))
print("database.Segments() = [" + ", ".join([str(segments[x]) for x in range(4)]) + ", ...]")
for segment in segments:
if segment.id == 4:
break
print(segment)
print("")
# The list of ways as a list and an iterable (just the first 4)
ways=database.Ways()
print("len(database.Ways()) = " + str(len(ways)))
print("database.Ways() = [" + ", ".join([str(ways[x]) for x in range(4)]) + ", ...]")
for way in ways:
if way.id == 4:
break
print(way)
print("")
# The list of relations as a list and an iterable (just the first 4)
relations=database.Relations()
print("len(database.Relations()) = " + str(len(relations)))
print("database.Relations() = [" + ", ".join([str(relations[x]) for x in range(4)]) + ", ...]")
for relation in relations:
if relation.id == 4:
break
print(relation)
print("")
# Enumerated lists
transports_enum = ["Transports_None",
"Transports_Foot",
"Transports_Horse",
"Transports_Wheelchair",
"Transports_Bicycle",
"Transports_Moped",
"Transports_Motorcycle",
"Transports_Motorcar",
"Transports_Goods",
"Transports_HGV",
"Transports_PSV",
"Transports_ALL"]
nodeflags_enum = ["Nodeflag_Super",
"Nodeflag_U_Turn",
"Nodeflag_Mini_Roundabout",
"Nodeflag_Turn_Restrict",
"Nodeflag_Turn_Restrict2"]
segmentflags_enum = ["Segmentflag_Area",
"Segmentflag_Oneway_1to2",
"Segmentflag_Oneway_2to1",
"Segmentflag_Super",
"Segmentflag_Normal"]
properties_enum = ["Properties_None",
"Properties_Paved",
"Properties_Multilane",
"Properties_Bridge",
"Properties_Tunnel",
"Properties_FootRoute",
"Properties_BicycleRoute",
"Properties_ALL"]
highway_enum = ["Highway_Motorway",
"Highway_Trunk",
"Highway_Primary",
"Highway_Secondary",
"Highway_Tertiary",
"Highway_Unclassified",
"Highway_Residential",
"Highway_Service",
"Highway_Track",
"Highway_Cycleway",
"Highway_Path",
"Highway_Steps",
"Highway_Ferry",
"Highway_Count",
"Highway_CycleBothWays",
"Highway_OneWay",
"Highway_Roundabout",
"Highway_Area"]
def print_enum(enum_list):
    for item in enum_list:
        print("  routino.database."+item)
print("Note 1: The Node's and Way's 'allow' parameter can be the combination of these enumerated values:")
print_enum(transports_enum)
print("")
print("Note 2: The Node's 'flags' parameter can be the combination of these enumerated values:")
print_enum(nodeflags_enum)
print("")
print("Note 3: The Segment's 'flags' parameter can be the combination of these enumerated values:")
print_enum(segmentflags_enum)
print("")
print("Note 4: The Way's 'type' parameter can be one the combination of these enumerated values:")
print_enum(highway_enum)
print("")
print("Note 5: The Way's 'props' parameter can be the combination of these enumerated values:")
print_enum(properties_enum)
print("")
print("Note 6: A value of zero for a Way's speed, weight, height, width or length means that there is no limit.")
print("")
print("Note 7: The Relation's 'except_transport' parameter can be the combination of these enumerated values:")
print_enum(transports_enum)
print("")
import gc
gc.collect()
| 30.956044 | 156 | 0.587504 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,416 | 0.522542 |
6fe133c49876165e512b3f8ef38f50888917b203 | 475 | py | Python | vcsver/tests/test_util.py | janneronkko/vcsver | 77020e296a8239ee142213642504c03b9064653f | [
"MIT"
]
| 1 | 2020-12-23T19:22:51.000Z | 2020-12-23T19:22:51.000Z | vcsver/tests/test_util.py | janneronkko/vcsver | 77020e296a8239ee142213642504c03b9064653f | [
"MIT"
]
| null | null | null | vcsver/tests/test_util.py | janneronkko/vcsver | 77020e296a8239ee142213642504c03b9064653f | [
"MIT"
]
| null | null | null | import io
from .. import util
def test_parsing_pkg_info_file(mocker):
open_mock = mocker.patch('vcsver.util.open')
open_mock.return_value = io.StringIO(
'Name: name\n'
'Version: 1.0\n'
)
pkg_info_data = util.parse_pkg_info_file(mocker.sentinel.path)
    open_mock.assert_called_once_with(
mocker.sentinel.path,
'rt',
)
assert {
'Name': 'name',
'Version': '1.0',
} == pkg_info_data
| 19.791667 | 66 | 0.614737 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 78 | 0.164211 |
6fe228d5ef8bd389e1153721d4a2bf62938a548d | 820 | py | Python | matrixFuncs/determinant.py | AmaarMarfatia/MatrixFunctions | c115dd58c273dc791dfd56316c855e601b0d94cc | [
"MIT"
]
| null | null | null | matrixFuncs/determinant.py | AmaarMarfatia/MatrixFunctions | c115dd58c273dc791dfd56316c855e601b0d94cc | [
"MIT"
]
| null | null | null | matrixFuncs/determinant.py | AmaarMarfatia/MatrixFunctions | c115dd58c273dc791dfd56316c855e601b0d94cc | [
"MIT"
]
| null | null | null | def determinant(matA):
dimA = []
    # find the dimensions of matA
a = matA
while type(a) == list:
dimA.append(len(a))
a = a[0]
#is it square
if dimA[0] != dimA[1]:
raise Exception("Matrix is not square")
#find determinant
total = 0
if dimA[0] == 2:
total = matA[0][0] * matA[1][1] - matA[1][0] * matA[0][1]
return total
else:
sign = 1
for i in range(dimA[0]):
temp = matA[1:]
#remove the current column from the temp stuff
for j in range(dimA[0]-1):
temp[j] = temp[j][0:i] + temp[j][i+1:]
sub = determinant(temp)
total = total + sign * matA[0][i] * sub
sign *= -1
return total
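# Illustrative check for the driver code below: cofactor expansion of
# [[1,2,3],[4,5,6],[7,8,15]] gives 1*(75-48) - 2*(60-42) + 3*(32-35)
# = 27 - 36 - 9 = -18, so the script should print -18.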
matA = [[1,2,3],[4,5,6],[7,8,15]]
print(determinant(matA)) | 28.275862 | 65 | 0.486585 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 123 | 0.15 |
6fe25b4c2678c24198e66f7fedfb9fb15fdcf64a | 5,498 | py | Python | pysnmp/BAY-STACK-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
]
| 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/BAY-STACK-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
]
| 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/BAY-STACK-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
]
| 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module BAY-STACK-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BAY-STACK-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:19:06 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, Counter64, Bits, Counter32, ModuleIdentity, ObjectIdentity, IpAddress, iso, Integer32, NotificationType, MibIdentifier, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "Counter64", "Bits", "Counter32", "ModuleIdentity", "ObjectIdentity", "IpAddress", "iso", "Integer32", "NotificationType", "MibIdentifier", "Unsigned32")
TruthValue, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TextualConvention", "DisplayString")
bayStackMibs, = mibBuilder.importSymbols("SYNOPTICS-ROOT-MIB", "bayStackMibs")
bayStackMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 45, 5, 13))
bayStackMib.setRevisions(('2013-10-11 00:00', '2012-10-02 00:00', '2009-09-28 00:00', '2007-09-04 00:00', '2005-08-22 00:00',))
if mibBuilder.loadTexts: bayStackMib.setLastUpdated('201310110000Z')
if mibBuilder.loadTexts: bayStackMib.setOrganization('Nortel Networks')
bayStackObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 13, 1))
bayStackConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 1))
bayStackConfigExpectedStackSize = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bayStackConfigExpectedStackSize.setStatus('current')
bayStackConfigStackErrorNotificationInterval = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(60)).setUnits('Seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: bayStackConfigStackErrorNotificationInterval.setStatus('current')
bayStackConfigStackErrorNotificationEnabled = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 1, 3), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bayStackConfigStackErrorNotificationEnabled.setStatus('current')
bayStackConfigStackRebootUnitOnFailure = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 1, 4), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bayStackConfigStackRebootUnitOnFailure.setStatus('current')
bayStackConfigStackRetryCount = MibScalar((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 1, 5), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bayStackConfigStackRetryCount.setStatus('current')
bayStackUnitConfigTable = MibTable((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 2), )
if mibBuilder.loadTexts: bayStackUnitConfigTable.setStatus('current')
bayStackUnitConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 2, 1), ).setIndexNames((0, "BAY-STACK-MIB", "bayStackUnitConfigIndex"))
if mibBuilder.loadTexts: bayStackUnitConfigEntry.setStatus('current')
bayStackUnitConfigIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bayStackUnitConfigIndex.setStatus('current')
bayStackUnitConfigRearPortAdminMode = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("standalone", 1), ("stacking", 2), ("spb", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: bayStackUnitConfigRearPortAdminMode.setStatus('current')
bayStackUnitConfigRearPortOperMode = MibTableColumn((1, 3, 6, 1, 4, 1, 45, 5, 13, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("standalone", 1), ("stacking", 2), ("spb", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bayStackUnitConfigRearPortOperMode.setStatus('current')
mibBuilder.exportSymbols("BAY-STACK-MIB", bayStackMib=bayStackMib, bayStackUnitConfigIndex=bayStackUnitConfigIndex, bayStackConfigStackErrorNotificationEnabled=bayStackConfigStackErrorNotificationEnabled, PYSNMP_MODULE_ID=bayStackMib, bayStackConfigStackRetryCount=bayStackConfigStackRetryCount, bayStackConfigStackErrorNotificationInterval=bayStackConfigStackErrorNotificationInterval, bayStackUnitConfigRearPortOperMode=bayStackUnitConfigRearPortOperMode, bayStackUnitConfigEntry=bayStackUnitConfigEntry, bayStackConfigStackRebootUnitOnFailure=bayStackConfigStackRebootUnitOnFailure, bayStackObjects=bayStackObjects, bayStackUnitConfigRearPortAdminMode=bayStackUnitConfigRearPortAdminMode, bayStackConfig=bayStackConfig, bayStackUnitConfigTable=bayStackUnitConfigTable, bayStackConfigExpectedStackSize=bayStackConfigExpectedStackSize)
| 130.904762 | 836 | 0.790833 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,303 | 0.236995 |
6fe276f59396e1213dd3856def48f978483f24dc | 2,184 | py | Python | priv/flatex/flatex.py | w495/survey-cbvr | 861db7b9020a4c5809af5c147cfd7387a3a856ed | [
"MIT"
]
| 1 | 2016-08-15T22:37:03.000Z | 2016-08-15T22:37:03.000Z | priv/flatex/flatex.py | w495/survey-cbvr | 861db7b9020a4c5809af5c147cfd7387a3a856ed | [
"MIT"
]
| null | null | null | priv/flatex/flatex.py | w495/survey-cbvr | 861db7b9020a4c5809af5c147cfd7387a3a856ed | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
# This "flattens" a LaTeX document by replacing all
# \input{X} lines w/ the text actually contained in X. See
# associated README.md for details.
# Use as a python module in a python script by saying import flatex then flatex.main(in file, out file)
import os
import re
import sys
def is_input(line):
"""
Determines whether or not a read in line contains an
uncommented out \input{} statement. Allows only spaces between
start of line and '\input{}'.
"""
#tex_input_re = r"""^\s*\\input{[^}]*}""" # input only
tex_input_re = r"""(^[^\%]*\\input{[^}]*})|(^[^\%]*\\include{[^}]*})""" # input or include
return re.search(tex_input_re, line)
def get_input(line):
"""
Gets the file name from a line containing an input statement.
"""
tex_input_filename_re = r"""{[^}]*"""
m = re.search(tex_input_filename_re, line)
return m.group()[1:]
def combine_path(base_path, relative_ref):
"""
Combines the base path of the tex document being worked on
    with the relative reference found in that document.
"""
#if (base_path != ""):
#print "os.getcwd()", os.getcwd()
#os.chdir(base_path)
filePath = os.path.abspath(relative_ref)
filePath = filePath + ".tex"
return filePath
def expand_file(base_file):
"""
Recursively-defined function that takes as input a file and
returns it with all the inputs replaced with the contents of the
referenced file.
"""
output_lines = []
f = open(base_file, "r")
for line in f:
if is_input(line):
new_base_file = combine_path(current_path, get_input(line))
output_lines += expand_file(new_base_file)
output_lines.append('\n') # add a new line after each file input
else:
output_lines.append(line)
f.close()
return output_lines
def main(base_file, output_file):
g = open(output_file, "w")
g.write(''.join(expand_file(base_file)))
g.close()
return None
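# Usage sketch (restating the header comment above): either run
#   python flatex.py main.tex flattened.tex
# from the command line, or import flatex and call flatex.main(in_file, out_file);
# the file names here are placeholders.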
if __name__ == '__main__':
base_file, output_file = sys.argv[1:]
current_path = os.path.split(base_file)[0]
main(base_file, output_file)
| 31.2 | 106 | 0.641026 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,095 | 0.501374 |
6fe3cc82a26ac5744b2544116ad6a32d14b35afa | 30 | py | Python | sigal/plugins/encrypt/__init__.py | fidergo-stephane-gourichon/sigal | b1f2e947700e618425e170e8758b1fbb82c91acb | [
"MIT"
]
| null | null | null | sigal/plugins/encrypt/__init__.py | fidergo-stephane-gourichon/sigal | b1f2e947700e618425e170e8758b1fbb82c91acb | [
"MIT"
]
| null | null | null | sigal/plugins/encrypt/__init__.py | fidergo-stephane-gourichon/sigal | b1f2e947700e618425e170e8758b1fbb82c91acb | [
"MIT"
]
| null | null | null | from .encrypt import register
| 15 | 29 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
6fe4383a2b514d6df56ab04d2ab236c4e1f75e15 | 1,600 | py | Python | pull_1m/pull_v2.py | tlh45342/polygon-pull | 1fc59d07d37ea56be5db4856f25ac1d9eca7e810 | [
"Apache-2.0"
]
| null | null | null | pull_1m/pull_v2.py | tlh45342/polygon-pull | 1fc59d07d37ea56be5db4856f25ac1d9eca7e810 | [
"Apache-2.0"
]
| null | null | null | pull_1m/pull_v2.py | tlh45342/polygon-pull | 1fc59d07d37ea56be5db4856f25ac1d9eca7e810 | [
"Apache-2.0"
]
| null | null | null | import datetime
import os
import pandas
from polygon.rest.client import RESTClient
def ts_to_datetime(ts) -> str:
return datetime.datetime.fromtimestamp(ts / 1000.0).strftime('%Y-%m-%d %H:%M')
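# Illustrative: Polygon aggregate timestamps are epoch milliseconds, so
# ts_to_datetime(1631277000000) yields "2021-09-10 12:30" for a UTC machine
# (the exact string depends on the local timezone, since fromtimestamp() is
# local-time).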
def pull_day(Symbol, from_):
POLYGON_API_KEY = os.environ.get('POLYGON_API_KEY')
enddate = datetime.datetime.fromisoformat(from_)
enddate += datetime.timedelta(days=1)
enddate = str(enddate)[0:10]
with RESTClient(POLYGON_API_KEY) as client:
resp = client.stocks_equities_aggregates(Symbol, 1, "minute", from_, enddate, unadjusted=False)
#print(f"Minute aggregates for {resp.ticker} between {from_} and {enddate}.")
    rows = []
    for result in resp.results:
        #dt = ts_to_datetime(result["t"])
        #print(f"{dt}\n\tO: {result['o']}\n\tH: {result['h']}\n\tL: {result['l']}\n\tC: {result['c']} ")
        rows.append({"Datetime": result['t'], "Open": result['o'],
                     "High": result['h'], "Low": result['l'],
                     "Close": result['c'], "Volume": result['v']})
    # Build the frame in one shot; DataFrame.append is deprecated (and later
    # removed) in recent pandas, and this also avoids shadowing builtins.
    df = pandas.DataFrame(rows, columns=['Datetime', 'Open', 'High', 'Low', 'Close', 'Adj Close', 'Volume'])
    return df
# ----------------------------
daystr = "2021-09-10"
df = pull_day("LC", daystr)
fname = r"M:\data\out.csv"
print("Writing: ", fname)
df.to_csv (fname, index = False, header=True) | 34.042553 | 113 | 0.56 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 439 | 0.274375 |
6fe47402ae7ebb99a7865e69f0ddb0204cb01079 | 1,595 | py | Python | pwython/__main__.py | Adwaith-Rajesh/Pwython | f734066e251373c7b1634c27617089f9fe0ac79a | [
"Unlicense"
]
| null | null | null | pwython/__main__.py | Adwaith-Rajesh/Pwython | f734066e251373c7b1634c27617089f9fe0ac79a | [
"Unlicense"
]
| null | null | null | pwython/__main__.py | Adwaith-Rajesh/Pwython | f734066e251373c7b1634c27617089f9fe0ac79a | [
"Unlicense"
]
| null | null | null | from random import choice, randint
from re import sub, split, findall
from string import ascii_letters
from subprocess import PIPE, Popen
from sys import argv, executable, stderr
from .responses import pronouns, reactions, remarks
def owoify(text):
    if isinstance(text, bytes):
        # Decode properly instead of slicing repr(); the old approach left
        # escape sequences other than \n as literal backslash text.
        text = text.decode("utf-8", errors="replace")
text = sub("[rlv]", "w", text)
text = sub("[RLV]", "W", text)
text = sub("ee", "wee", text)
    # Convert the string into an array while maintaining whitespace
words = split(r"\s+", text)
whitespace = findall(r"\s+", text)
text = [None] * (len(words) + len(whitespace))
text[::2], text[1::2] = words, whitespace
# Random stutter
for idx, word in enumerate(text):
if len(word) > 0:
if word[0] in ascii_letters and word[0].lower() not in "aeiouw":
if randint(1, 10) == 1:
text[idx] = f"{word[0]}-{word}"
text = "".join(text)
return text
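# Illustrative only (the stutter is random, so output varies):
#   owoify("hello world")  ->  "hewwo wowwd"  or  "h-hewwo wowwd"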
def main():
process = Popen([executable] + argv[1:], stderr=PIPE)
while process.poll() is None:
for line in iter(process.stderr.readline, b""):
if line == b"Traceback (most recent call last):\n":
# Easter egg :)
if randint(1, 10) == 1:
stderr.write(f"{choice(pronouns)}, {choice(remarks)}, you sussy baka {choice(reactions)}\n")
else:
stderr.write(f"{choice(pronouns)}, {choice(remarks)} {choice(reactions)}\n")
stderr.write(owoify(line))
if __name__ == "__main__":
main()
| 29.537037 | 112 | 0.573041 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 376 | 0.235737 |
6fe49f0a6289892c3834db40f35fd362645dee89 | 6,966 | py | Python | botnet/fabfile.py | MrScytheLULZ/Simple-python-cnc | 23adc7ae553239a0c23da63d6eb97da24054fd93 | [
"MIT"
]
| 23 | 2016-05-31T22:42:07.000Z | 2022-03-30T13:37:44.000Z | botnet/fabfile.py | MrScytheLULZ/Simple-python-cnc | 23adc7ae553239a0c23da63d6eb97da24054fd93 | [
"MIT"
]
| null | null | null | botnet/fabfile.py | MrScytheLULZ/Simple-python-cnc | 23adc7ae553239a0c23da63d6eb97da24054fd93 | [
"MIT"
]
| 18 | 2017-03-15T08:22:33.000Z | 2020-11-14T23:37:06.000Z | import os
from fabric.api import env, run, sudo, execute, local, settings, \
hide, open_shell, parallel, serial, put
from fabric.decorators import hosts
from fabric.contrib.console import confirm
import fabric.colors as fab_col
import paramiko
import getpass
from tabulate import tabulate
file_hosts = "hosts.txt"
paramiko.util.log_to_file("paramiko.log")
env.colorize_errors = True
# The selected hosts are the hosts in env (at the beginning)
selected_hosts = env.hosts
running_hosts = {}
env.connection_attempts = 2
# env.skip_bad_hosts = True
def load_hosts():
"""
Load hosts from hosts.txt.
A host can either be in form
username@host[:port] password
or
username@host[:port]
If no port is specified, port 22 is selected.
"""
with open(file_hosts, "r") as f:
data = f.readlines()
for line in data:
try:
host, password = line.strip().split()
except ValueError:
host = line.strip()
password = None
if len(host.split(':')) == 1:
host = host + ":22"
env.hosts.append(host)
if password is not None:
env.passwords[host] = password.strip()
env.hosts = list(set(env.hosts)) # Remove duplicates
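# Example hosts.txt contents (hypothetical entries):
#   alice@192.168.1.10:2222 s3cretpass
#   bob@example.com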
def add_host():
"""
Add a new host to the running hosts.
    The user can decide whether to also append the host to the external
    hosts.txt file.
"""
name = raw_input("Username: ")
host = raw_input("Host: ")
    # raw_input (not input) so the value is taken as a literal string;
    # Python 2's input() would eval() whatever the user types.
    port = raw_input("Port: ")
new_host = name + "@" + host + ":" + str(port)
selected_hosts.append(new_host)
password = None
if confirm("Authenticate using a password? "):
password = getpass.getpass("Password: ").strip()
env.passwords[new_host] = password
# Append the new host to the hosts file
if confirm("Add the new host to the hosts file? "):
if password is not None:
line = new_host + " " + password + "\n"
else:
line = new_host + "\n"
with open(file_hosts, 'a') as f:
f.write(line)
def print_hosts():
"""
Print selected hosts.
If hosts haven't been hand-selected yet, all hosts are selected.
"""
hosts = map(lambda x: [x, env.passwords.get(x, None)], selected_hosts)
print(fab_col.green(tabulate(hosts, ["Host", "Password"])))
def check_hosts():
"""
Check if hosts are active or not and print the result.
"""
global running_hosts
running_hosts = dict()
for host in selected_hosts:
print(fab_col.magenta("\nPing host %d of %d" %
(selected_hosts.index(host) + 1, len(selected_hosts))))
response = os.system("ping -c 1 " + host.split("@")[1].split(":")[0])
if response == 0:
running_hosts[host] = True
else:
running_hosts[host] = False
# Convert running_hosts in order to print it as table
mylist = map(lambda index: [index[0], str(index[1])], running_hosts.items())
print(fab_col.green(tabulate(mylist, ["Host", "Running"])))
def select_running_hosts():
"""
Select all active hosts.
"""
global selected_hosts
with hide('stdout'):
check_hosts()
host_up = filter(lambda x: running_hosts.get(x, False),
running_hosts.keys())
selected_hosts = host_up
def choose_hosts():
"""
Select the hosts to be used.
"""
global selected_hosts
mylist = map(lambda (num, h): [num, h], enumerate(env.hosts))
print(fab_col.blue("Select Hosts (space-separated):"))
print(fab_col.blue(tabulate(mylist, ["Number", "Host"])))
choices = raw_input("> ").split()
# Avoid letters in string index
choices = filter(lambda x: x.isdigit(), choices)
# Convert to int list
choices = map(int, choices)
# Avoid IndexError
choices = filter(lambda x: x < len(env.hosts), choices)
# Remove duplicates
choices = list(set(choices))
# If no hosts are selected, keep the current hosts
if len(choices) == 0:
return
# Get only selected hosts
selected_hosts = map(lambda i: env.hosts[i], choices)
def run_locally(cmd=None):
"""
Execute a command locally.
"""
if cmd is None:
cmd = raw_input("Insert command: ")
with settings(warn_only=True):
local(cmd)
# This function cannot have the parallel decorator since
# a sudo command must receive the user password
@serial
def _execute_sudo(command):
"""
Execute a sudo command on a host.
Returns:
The results of the execution.
"""
with settings(warn_only=True):
return sudo(command[4:].strip(), shell=True)
@parallel
def _execute_command(command):
"""
Execute a command on a host.
Returns:
The results of the execution.
"""
with settings(warn_only=True):
try:
return run(command)
except:
print(fab_col.red("Error execution in host %s" % env.host))
return None
@parallel
def run_command(cmd=None):
"""
Execute a command on hosts.
"""
if cmd is None:
cmd = raw_input("Insert command: ")
if cmd.strip()[:4] == "sudo":
execute(_execute_sudo, cmd, hosts=selected_hosts)
else:
execute(_execute_command, cmd, hosts=selected_hosts)
@hosts(selected_hosts)
def execute_script():
"""
Execute a script file.
"""
# Attention to script name.
# Add security checks
script_file = raw_input("Name of the script: ")
remote_path = "~/"
if len(script_file) < 4 or ".." in script_file:
# Invalid script
print(fab_col.red("Error. Invalid script name."))
return
for h in selected_hosts:
with settings(host_string=h):
with hide('running'):
                put(script_file, remote_path, mode=0777)  # octal, i.e. rwxrwxrwx
# Remove the path from the name of the script
script_file = script_file.split("/")[-1]
# Execution
extension = script_file.split(".")[-1]
if extension == script_file:
print(fab_col.red("Invalid script"))
return
if extension == 'py':
run_command("python " + remote_path + script_file)
elif extension == "sh" or extension == "bash":
run_command("bash " + remote_path + script_file)
else:
print(fab_col.red("Extension not supported"))
# Delete the script
with hide('running', 'stdout'):
run_command("rm -f " + remote_path + script_file)
def open_sh():
"""
Open a shell on a host.
"""
mylist = map(lambda (num, h): [num, h], enumerate(selected_hosts))
print(fab_col.blue(tabulate(mylist, ["Number", "Host"])))
try:
n = input("Open shell in host number: ")
h = selected_hosts[n]
execute(open_shell, host=h)
except (NameError, IndexError):
print(fab_col.red("Error: invalid host selection."))
print(fab_col.red("Shell not opened."))
| 28.904564 | 80 | 0.611111 | 0 | 0 | 0 | 0 | 2,025 | 0.290698 | 0 | 0 | 2,190 | 0.314384 |
6fe5e32a34a80c26893983ec1a48a618f1bae5f3 | 781 | py | Python | shared.py | gattis/magnum-py | 98027eee373296030a118681a449629c57d85426 | [
"Unlicense"
]
| null | null | null | shared.py | gattis/magnum-py | 98027eee373296030a118681a449629c57d85426 | [
"Unlicense"
]
| null | null | null | shared.py | gattis/magnum-py | 98027eee373296030a118681a449629c57d85426 | [
"Unlicense"
]
| null | null | null |
objects = {}
def instantiate():
    # This function is called once during server startup. Modify the global 'objects' dict with
    # instantiated shared objects that you wish to store in the parent process and have access to from child
    # request handler processes. Each object must support being shared via the multiprocessing module or else
    # the object will just be copied into the children. See http://docs.python.org/library/multiprocessing.html
#
# For example, in this function you might put:
#
# import multiprocessing
# objects['num_requests'] = multiprocessing.Value('i',0)
#
# And in your request handler, put:
#
# from magnum.shared import objects
# objects['num_requests'].value += 1
return
| 27.892857 | 112 | 0.690141 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 658 | 0.84251 |
6fe5fbc85c69cdd93c1fbe4aaa0233fcbcd08383 | 10,398 | py | Python | appengine/cr-buildbucket/legacy/api_common.py | xinghun61/infra | b5d4783f99461438ca9e6a477535617fadab6ba3 | [
"BSD-3-Clause"
]
| 2 | 2021-04-13T21:22:18.000Z | 2021-09-07T02:11:57.000Z | appengine/cr-buildbucket/legacy/api_common.py | xinghun61/infra | b5d4783f99461438ca9e6a477535617fadab6ba3 | [
"BSD-3-Clause"
]
| 16 | 2020-09-07T11:55:09.000Z | 2022-03-02T05:47:58.000Z | appengine/cr-buildbucket/legacy/api_common.py | xinghun61/infra | b5d4783f99461438ca9e6a477535617fadab6ba3 | [
"BSD-3-Clause"
]
| null | null | null | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
from google.appengine.ext import ndb
from google.protobuf import json_format
from google.protobuf import struct_pb2
from protorpc import messages
from components import utils
from proto import common_pb2
import bbutil
import config
import logging
import model
# Names of well-known parameters.
BUILDER_PARAMETER = 'builder_name'
PROPERTIES_PARAMETER = 'properties'
def format_luci_bucket(bucket_id):
"""Returns V1 luci bucket name, e.g. "luci.chromium.try"."""
return 'luci.%s.%s' % config.parse_bucket_id(bucket_id)
def parse_luci_bucket(bucket):
"""Converts V1 LUCI bucket to a bucket ID string.
Returns '' if bucket is not a LUCI bucket.
"""
parts = bucket.split('.', 2)
if len(parts) == 3 and parts[0] == 'luci':
return config.format_bucket_id(parts[1], parts[2])
return ''
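# Assuming bucket IDs take the "<project>/<bucket>" form implied above:
#   parse_luci_bucket('luci.chromium.try')  -> 'chromium/try'
#   parse_luci_bucket('master.tryserver.x') -> ''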
@ndb.tasklet
def to_bucket_id_async(bucket):
"""Converts a bucket string to a bucket id.
A bucket string is either a bucket id (e.g. "chromium/try") or
a legacy bucket name (e.g. "master.tryserver.x", "luci.chromium.try").
Does not check access.
Returns:
bucket id string or None if such bucket does not exist.
Raises:
errors.InvalidInputError if bucket is invalid or ambiguous.
"""
is_legacy = config.is_legacy_bucket_id(bucket)
if not is_legacy:
config.validate_bucket_id(bucket)
raise ndb.Return(bucket)
config.validate_bucket_name(bucket)
bucket_id = parse_luci_bucket(bucket)
if bucket_id:
raise ndb.Return(bucket_id)
# The slowest code path.
# Does not apply to LUCI.
bucket_id = config.resolve_bucket_name_async(bucket).get_result()
if bucket_id:
logging.info('resolved bucket id %r => %r', bucket, bucket_id)
raise ndb.Return(bucket_id)
class CanaryPreference(messages.Enum):
# The build system will decide whether to use canary or not
AUTO = 1
# Use the production build infrastructure
PROD = 2
# Use the canary build infrastructure
CANARY = 3
CANARY_PREFERENCE_TO_TRINARY = {
CanaryPreference.AUTO: common_pb2.UNSET,
CanaryPreference.PROD: common_pb2.NO,
CanaryPreference.CANARY: common_pb2.YES,
}
TRINARY_TO_CANARY_PREFERENCE = {
v: k for k, v in CANARY_PREFERENCE_TO_TRINARY.iteritems()
}
class BuildMessage(messages.Message):
"""Describes model.Build, see its docstring."""
id = messages.IntegerField(1, required=True)
bucket = messages.StringField(2, required=True)
tags = messages.StringField(3, repeated=True)
parameters_json = messages.StringField(4)
status = messages.EnumField(model.BuildStatus, 5)
result = messages.EnumField(model.BuildResult, 6)
result_details_json = messages.StringField(7)
failure_reason = messages.EnumField(model.FailureReason, 8)
cancelation_reason = messages.EnumField(model.CancelationReason, 9)
lease_expiration_ts = messages.IntegerField(10)
lease_key = messages.IntegerField(11)
url = messages.StringField(12)
created_ts = messages.IntegerField(13)
started_ts = messages.IntegerField(20)
updated_ts = messages.IntegerField(14)
completed_ts = messages.IntegerField(15)
created_by = messages.StringField(16)
status_changed_ts = messages.IntegerField(17)
utcnow_ts = messages.IntegerField(18, required=True)
retry_of = messages.IntegerField(19)
canary_preference = messages.EnumField(CanaryPreference, 21)
canary = messages.BooleanField(22)
project = messages.StringField(23)
experimental = messages.BooleanField(24)
service_account = messages.StringField(25)
def proto_to_timestamp(ts):
if not ts.seconds:
return None
return utils.datetime_to_timestamp(ts.ToDatetime())
def legacy_bucket_name(bucket_id, is_luci):
if is_luci:
# In V1, LUCI builds use a "long" bucket name, e.g. "luci.chromium.try"
# as opposed to just "try". This is because in the past bucket names
# were globally unique, as opposed to unique per project.
return format_luci_bucket(bucket_id)
_, bucket_name = config.parse_bucket_id(bucket_id)
return bucket_name
# List of deprecated properties that are converted from float to int for
# backward compatibility.
# TODO(crbug.com/877161): remove this list.
INTEGER_PROPERTIES = [
'buildnumber',
'issue',
'patchset',
'patch_issue',
'patch_set',
]
def get_build_url(build):
"""Returns view URL of the build."""
if build.url:
return build.url
settings = config.get_settings_async().get_result()
return 'https://%s/b/%d' % (settings.swarming.milo_hostname, build.proto.id)
def properties_to_json(properties):
"""Converts properties to JSON.
properties should be struct_pb2.Struct, but for convenience in tests
a dict is also accepted.
CAUTION: in general converts all numbers to floats,
because JSON format does not distinguish floats and ints.
For backward compatibility, temporarily (crbug.com/877161) renders widely
used, deprecated properties as integers, see INTEGER_PROPERTIES.
"""
return json.dumps(_properties_to_dict(properties), sort_keys=True)
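# For example (illustrative values):
#   properties_to_json({'buildnumber': 42, 'x': 1})
#   == '{"buildnumber": 42, "x": 1.0}'
# 'buildnumber' is listed in INTEGER_PROPERTIES, so it is coerced back to
# an int, while other numbers stay floats.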
def _properties_to_dict(properties):
"""Implements properties_to_json."""
assert isinstance(properties, (dict, struct_pb2.Struct)), properties
if isinstance(properties, dict): # pragma: no branch
properties = bbutil.dict_to_struct(properties)
# Note: this dict does not necessarily equal the original one.
# In particular, an int may turn into a float.
as_dict = json_format.MessageToDict(properties)
for p in INTEGER_PROPERTIES:
if isinstance(as_dict.get(p), float):
as_dict[p] = int(as_dict[p])
return as_dict
def build_to_message(build_bundle, include_lease_key=False):
"""Converts a model.BuildBundle to BuildMessage."""
build = build_bundle.build
assert build
assert build.key
assert build.key.id()
bp = build.proto
infra = build_bundle.infra.parse()
sw = infra.swarming
logdog = infra.logdog
recipe = infra.recipe
result_details = (build.result_details or {}).copy()
result_details['properties'] = {}
if build_bundle.output_properties: # pragma: no branch
result_details['properties'] = _properties_to_dict(
build_bundle.output_properties.parse()
)
if bp.summary_markdown:
result_details['ui'] = {'info': bp.summary_markdown}
parameters = (build.parameters or {}).copy()
parameters[BUILDER_PARAMETER] = bp.builder.builder
parameters[PROPERTIES_PARAMETER] = _properties_to_dict(
infra.buildbucket.requested_properties
)
recipe_name = recipe.name
if build_bundle.input_properties: # pragma: no cover
input_props = build_bundle.input_properties.parse()
if 'recipe' in input_props.fields:
recipe_name = input_props['recipe']
if bp.status != common_pb2.SUCCESS and bp.summary_markdown:
result_details['error'] = {
'message': bp.summary_markdown,
}
if sw.bot_dimensions:
by_key = {}
for d in sw.bot_dimensions:
by_key.setdefault(d.key, []).append(d.value)
result_details.setdefault('swarming', {})['bot_dimensions'] = by_key
tags = set(build.tags)
if build.is_luci:
tags.add('swarming_hostname:%s' % sw.hostname)
tags.add('swarming_task_id:%s' % sw.task_id)
# Milo uses swarming tags.
tags.add('swarming_tag:recipe_name:%s' % recipe_name)
tags.add(
'swarming_tag:recipe_package:%s' %
(bp.exe.cipd_package or recipe.cipd_package)
)
tags.add(
'swarming_tag:log_location:logdog://%s/%s/%s/+/annotations' %
(logdog.hostname, logdog.project, logdog.prefix)
)
tags.add('swarming_tag:luci_project:%s' % bp.builder.project)
# Try to find OS
for d in sw.bot_dimensions:
if d.key == 'os':
tags.add('swarming_tag:os:%s' % d.value)
break
msg = BuildMessage(
id=build.key.id(),
project=bp.builder.project,
bucket=legacy_bucket_name(build.bucket_id, build.is_luci),
tags=sorted(tags),
parameters_json=json.dumps(parameters, sort_keys=True),
status=build.status_legacy,
result=build.result,
result_details_json=json.dumps(result_details, sort_keys=True),
cancelation_reason=build.cancelation_reason,
failure_reason=build.failure_reason,
lease_key=build.lease_key if include_lease_key else None,
url=get_build_url(build),
created_ts=proto_to_timestamp(bp.create_time),
started_ts=proto_to_timestamp(bp.start_time),
updated_ts=proto_to_timestamp(bp.update_time),
completed_ts=proto_to_timestamp(bp.end_time),
created_by=build.created_by.to_bytes() if build.created_by else None,
status_changed_ts=utils.datetime_to_timestamp(build.status_changed_time),
utcnow_ts=utils.datetime_to_timestamp(utils.utcnow()),
retry_of=build.retry_of,
canary_preference=(
# This is not accurate, but it does not matter at this point.
# This is deprecated.
CanaryPreference.CANARY if build.canary else CanaryPreference.PROD
),
canary=build.canary,
experimental=build.experimental,
service_account=sw.task_service_account,
# when changing this function, make sure build_to_dict would still work
)
if build.lease_expiration_date is not None:
msg.lease_expiration_ts = utils.datetime_to_timestamp(
build.lease_expiration_date
)
return msg
def build_to_dict(build_bundle, include_lease_key=False):
"""Converts a build to an externally consumable dict.
This function returns a dict that a BuildMessage would be encoded to.
"""
# Implementing this function in a generic way (message_to_dict) requires
# knowledge of many protorpc and endpoints implementation details.
# Not worth it.
msg = build_to_message(build_bundle, include_lease_key=include_lease_key)
# Special cases
result = {
'tags': msg.tags, # a list
}
for f in msg.all_fields():
v = msg.get_assigned_value(f.name)
if f.name in result or v is None:
# None is the default. It is omitted by Cloud Endpoints.
continue
if isinstance(v, messages.Enum):
v = str(v)
else:
assert isinstance(v, (basestring, int, long, bool)), v
if (isinstance(f, messages.IntegerField) and
f.variant == messages.Variant.INT64):
v = str(v)
result[f.name] = v
return result
| 31.413897 | 79 | 0.726486 | 1,475 | 0.141854 | 0 | 0 | 909 | 0.087421 | 0 | 0 | 2,987 | 0.287267 |
6fe6702a87b9963548f65391cbbfe3151ca980e8 | 1,606 | py | Python | src/probnum/random_variables/__init__.py | admdev8/probnum | 792b6299bac247cf8b1b5056756f0f078855d83a | [
"MIT"
]
| null | null | null | src/probnum/random_variables/__init__.py | admdev8/probnum | 792b6299bac247cf8b1b5056756f0f078855d83a | [
"MIT"
]
| 2 | 2020-12-28T19:37:16.000Z | 2020-12-28T19:37:31.000Z | src/probnum/random_variables/__init__.py | admdev8/probnum | 792b6299bac247cf8b1b5056756f0f078855d83a | [
"MIT"
]
| null | null | null | """
This package implements random variables. Random variables are the primary in- and
outputs of probabilistic numerical methods. A generic signature of such methods looks
like this:
.. highlight:: python
.. code-block:: python
randvar_out, info = probnum_method(problem, randvar_in, **kwargs)
"""
from ._dirac import Dirac
from ._normal import Normal
from ._random_variable import (
ContinuousRandomVariable,
DiscreteRandomVariable,
RandomVariable,
)
from ._scipy_stats import (
WrappedSciPyContinuousRandomVariable,
WrappedSciPyDiscreteRandomVariable,
WrappedSciPyRandomVariable,
)
from ._utils import asrandvar
# Public classes and functions. Order is reflected in documentation.
__all__ = [
"asrandvar",
"RandomVariable",
"DiscreteRandomVariable",
"ContinuousRandomVariable",
"Dirac",
"Normal",
"WrappedSciPyRandomVariable",
"WrappedSciPyDiscreteRandomVariable",
"WrappedSciPyContinuousRandomVariable",
]
# Set correct module paths. Corrects links and module paths in documentation.
RandomVariable.__module__ = "probnum.random_variables"
DiscreteRandomVariable.__module__ = "probnum.random_variables"
ContinuousRandomVariable.__module__ = "probnum.random_variables"
WrappedSciPyRandomVariable.__module__ = "probnum.random_variables"
WrappedSciPyDiscreteRandomVariable.__module__ = "probnum.random_variables"
WrappedSciPyContinuousRandomVariable.__module__ = "probnum.random_variables"
Dirac.__module__ = "probnum.random_variables"
Normal.__module__ = "probnum.random_variables"
asrandvar.__module__ = "probnum.random_variables"
| 30.301887 | 85 | 0.797011 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 878 | 0.5467 |
6fe83234101021db05245c3a1a51c329dc0916b4 | 6,147 | py | Python | test/acceptance/features/steps/generic_testapp.py | multi-arch/service-binding-operator | a92b303d45a06e8c4396ae80721e1cc5b9019e4a | [
"Apache-2.0"
]
| null | null | null | test/acceptance/features/steps/generic_testapp.py | multi-arch/service-binding-operator | a92b303d45a06e8c4396ae80721e1cc5b9019e4a | [
"Apache-2.0"
]
| null | null | null | test/acceptance/features/steps/generic_testapp.py | multi-arch/service-binding-operator | a92b303d45a06e8c4396ae80721e1cc5b9019e4a | [
"Apache-2.0"
]
| null | null | null | from app import App
import requests
import json
import polling2
from behave import step
from openshift import Openshift
from util import substitute_scenario_id
from string import Template
class GenericTestApp(App):
deployment_name_pattern = "{name}"
def __init__(self, name, namespace, app_image="ghcr.io/multi-arch/sbo-generic-test-app:latest"):
App.__init__(self, name, namespace, app_image, "8080")
def get_env_var_value(self, name):
resp = polling2.poll(lambda: requests.get(url=f"http://{self.route_url}/env/{name}"),
check_success=lambda r: r.status_code in [200, 404], step=5, timeout=400, ignore_exceptions=(requests.exceptions.ConnectionError,))
print(f'env endpoint response: {resp.text} code: {resp.status_code}')
if resp.status_code == 200:
return json.loads(resp.text)
else:
return None
def format_pattern(self, pattern):
return pattern.format(name=self.name)
def get_file_value(self, file_path):
resp = polling2.poll(lambda: requests.get(url=f"http://{self.route_url}{file_path}"),
check_success=lambda r: r.status_code == 200, step=5, timeout=400, ignore_exceptions=(requests.exceptions.ConnectionError,))
print(f'file endpoint response: {resp.text} code: {resp.status_code}')
return resp.text
def assert_file_not_exist(self, file_path):
polling2.poll(lambda: requests.get(url=f"http://{self.route_url}{file_path}"),
check_success=lambda r: r.status_code == 404, step=5, timeout=400, ignore_exceptions=(requests.exceptions.ConnectionError,))
def set_label(self, label):
self.openshift.set_label(self.name, label, self.namespace)
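# These steps are driven from Gherkin feature files, e.g. a hypothetical
# scenario:
#   Given Generic test application "myapp" is running
#   Then The application env var "FOO" has value "bar"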
@step(u'Generic test application "{application_name}" is running')
@step(u'Generic test application "{application_name}" is running with binding root as "{bindingRoot}"')
@step(u'Generic test application is running')
@step(u'Generic test application is running with binding root as "{bindingRoot}"')
def is_running(context, application_name=None, bindingRoot=None, asDeploymentConfig=False):
if application_name is None:
application_name = substitute_scenario_id(context)
application = GenericTestApp(application_name, context.namespace.name)
if asDeploymentConfig:
application.resource = "deploymentconfig"
if not application.is_running():
print("application is not running, trying to import it")
application.install(bindingRoot=bindingRoot)
context.application = application
# save the generation number
context.original_application_generation = application.get_generation()
context.latest_application_generation = application.get_generation()
@step(u'Generic test application is running as deployment config')
def is_running_deployment_config(context):
is_running(context, asDeploymentConfig=True)
@step(u'The application env var "{name}" has value "{value}"')
def check_env_var_value(context, name, value):
value = substitute_scenario_id(context, value)
found = polling2.poll(lambda: context.application.get_env_var_value(name) == value, step=5, timeout=400)
assert found, f'Env var "{name}" should contain value "{value}"'
@step(u'The env var "{name}" is not available to the application')
def check_env_var_existence(context, name):
output = polling2.poll(lambda: context.application.get_env_var_value(name) is None, step=5, timeout=400)
assert output, f'Env var "{name}" should not exist'
@step(u'Content of file "{file_path}" in application pod is')
def check_file_value(context, file_path):
value = Template(context.text.strip()).substitute(NAMESPACE=context.namespace.name)
resource = substitute_scenario_id(context, file_path)
polling2.poll(lambda: context.application.get_file_value(resource) == value, step=5, timeout=400)
@step(u'File "{file_path}" exists in application pod')
def check_file_exists(context, file_path):
resource = substitute_scenario_id(context, file_path)
polling2.poll(lambda: context.application.get_file_value(resource) != "", step=5, timeout=400)
@step(u'File "{file_path}" is unavailable in application pod')
def check_file_unavailable(context, file_path):
context.application.assert_file_not_exist(file_path)
@step(u'Test applications "{first_app_name}" and "{second_app_name}" is running')
def are_two_apps_running(context, first_app_name, second_app_name, bindingRoot=None):
application1 = GenericTestApp(first_app_name, context.namespace.name)
if not application1.is_running():
print("application1 is not running, trying to import it")
application1.install(bindingRoot=bindingRoot)
context.application1 = application1
application2 = GenericTestApp(second_app_name, context.namespace.name)
if not application2.is_running():
print("application2 is not running, trying to import it")
application2.install(bindingRoot=bindingRoot)
context.application2 = application2
@step(u'The common label "{label}" is set for both apps')
def set_common_label(context, label):
context.application1.set_label(f"{label}")
context.application2.set_label(f"{label}")
@step(u'The application env var "{name}" has value "{value}" in both apps')
def check_env_var_value_in_both_apps(context, name, value):
polling2.poll(lambda: context.application1.get_env_var_value(name) == value, step=5, timeout=400)
polling2.poll(lambda: context.application2.get_env_var_value(name) == value, step=5, timeout=400)
@step(u'The container declared in application resource contains env "{envVar}" set only once')
@step(u'The container declared in application "{app_name}" resource contains env "{envVar}" set only once')
def check_env_var_count_set_on_container(context, envVar, app_name=None):
openshift = Openshift()
if app_name is None:
app_name = context.application.name
app_name = substitute_scenario_id(context, app_name)
env = openshift.get_deployment_env_info(app_name, context.namespace.name)
assert str(env).count(envVar) == 1
| 45.198529 | 160 | 0.738572 | 1,582 | 0.257361 | 0 | 0 | 4,341 | 0.706198 | 0 | 0 | 1,577 | 0.256548 |
6fe90908102c482823f4162b72dbc1d35bfe2cbb | 8,033 | py | Python | SKY130_PDK/gen_param.py | ALIGN-analoglayout/ALIGN-pdk-sky130 | ffddd848656f09cc03f210d7b44b5ffbd1ab228f | [
"BSD-3-Clause"
]
| null | null | null | SKY130_PDK/gen_param.py | ALIGN-analoglayout/ALIGN-pdk-sky130 | ffddd848656f09cc03f210d7b44b5ffbd1ab228f | [
"BSD-3-Clause"
]
| null | null | null | SKY130_PDK/gen_param.py | ALIGN-analoglayout/ALIGN-pdk-sky130 | ffddd848656f09cc03f210d7b44b5ffbd1ab228f | [
"BSD-3-Clause"
]
| null | null | null | import json
import logging
from math import sqrt, floor, ceil, log10
from copy import deepcopy
logger = logging.getLogger(__name__)
def limit_pairs(pairs):
# Hack to limit aspect ratios when there are a lot of choices
if len(pairs) > 12:
new_pairs = []
log10_aspect_ratios = [-0.3, 0, 0.3]
for l in log10_aspect_ratios:
best_pair = min((abs(log10(newy) - log10(newx) - l), (newx, newy))
for newx, newy in pairs)[1]
new_pairs.append(best_pair)
return new_pairs
else:
return pairs
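# For example, with more than 12 (x, y) divisor pairs, only the pairs whose
# aspect ratio y/x is closest to roughly 1:2, 1:1 and 2:1 (log10 ratios
# -0.3, 0 and 0.3) survive the filter above.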
def add_primitive(primitives, block_name, block_args):
if block_name in primitives:
if not primitives[block_name] == block_args:
logger.warning(f"Distinct devices mapped to the same primitive {block_name}: \
existing: {primitives[block_name]}\
new: {block_args}")
else:
logger.debug(f"Found primitive {block_name} with {block_args}")
if 'x_cells' in block_args and 'y_cells' in block_args:
x, y = block_args['x_cells'], block_args['y_cells']
pairs = set()
m = x*y
y_sqrt = floor(sqrt(x*y))
for y in range(y_sqrt, 0, -1):
if m % y == 0:
pairs.add((y, m//y))
pairs.add((m//y, y))
if y == 1:
break
pairs = limit_pairs((pairs))
for newx, newy in pairs:
concrete_name = f'{block_name}_X{newx}_Y{newy}'
if concrete_name not in primitives:
primitives[concrete_name] = deepcopy(block_args)
primitives[concrete_name]['x_cells'] = newx
primitives[concrete_name]['y_cells'] = newy
primitives[concrete_name]['abstract_template_name'] = block_name
primitives[concrete_name]['concrete_template_name'] = concrete_name
else:
primitives[block_name] = block_args
primitives[block_name]['abstract_template_name'] = block_name
primitives[block_name]['concrete_template_name'] = block_name
def gen_param(subckt, primitives, pdk_dir):
block_name = subckt.name
vt = subckt.elements[0].model
values = subckt.elements[0].parameters
generator_name = subckt.generator["name"]
block_name = subckt.name
generator_name = subckt.generator["name"]
layers_json = pdk_dir / "layers.json"
with open(layers_json, "rt") as fp:
pdk_data = json.load(fp)
design_config = pdk_data["design_info"]
if len(subckt.elements) == 1:
values = subckt.elements[0].parameters
else:
mvalues = {}
for ele in subckt.elements:
mvalues[ele.name] = ele.parameters
if generator_name == 'CAP':
assert float(values["VALUE"]) or float(values["C"]), f"unidentified size {values} for {block_name}"
if "C" in values:
size = round(float(values["C"]) * 1E15, 4)
elif 'VALUE' in values:
size = round(float(values["VALUE"]) * 1E15, 4)
assert size <= design_config["max_size_cap"], f"caps larger that {design_config['max_size_cap']}fF are not supported"
# TODO: use float in name
logger.debug(f"Generating capacitor for:{block_name}, {size}")
block_args = {
'primitive': generator_name,
'value': float(size)
}
add_primitive(primitives, block_name, block_args)
elif generator_name == 'RES':
assert float(values["VALUE"]) or float(values["R"]), f"unidentified size {values['VALUE']} for {name}"
if "R" in values:
size = round(float(values["R"]), 2)
elif 'VALUE' in values:
size = round(float(values["VALUE"]), 2)
# TODO: use float in name
if size.is_integer():
size = int(size)
height = ceil(sqrt(float(size) / design_config["unit_height_res"]))
logger.debug(f'Generating resistor for: {block_name} {size}')
block_args = {
'primitive': generator_name,
'value': (height, float(size))
}
add_primitive(primitives, block_name, block_args)
else:
assert 'MOS' == generator_name, f'{generator_name} is not recognized'
if "vt_type" in design_config:
vt = [vt.upper() for vt in design_config["vt_type"] if vt.upper() in subckt.elements[0].model]
mvalues = {}
for ele in subckt.elements:
mvalues[ele.name] = ele.parameters
device_name_all = [*mvalues.keys()]
device_name = next(iter(mvalues))
for key in mvalues:
assert mvalues[key]["W"] != str, f"unrecognized size of device {key}:{mvalues[key]['W']} in {block_name}"
assert int(
float(mvalues[key]["W"])*1E+9) % design_config["Fin_pitch"] == 0, \
f"Width of device {key} in {block_name} should be multiple of fin pitch:{design_config['Fin_pitch']}"
size = int(float(mvalues[key]["W"])*1E+9/design_config["Fin_pitch"])
mvalues[key]["NFIN"] = size
name_arg = 'NFIN'+str(size)
if 'NF' in mvalues[device_name].keys():
for key in mvalues:
assert int(mvalues[key]["NF"]), f"unrecognized NF of device {key}:{mvalues[key]['NF']} in {name}"
assert int(mvalues[key]["NF"]) % 2 == 0, f"NF must be even for device {key}:{mvalues[key]['NF']} in {name}"
name_arg = name_arg+'_NF'+str(int(mvalues[device_name]["NF"]))
if 'M' in mvalues[device_name].keys():
for key in mvalues:
assert int(mvalues[key]["M"]), f"unrecognized M of device {key}:{mvalues[key]['M']} in {name}"
if "PARALLEL" in mvalues[key].keys() and int(mvalues[key]['PARALLEL']) > 1:
mvalues[key]["PARALLEL"] = int(mvalues[key]['PARALLEL'])
mvalues[key]['M'] = int(mvalues[key]['M'])*int(mvalues[key]['PARALLEL'])
name_arg = name_arg+'_M'+str(int(mvalues[device_name]["M"]))
size = 0
logger.debug(f"Generating lef for {block_name}")
if isinstance(size, int):
for key in mvalues:
assert int(mvalues[device_name]["NFIN"]) == int(mvalues[key]["NFIN"]), f"W should be same for all devices in {name} {mvalues}"
size_device = int(mvalues[key]["NF"])*int(mvalues[key]["M"])
size = size + size_device
no_units = ceil(size / (2*len(mvalues))) # Factor 2 is due to NF=2 in each unit cell; needs to be generalized
if any(x in block_name for x in ['DP', '_S']) and floor(sqrt(no_units/3)) >= 1:
square_y = floor(sqrt(no_units/3))
else:
square_y = floor(sqrt(no_units))
while no_units % square_y != 0:
square_y -= 1
yval = square_y
xval = int(no_units / square_y)
if 'SCM' in block_name:
if int(mvalues[device_name_all[0]]["NFIN"])*int(mvalues[device_name_all[0]]["NF"])*int(mvalues[device_name_all[0]]["M"]) != \
int(mvalues[device_name_all[1]]["NFIN"])*int(mvalues[device_name_all[1]]["NF"])*int(mvalues[device_name_all[1]]["M"]):
square_y = 1
yval = square_y
xval = int(no_units / square_y)
block_args = {
'primitive': generator_name,
'value': mvalues[device_name]["NFIN"],
'x_cells': xval,
'y_cells': yval,
'parameters': mvalues
}
if 'STACK' in mvalues[device_name].keys() and int(mvalues[device_name]["STACK"]) > 1:
block_args['stack'] = int(mvalues[device_name]["STACK"])
if vt:
block_args['vt_type'] = vt[0]
add_primitive(primitives, block_name, block_args)
return True
| 45.642045 | 142 | 0.563675 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,823 | 0.226939 |
6fe94897b6016b8d63c8281d624fe928a95dab1f | 1,804 | py | Python | forcephot/throttles.py | lukeshingles/atlasserver | 87c8e437891a1516ac1fadb84d1d9b796dc5a367 | [
"MIT"
]
| 5 | 2020-11-06T11:55:07.000Z | 2021-09-28T22:27:28.000Z | forcephot/throttles.py | lukeshingles/atlasserver | 87c8e437891a1516ac1fadb84d1d9b796dc5a367 | [
"MIT"
]
| 21 | 2020-11-03T13:26:30.000Z | 2022-02-18T10:50:52.000Z | forcephot/throttles.py | lukeshingles/atlasserver | 87c8e437891a1516ac1fadb84d1d9b796dc5a367 | [
"MIT"
]
| null | null | null | from rest_framework.throttling import SimpleRateThrottle
class ForcedPhotRateThrottle(SimpleRateThrottle):
"""
Limits the rate of API calls by different amounts for various parts of
the API. Any view that has the `throttle_scope` property set will be
throttled. The unique cache key will be generated by concatenating the
user id of the request, and the scope of the view being accessed.
"""
scope_attr = 'throttle_scope'
def __init__(self):
# Override the usual SimpleRateThrottle, because we can't determine
# the rate until called by the view.
pass
def allow_request(self, request, view):
if request.method == 'GET':
return True
# We can only determine the scope once we're called by the view.
self.scope = getattr(view, self.scope_attr, None)
        # If a view does not have a `throttle_scope`, always allow the request
if not self.scope:
return True
# Determine the allowed request rate as we normally would during
# the `__init__` call.
self.rate = self.get_rate()
self.num_requests, self.duration = self.parse_rate(self.rate)
# We can now proceed as normal.
return super().allow_request(request, view)
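    # Example DRF view wired to this throttle (hypothetical names; the
    # actual rate is looked up in settings under the given scope):
    #
    #   class QueuePhotometry(APIView):
    #       throttle_classes = [ForcedPhotRateThrottle]
    #       throttle_scope = 'forcedphot'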
def get_cache_key(self, request, view):
"""
If `view.throttle_scope` is not set, don't apply this throttle.
Otherwise generate the unique cache key by concatenating the user id
with the '.throttle_scope` property of the view.
"""
if request.user.is_authenticated:
ident = request.user.pk
else:
ident = self.get_ident(request)
return self.cache_format % {
'scope': self.scope,
'ident': ident
}
| 34.037736 | 77 | 0.644678 | 1,744 | 0.966741 | 0 | 0 | 0 | 0 | 0 | 0 | 916 | 0.507761 |
6fea6ea2847af9fb33f26ea132b770aa2ffca311 | 9,206 | py | Python | dot_vim/plugged/ultisnips/test/test_SnippetOptions.py | gabefgonc/san-francisco-rice-dotfiles | 60ff3539f34ecfff6d7bce895497e2a3805910d4 | [
"MIT"
]
| 10 | 2020-07-21T21:59:54.000Z | 2021-07-19T11:01:47.000Z | dot_vim/plugged/ultisnips/test/test_SnippetOptions.py | gabefgonc/san-francisco-rice-dotfiles | 60ff3539f34ecfff6d7bce895497e2a3805910d4 | [
"MIT"
]
| null | null | null | dot_vim/plugged/ultisnips/test/test_SnippetOptions.py | gabefgonc/san-francisco-rice-dotfiles | 60ff3539f34ecfff6d7bce895497e2a3805910d4 | [
"MIT"
]
| 1 | 2021-01-30T18:17:01.000Z | 2021-01-30T18:17:01.000Z | # encoding: utf-8
from test.vim_test_case import VimTestCase as _VimTest
from test.constant import *
from test.util import running_on_windows
class SnippetOptions_OnlyExpandWhenWSInFront_Expand(_VimTest):
snippets = ("test", "Expand me!", "", "b")
keys = "test" + EX
wanted = "Expand me!"
class SnippetOptions_OnlyExpandWhenWSInFront_Expand2(_VimTest):
snippets = ("test", "Expand me!", "", "b")
keys = " test" + EX
wanted = " Expand me!"
class SnippetOptions_OnlyExpandWhenWSInFront_DontExpand(_VimTest):
snippets = ("test", "Expand me!", "", "b")
keys = "a test" + EX
wanted = "a test" + EX
class SnippetOptions_OnlyExpandWhenWSInFront_OneWithOneWO(_VimTest):
snippets = (("test", "Expand me!", "", "b"), ("test", "not at beginning", "", ""))
keys = "a test" + EX
wanted = "a not at beginning"
class SnippetOptions_OnlyExpandWhenWSInFront_OneWithOneWOChoose(_VimTest):
snippets = (("test", "Expand me!", "", "b"), ("test", "not at beginning", "", ""))
keys = " test" + EX + "1\n"
wanted = " Expand me!"
class SnippetOptions_ExpandInwordSnippets_SimpleExpand(_VimTest):
snippets = (("test", "Expand me!", "", "i"),)
keys = "atest" + EX
wanted = "aExpand me!"
class SnippetOptions_ExpandInwordSnippets_ExpandSingle(_VimTest):
snippets = (("test", "Expand me!", "", "i"),)
keys = "test" + EX
wanted = "Expand me!"
class SnippetOptions_ExpandInwordSnippetsWithOtherChars_Expand(_VimTest):
snippets = (("test", "Expand me!", "", "i"),)
keys = "$test" + EX
wanted = "$Expand me!"
class SnippetOptions_ExpandInwordSnippetsWithOtherChars_Expand2(_VimTest):
snippets = (("test", "Expand me!", "", "i"),)
keys = "-test" + EX
wanted = "-Expand me!"
class SnippetOptions_ExpandInwordSnippetsWithOtherChars_Expand3(_VimTest):
skip_if = lambda self: running_on_windows()
snippets = (("test", "Expand me!", "", "i"),)
keys = "ßßtest" + EX
wanted = "ßßExpand me!"
class _SnippetOptions_ExpandWordSnippets(_VimTest):
snippets = (("test", "Expand me!", "", "w"),)
class SnippetOptions_ExpandWordSnippets_NormalExpand(
_SnippetOptions_ExpandWordSnippets
):
keys = "test" + EX
wanted = "Expand me!"
class SnippetOptions_ExpandWordSnippets_NoExpand(_SnippetOptions_ExpandWordSnippets):
keys = "atest" + EX
wanted = "atest" + EX
class SnippetOptions_ExpandWordSnippets_ExpandSuffix(
_SnippetOptions_ExpandWordSnippets
):
keys = "a-test" + EX
wanted = "a-Expand me!"
class SnippetOptions_ExpandWordSnippets_ExpandSuffix2(
_SnippetOptions_ExpandWordSnippets
):
keys = "a(test" + EX
wanted = "a(Expand me!"
class SnippetOptions_ExpandWordSnippets_ExpandSuffix3(
_SnippetOptions_ExpandWordSnippets
):
keys = "[[test" + EX
wanted = "[[Expand me!"
class _No_Tab_Expand(_VimTest):
snippets = ("test", "\t\tExpand\tme!\t", "", "t")
class No_Tab_Expand_Simple(_No_Tab_Expand):
keys = "test" + EX
wanted = "\t\tExpand\tme!\t"
class No_Tab_Expand_Leading_Spaces(_No_Tab_Expand):
keys = " test" + EX
wanted = " \t\tExpand\tme!\t"
class No_Tab_Expand_Leading_Tabs(_No_Tab_Expand):
keys = "\ttest" + EX
wanted = "\t\t\tExpand\tme!\t"
class No_Tab_Expand_No_TS(_No_Tab_Expand):
def _extra_vim_config(self, vim_config):
vim_config.append("set sw=3")
vim_config.append("set sts=3")
keys = "test" + EX
wanted = "\t\tExpand\tme!\t"
class No_Tab_Expand_ET(_No_Tab_Expand):
def _extra_vim_config(self, vim_config):
vim_config.append("set sw=3")
vim_config.append("set expandtab")
keys = "test" + EX
wanted = "\t\tExpand\tme!\t"
class No_Tab_Expand_ET_Leading_Spaces(_No_Tab_Expand):
def _extra_vim_config(self, vim_config):
vim_config.append("set sw=3")
vim_config.append("set expandtab")
keys = " test" + EX
wanted = " \t\tExpand\tme!\t"
class No_Tab_Expand_ET_SW(_No_Tab_Expand):
def _extra_vim_config(self, vim_config):
vim_config.append("set sw=8")
vim_config.append("set expandtab")
keys = "test" + EX
wanted = "\t\tExpand\tme!\t"
class No_Tab_Expand_ET_SW_TS(_No_Tab_Expand):
def _extra_vim_config(self, vim_config):
vim_config.append("set sw=3")
vim_config.append("set sts=3")
vim_config.append("set ts=3")
vim_config.append("set expandtab")
keys = "test" + EX
wanted = "\t\tExpand\tme!\t"
class _TabExpand_RealWorld:
snippets = (
"hi",
r"""hi
`!p snip.rv="i1\n"
snip.rv += snip.mkline("i1\n")
snip.shift(1)
snip.rv += snip.mkline("i2\n")
snip.unshift(2)
snip.rv += snip.mkline("i0\n")
snip.shift(3)
snip.rv += snip.mkline("i3")`
snip.rv = repr(snip.rv)
End""",
)
class No_Tab_Expand_RealWorld(_TabExpand_RealWorld, _VimTest):
def _extra_vim_config(self, vim_config):
vim_config.append("set noexpandtab")
keys = "\t\thi" + EX
wanted = """\t\thi
\t\ti1
\t\ti1
\t\t\ti2
\ti0
\t\t\t\ti3
\t\tsnip.rv = repr(snip.rv)
\t\tEnd"""
class SnippetOptions_Regex_Expand(_VimTest):
snippets = ("(test)", "Expand me!", "", "r")
keys = "test" + EX
wanted = "Expand me!"
class SnippetOptions_Regex_WithSpace(_VimTest):
snippets = ("test ", "Expand me!", "", "r")
keys = "test " + EX
wanted = "Expand me!"
class SnippetOptions_Regex_Multiple(_VimTest):
snippets = ("(test *)+", "Expand me!", "", "r")
keys = "test test test" + EX
wanted = "Expand me!"
class _Regex_Self(_VimTest):
snippets = ("((?<=\W)|^)(\.)", "self.", "", "r")
class SnippetOptions_Regex_Self_Start(_Regex_Self):
keys = "." + EX
wanted = "self."
class SnippetOptions_Regex_Self_Space(_Regex_Self):
keys = " ." + EX
wanted = " self."
class SnippetOptions_Regex_Self_TextAfter(_Regex_Self):
keys = " .a" + EX
wanted = " .a" + EX
class SnippetOptions_Regex_Self_TextBefore(_Regex_Self):
keys = "a." + EX
wanted = "a." + EX
class SnippetOptions_Regex_PythonBlockMatch(_VimTest):
snippets = (
r"([abc]+)([def]+)",
r"""`!p m = match
snip.rv += m.group(2)
snip.rv += m.group(1)
`""",
"",
"r",
)
keys = "test cabfed" + EX
wanted = "test fedcab"
class SnippetOptions_Regex_PythonBlockNoMatch(_VimTest):
snippets = (r"cabfed", r"""`!p snip.rv = match or "No match"`""")
keys = "test cabfed" + EX
wanted = "test No match"
# Tests for Bug #691575
class SnippetOptions_Regex_SameLine_Long_End(_VimTest):
snippets = ("(test.*)", "Expand me!", "", "r")
keys = "test test abc" + EX
wanted = "Expand me!"
class SnippetOptions_Regex_SameLine_Long_Start(_VimTest):
snippets = ("(.*test)", "Expand me!", "", "r")
keys = "abc test test" + EX
wanted = "Expand me!"
class SnippetOptions_Regex_SameLine_Simple(_VimTest):
snippets = ("(test)", "Expand me!", "", "r")
keys = "abc test test" + EX
wanted = "abc test Expand me!"
class MultiWordSnippet_Simple(_VimTest):
snippets = ("test me", "Expand me!")
keys = "test me" + EX
wanted = "Expand me!"
class MultiWord_SnippetOptions_OnlyExpandWhenWSInFront_Expand(_VimTest):
snippets = ("test it", "Expand me!", "", "b")
keys = "test it" + EX
wanted = "Expand me!"
class MultiWord_SnippetOptions_OnlyExpandWhenWSInFront_Expand2(_VimTest):
snippets = ("test it", "Expand me!", "", "b")
keys = " test it" + EX
wanted = " Expand me!"
class MultiWord_SnippetOptions_OnlyExpandWhenWSInFront_DontExpand(_VimTest):
snippets = ("test it", "Expand me!", "", "b")
keys = "a test it" + EX
wanted = "a test it" + EX
class MultiWord_SnippetOptions_OnlyExpandWhenWSInFront_OneWithOneWO(_VimTest):
snippets = (
("test it", "Expand me!", "", "b"),
("test it", "not at beginning", "", ""),
)
keys = "a test it" + EX
wanted = "a not at beginning"
class MultiWord_SnippetOptions_OnlyExpandWhenWSInFront_OneWithOneWOChoose(_VimTest):
snippets = (
("test it", "Expand me!", "", "b"),
("test it", "not at beginning", "", ""),
)
keys = " test it" + EX + "1\n"
wanted = " Expand me!"
class MultiWord_SnippetOptions_ExpandInwordSnippets_SimpleExpand(_VimTest):
snippets = (("test it", "Expand me!", "", "i"),)
keys = "atest it" + EX
wanted = "aExpand me!"
class MultiWord_SnippetOptions_ExpandInwordSnippets_ExpandSingle(_VimTest):
snippets = (("test it", "Expand me!", "", "i"),)
keys = "test it" + EX
wanted = "Expand me!"
class _MultiWord_SnippetOptions_ExpandWordSnippets(_VimTest):
snippets = (("test it", "Expand me!", "", "w"),)
class MultiWord_SnippetOptions_ExpandWordSnippets_NormalExpand(
_MultiWord_SnippetOptions_ExpandWordSnippets
):
keys = "test it" + EX
wanted = "Expand me!"
class MultiWord_SnippetOptions_ExpandWordSnippets_NoExpand(
_MultiWord_SnippetOptions_ExpandWordSnippets
):
keys = "atest it" + EX
wanted = "atest it" + EX
class MultiWord_SnippetOptions_ExpandWordSnippets_ExpandSuffix(
_MultiWord_SnippetOptions_ExpandWordSnippets
):
keys = "a-test it" + EX
wanted = "a-Expand me!"
| 25.360882 | 86 | 0.647078 | 8,886 | 0.964821 | 0 | 0 | 0 | 0 | 0 | 0 | 2,515 | 0.273073 |
6fef2e1b0042f7afa10afa7bd70caee5d7662859 | 3,119 | py | Python | Solver.py | kazemisoroush/JCL | a752e8b445e270dab7597d96956c2c52d53665dc | [
"MIT"
]
| null | null | null | Solver.py | kazemisoroush/JCL | a752e8b445e270dab7597d96956c2c52d53665dc | [
"MIT"
]
| null | null | null | Solver.py | kazemisoroush/JCL | a752e8b445e270dab7597d96956c2c52d53665dc | [
"MIT"
]
| null | null | null | from scipy.optimize import minimize
class Solver(object):
# initialize the object
def __init__(self, servers, alpha, teta, lambda_s):
self.c_prime = 0
self.teta = teta
self.alpha = alpha
self.lambda_s = lambda_s
# fill servers specification in arrays for solver use
self.k = []
self.a = []
self.mu = []
self.lambs = []
self.bound = []
self.active = []
for server in servers:
self.a.append((server.e_p - server.e_i) / server.mu * server.k)
self.k.append(server.k)
self.mu.append(server.mu)
self.lambs.append(server.lamb)
self.active.append(server.active)
# solve sub-problem
def solve(self, linear):
if linear:
return self.linear_solver()
else:
return self.non_linear_solver()
# implementation of linear solver
# this method uses cvxopt solver
# visit http://cvxopt.org for more information
def linear_solver(self):
print("Initiating linear solver")
# implementation of non-linear solver
# this method uses scipy solver
# visit http://docs.scipy.org for more information
def non_linear_solver(self):
print("Initiating non-linear solver")
for active in self.active:
print(active)
cons = [{'type': 'eq', 'fun': self.workload_preserving_constraint}]
cons += self.inactive_server_constraint(self.lambs)
cons += self.positive_variables(self.lambs)
res = minimize(self.objective_function, self.lambs, method='SLSQP', bounds=None, constraints=tuple(cons))
print(res.x)
self.c_prime = res.fun
return res.x
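    # A minimal usage sketch (assumes Server objects exposing the k, mu,
    # lamb, active, e_p and e_i attributes read in __init__; the numeric
    # arguments here are purely illustrative):
    #
    #   solver = Solver(servers, alpha=0.5, teta=1.0, lambda_s=30.0)
    #   lambs = solver.solve(linear=False)  # per-server arrival rates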
# definition of objective function
def objective_function(self, x):
objective = 0
for index, lamb in enumerate(x):
if lamb == 0:
continue
objective += self.teta * (lamb / (self.mu[index] - lamb / self.k[index])) + self.alpha * (
self.a[index] * lamb)
return objective
# definition of workload preserving constraint
def workload_preserving_constraint(self, x):
constraint = 0
for index, lamb in enumerate(x):
constraint += lamb
constraint -= self.lambda_s
return constraint
# definition of inactive server constraint using "Big M" method
    def inactive_server_constraint(self, x):
        constraints = []
        for index, lamb in enumerate(x):
            if self.active[index] == 0:
                continue
            # Bind the loop variable via a default argument; otherwise every
            # lambda would capture the same (final) value of `index`.
            constraints.append({'type': 'ineq', 'fun': lambda x, i=index: self.lambda_s - x[i]})
        return constraints
# all variables must be positive
    def positive_variables(self, x):
        constraints = []
        for index, lamb in enumerate(x):
            # Again bind `index` with a default argument to avoid the
            # late-binding closure bug.
            if self.active[index] == 0:
                constraints.append({'type': 'eq', 'fun': lambda x, i=index: x[i]})
            else:
                constraints.append({'type': 'ineq', 'fun': lambda x, i=index: x[i]})
        return constraints
| 29.149533 | 113 | 0.588971 | 3,080 | 0.987496 | 0 | 0 | 0 | 0 | 0 | 0 | 626 | 0.200705 |
6ff10fab31eb05859eb44845cd805cd024038463 | 9,444 | py | Python | scope/device/spectra.py | drew-sinha/rpc-scope | 268864097b5b7d123a842f216adc446ec6b32d01 | [
"MIT"
]
| null | null | null | scope/device/spectra.py | drew-sinha/rpc-scope | 268864097b5b7d123a842f216adc446ec6b32d01 | [
"MIT"
]
| null | null | null | scope/device/spectra.py | drew-sinha/rpc-scope | 268864097b5b7d123a842f216adc446ec6b32d01 | [
"MIT"
]
| null | null | null | # This code is licensed under the MIT License (see LICENSE file for details)
import threading
import time
from ..util import smart_serial
from ..util import property_device
from ..util import state_stack
from ..config import scope_configuration
from . import iotool
def _make_dac_bytes(IIC_Addr, bit):
dac_bytes = bytearray(b'\x53\x00\x03\x00\x00\x00\x50')
dac_bytes[1] = IIC_Addr
dac_bytes[3] = 1 << bit
return dac_bytes
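# e.g. _make_dac_bytes(0x18, 0) == bytearray(b'\x53\x18\x03\x01\x00\x00\x50')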
LAMP_DAC_COMMANDS = {
'uv': _make_dac_bytes(0x18, 0),
'blue': _make_dac_bytes(0x1A, 0),
'cyan': _make_dac_bytes(0x18, 1),
'teal': _make_dac_bytes(0x1A, 1),
'green_yellow': _make_dac_bytes(0x18, 2),
'red': _make_dac_bytes(0x18, 3)
}
LAMP_SPECS = {
'uv': (396, 16),
'blue': (434, 22),
'cyan': (481, 22),
'teal': (508, 29),
'green_yellow': (545, 70),
'red': (633, 19)
}
LAMP_NAMES = set(LAMP_DAC_COMMANDS.keys())
class Lamp(state_stack.StateStackDevice):
def __init__(self, name, spectra):
super().__init__()
self._name = name
self._spectra = spectra
def set_intensity(self, value):
"""Set lamp intensity in the range [0, 255]"""
self._spectra._lamp_intensity(self._name, value)
def get_intensity(self):
return self._spectra._lamp_intensities[self._name]
def set_enabled(self, enabled):
self._spectra._lamp_enable(self._name, enabled)
def get_enabled(self):
return self._spectra._lamp_enableds[self._name]
class SpectraX(property_device.PropertyDevice):
_DESCRIPTION = 'Lumencor Spectra X'
_EXPECTED_INIT_ERRORS = (smart_serial.SerialException,)
def __init__(self, iotool: iotool.IOTool, property_server=None, property_prefix=''):
super().__init__(property_server, property_prefix)
self._spconfig = scope_configuration.get_config().spectra
self._serial_port = smart_serial.Serial(self._spconfig.SERIAL_PORT, timeout=1, **self._spconfig.SERIAL_ARGS)
# RS232 Lumencor docs state: "The [following] two commands MUST be issued after every power cycle to properly configure controls for further commands."
# "Set GPIO0-3 as open drain output"
self._serial_port.write(b'\x57\x02\xFF\x50')
# "Set GPI05-7 push-pull out, GPIO4 open drain out"
self._serial_port.write(b'\x57\x03\xAB\x50')
# test if we can connect:
try:
self.get_temperature()
except smart_serial.SerialTimeout:
# explicitly clobber traceback from SerialTimeout exception
raise smart_serial.SerialException('Could not read data from Spectra -- is it turned on?')
self._iotool = iotool
if property_server:
self._update_property('temperature', self.get_temperature())
self._sleep_time = 10
self._timer_running = True
self._timer_thread = threading.Thread(target=self._timer_update_temp, daemon=True)
self._timer_thread.start()
self._available_lamps = set(self._spconfig.IOTOOL_LAMP_PINS.keys())
for name in self._available_lamps:
setattr(self, name, Lamp(name, self))
self._lamp_intensities = {}
self._lamp_enableds = {}
self.lamps(**{lamp+'_enabled': False for lamp in self._available_lamps})
self.lamps(**{lamp+'_intensity': 255 for lamp in self._available_lamps})
def _timer_update_temp(self):
while self._timer_running:
self._update_property('temperature', self.get_temperature())
time.sleep(self._sleep_time)
def _lamp_intensity(self, lamp, value):
assert 0 <= value <= 255
inverted = 255 - value
# Put the intensity value, which is from 0xFF (off) to 0 (on), into the middle 8 bits of a 16-bit integer,
# with the high 4 bits as 0xF and the low 4 as 0. Bizarre, but that's the wire protocol.
intensity_bytes = 0xF000 | (inverted << 4)
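        # e.g. value=255 (full on) -> inverted=0   -> intensity_bytes=0xF000
        #      value=0   (off)     -> inverted=255 -> intensity_bytes=0xFFF0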
dac_bytes = LAMP_DAC_COMMANDS[lamp]
dac_bytes[4] = intensity_bytes >> 8
dac_bytes[5] = intensity_bytes & 0x00FF
self._serial_port.write(bytes(dac_bytes))
self._lamp_intensities[lamp] = value
self._update_property(lamp+'.intensity', value)
def _lamp_enable(self, lamp, enabled):
self._iotool.execute(*self._iotool_lamp_commands(**{lamp: enabled}))
self._lamp_enableds[lamp] = enabled
self._update_property(lamp+'.enabled', enabled)
def _iotool_lamp_commands(self, **lamps):
"""Produce a sequence of IOTool commands to enable and disable given
Spectra lamps.
Keyword arguments must be lamp names, as specified in the scope configuration.
The values specified must be True to enable that lamp, False to disable,
or None to do nothing (unspecified lamps are also not altered)."""
commands = []
for lamp, enabled in lamps.items():
if enabled is None:
continue
pin = self._spconfig.IOTOOL_LAMP_PINS[lamp]
if enabled:
commands.append(self._iotool.commands.set_high(pin))
else:
commands.append(self._iotool.commands.set_low(pin))
return commands
def get_lamp_specs(self):
"""Return a dict mapping lamp names to tuples of (peak_wavelength, bandwidth), in nm,
where bandwidth is the minimum width required to contain 75% of the spectral intensity
of the lamp output."""
return {lamp: LAMP_SPECS[lamp] for lamp in self._available_lamps}
def get_temperature(self):
self._serial_port.write(b'\x53\x91\x02\x50')
r = self._serial_port.read(2)
return ((r[0] << 3) | (r[1] >> 5)) * 0.125
def lamps(self, **lamp_parameters):
"""Set a number of lamp parameters at once using keyword arguments, e.g.
spectra.lamps(red_enabled=True, red_intensity=255, blue_enabled=False)
Intensity values must be in the range [0, 255]. Valid lamp names can be
retrieved with get_lamp_specs().
"""
self._set_state(lamp_parameters.items())
def _get_getter_setter(self, prop):
"""Return a property setter/getter pair, either from a "real" property
get/set pair, or a "virtual" property like "red_enabled" or "cyan_intensity"."""
if hasattr(self, 'set_'+prop):
return getattr(self, 'get_'+prop), getattr(self, 'set_'+prop)
else:
lamp_name, lamp_prop = prop.rsplit('_', 1)
if lamp_name not in self._available_lamps:
raise ValueError('Invalid lamp name')
lamp = getattr(self, lamp_name)
if hasattr(lamp, 'set_'+ lamp_prop):
return getattr(lamp, 'get_'+lamp_prop), getattr(lamp, 'set_'+lamp_prop)
else:
raise ValueError('Invalid lamp parameter "{}"'.format(lamp_prop))
def _set_state(self, properties_and_values):
for prop, value in properties_and_values:
setter = self._get_getter_setter(prop)[1]
setter(value)
def push_state(self, **lamp_parameters):
"""Set a number of parameters at once using keyword arguments, while
saving the old values of those parameters. (See lamps() for a description
of valid parameters.) pop_state() will restore those previous values.
push_state/pop_state pairs can be nested arbitrarily."""
# Also note that we do not filter out identical states from being pushed.
# Since the enabled state can be fiddled with IOTool, there is good reason
# for pushing an enabled state identical to the current one, so that it
# will be restored after any such fiddling.
old_state = {}
for prop, value in lamp_parameters.items():
getter, setter = self._get_getter_setter(prop)
old_state[prop] = getter()
setter(value)
self._state_stack.append(old_state)
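    # A minimal usage sketch (hypothetical lamp names; see lamps() for the
    # parameter format):
    #
    #     spectra.push_state(red_enabled=True, red_intensity=200)
    #     ...  # acquire with the red lamp forced on
    #     spectra.pop_state()  # restores the saved enabled/intensity values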
class Spectra(SpectraX):
_DESCRIPTION = 'Lumencor Spectra'
def __init__(self, iotool: iotool.IOTool, property_server=None, property_prefix=''):
super().__init__(iotool, property_server, property_prefix)
self.set_green_yellow_filter('green')
def _iotool_enable_green_command(self):
"""Produce a command that switches the green/yellow paddle to the green filter position."""
return self._iotool.commands.set_high(self._spconfig.IOTOOL_GREEN_YELLOW_SWITCH_PIN)
def _iotool_enable_yellow_command(self):
"""Produce a command that switches the green/yellow paddle to the yellow filter position."""
return self._iotool.commands.set_low(self._spconfig.IOTOOL_GREEN_YELLOW_SWITCH_PIN)
def set_green_yellow_filter(self, position):
"""'position' should be either 'green' or 'yellow' to insert the
corresponding excitation filter into the green/yellow beam."""
if position not in {'green', 'yellow'}:
raise ValueError('"position" parameter must be either "green" or "yellow"')
if position == 'green':
self._iotool.execute(self._iotool_enable_green_command())
else:
self._iotool.execute(self._iotool_enable_yellow_command())
time.sleep(self._spconfig.FILTER_SWITCH_DELAY)
self._green_yellow_pos = position
self._update_property('green_yellow_filter', position)
def get_green_yellow_filter(self):
return self._green_yellow_pos
| 42.927273 | 159 | 0.66529 | 8,536 | 0.903854 | 0 | 0 | 0 | 0 | 0 | 0 | 3,030 | 0.320839 |
6ff1787c0118af1f7695392c705c973f44490257 | 352 | py | Python | src/rics/utility/perf/__init__.py | rsundqvist/rics | c67ff6703facb3170535dcf173d7e55734cedbc6 | [
"MIT"
]
| 1 | 2022-02-24T22:12:13.000Z | 2022-02-24T22:12:13.000Z | src/rics/utility/perf/__init__.py | rsundqvist/rics | c67ff6703facb3170535dcf173d7e55734cedbc6 | [
"MIT"
]
| 26 | 2022-02-24T21:08:51.000Z | 2022-03-19T19:55:26.000Z | src/rics/utility/perf/__init__.py | rsundqvist/rics | c67ff6703facb3170535dcf173d7e55734cedbc6 | [
"MIT"
]
| null | null | null | """Performance testing utility."""
from ._format_perf_counter import format_perf_counter
from ._multi_case_timer import MultiCaseTimer
from ._util import plot_run, to_dataframe
from ._wrappers import run_multivariate_test
__all__ = [
"MultiCaseTimer",
"run_multivariate_test",
"format_perf_counter",
"plot_run",
"to_dataframe",
]
| 23.466667 | 53 | 0.769886 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 118 | 0.335227 |
6ff20bebb27acf09aac86317bb08657cf322d1a8 | 1,951 | py | Python | LinkPrediction/pruning.py | x-zho14/Unified-LTH-GNN | edbb2f9aaa7cb363424dcfcb2ce198cfb66f3d55 | [
"MIT"
]
| 29 | 2021-02-17T02:46:54.000Z | 2022-03-18T02:09:03.000Z | LinkPrediction/pruning.py | x-zho14/Unified-LTH-GNN | edbb2f9aaa7cb363424dcfcb2ce198cfb66f3d55 | [
"MIT"
]
| 1 | 2021-09-03T13:30:50.000Z | 2021-09-03T13:30:50.000Z | LinkPrediction/pruning.py | x-zho14/Unified-LTH-GNN | edbb2f9aaa7cb363424dcfcb2ce198cfb66f3d55 | [
"MIT"
]
| 10 | 2021-04-01T16:27:03.000Z | 2022-03-07T09:20:38.000Z | import numpy as np
import torch
from sklearn.metrics import roc_auc_score
from sklearn.metrics import average_precision_score
def test(model, features, adj, sparse, adj_sparse, test_edges, test_edges_false):
eps = 1e-4
embeds, _,_, S= model.embed(features, adj, sparse, None, 100)
embs = embeds[0, :]
embs = embs / (embs.norm(dim=1)[:, None] + eps)
sc_roc, sc_ap = get_roc_score(test_edges, test_edges_false, embs.cpu().detach().numpy(), adj_sparse)
return sc_roc, sc_ap
def get_roc_score(edges_pos, edges_neg, embeddings, adj_sparse):
"from https://github.com/tkipf/gae"
score_matrix = np.dot(embeddings, embeddings.T)
def sigmoid(x):
return 1 / (1 + np.exp(-x))
# Store positive edge predictions, actual values
preds_pos = []
pos = []
for edge in edges_pos:
preds_pos.append(sigmoid(score_matrix[edge[0], edge[1]])) # predicted score
pos.append(adj_sparse[edge[0], edge[1]]) # actual value (1 for positive)
# Store negative edge predictions, actual values
preds_neg = []
neg = []
for edge in edges_neg:
preds_neg.append(sigmoid(score_matrix[edge[0], edge[1]])) # predicted score
neg.append(adj_sparse[edge[0], edge[1]]) # actual value (0 for negative)
# Calculate scores
preds_all = np.hstack([preds_pos, preds_neg])
labels_all = np.hstack([np.ones(len(preds_pos)), np.zeros(len(preds_neg))])
#print(preds_all, labels_all )
roc_score = roc_auc_score(labels_all, preds_all)
ap_score = average_precision_score(labels_all, preds_all)
return roc_score, ap_score
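# A small usage sketch for get_roc_score (values are illustrative only):
#
#     emb = np.random.rand(4, 8)                      # 4 node embeddings
#     adj = np.zeros((4, 4)); adj[0, 1] = adj[1, 0] = 1
#     roc, ap = get_roc_score([(0, 1)], [(2, 3)], emb, adj)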
def torch_normalize_adj(adj):
adj = adj + torch.eye(adj.shape[0]).cuda()
rowsum = adj.sum(1)
d_inv_sqrt = torch.pow(rowsum, -0.5).flatten()
d_inv_sqrt[torch.isinf(d_inv_sqrt)] = 0.0
d_mat_inv_sqrt = torch.diag(d_inv_sqrt).cuda()
return adj.mm(d_mat_inv_sqrt).t().mm(d_mat_inv_sqrt) | 34.839286 | 104 | 0.672476 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 275 | 0.140953 |
6ff236eea364203b4294e15e6410629f2aeb3886 | 3,143 | py | Python | imap_wrapper.py | tbrownaw/rss-imap | 41d930d778017a9d60feed1688f9f2c7a94b94a6 | [
"MIT"
]
| 2 | 2016-09-28T19:44:53.000Z | 2021-09-17T11:36:24.000Z | imap_wrapper.py | tbrownaw/rss-imap | 41d930d778017a9d60feed1688f9f2c7a94b94a6 | [
"MIT"
]
| null | null | null | imap_wrapper.py | tbrownaw/rss-imap | 41d930d778017a9d60feed1688f9f2c7a94b94a6 | [
"MIT"
]
| null | null | null | import email
import logging
import re
from imapclient import IMAPClient
class IMAPError(IOError):
pass
class ImapWrapper:
"""A wrapper around imaplib, since that's a bit
lower-level than I'd prefer to work with."""
    # This regex is:
# list of flags in parens
# quoted delimiter
# possible-quoted folder name
list_matcher = re.compile(r'^\(([^()]*)\) "([^"]*)" (([^" ]+)|"([^"]*)")$')
def __init__(self, host, user, pw, **kwargs):
"""kwargs: Paassed through to IMAPClient"""
self.M = IMAPClient(host, **kwargs)
self.M.login(user, pw)
self._selected_folder = None
self._update_folders()
def logout(self):
self.M.logout()
def _update_folders(self):
listing = self.M.list_folders()
self.folder_list = [name for (flags, delim, name) in listing]
def ensure_folder(self, name):
"""Return True if the folder was created, False if it already existed."""
l = logging.getLogger(__name__)
search_name = name[:-1] if name.endswith('/') else name
if not any(n == search_name for n in self.folder_list):
rslt = self.M.create_folder(name)
l.info(f"Folder create result: {rslt}")
self.folder_list.append(search_name)
return True
else:
return False
def fetch_messages(self, folder, *search_args):
l = logging.getLogger(__name__)
ret = []
self.select_folder(folder)
message_ids = self.M.search(search_args)
message_dict = self.M.fetch(message_ids, 'RFC822')
for msg in message_dict.values():
l.debug("Got message: %s", msg)
msg = email.message_from_string(msg[b'RFC822'].decode('UTF-8'))
ret.append(msg)
return ret
def check_folder_for_message_ids(self, folder, msgids):
self.select_folder(folder)
search_ids = []
for msgid in msgids:
if len(search_ids) > 0:
search_ids.insert(0, 'OR')
search_ids.append(['HEADER', 'Message-Id', msgid])
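        # The loop builds a prefix-notation OR chain: for ids a, b, c the
        # criteria become ['OR', 'OR', [..a], [..b], [..c]], i.e. OR(OR(a, b), c),
        # since IMAP's OR takes exactly two operands.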
message_numbers = self.M.search(['NOT', 'DELETED', search_ids])
message_envelopes = self.M.fetch(message_numbers, 'ENVELOPE')
have_ids = []
for msgdata in message_envelopes.values():
envelope = msgdata[b'ENVELOPE']
have_ids.append(envelope.message_id)
return have_ids
def append(self, folder_name, email):
response = self.M.append(folder_name, str(email).encode('utf-8'))
logging.getLogger(__name__).debug("Append response: %s", response)
# FIXME sets the context folder
def select_folder(self, name):
if self._selected_folder == name:
return
dtl = self.M.select_folder(name)
logging.getLogger(__name__).debug("select_folder = %s", dtl)
self._selected_folder = name
def create_subscribe_folder(self, name):
created = self.ensure_folder(name)
if created:
res = self.M.subscribe_folder(name)
logging.getLogger(__name__).debug("Subscribe result: %s", res)
| 34.538462 | 81 | 0.610563 | 3,065 | 0.975183 | 0 | 0 | 0 | 0 | 0 | 0 | 582 | 0.185173 |
6ff24a30aea978a6baf63da8c2c0d8819e5f801c | 19 | py | Python | beeline/version.py | noam-stein/beeline-python | a5ae7b30d9abebc681524f1087c404fb2e2b915f | [
"Apache-2.0"
]
| null | null | null | beeline/version.py | noam-stein/beeline-python | a5ae7b30d9abebc681524f1087c404fb2e2b915f | [
"Apache-2.0"
]
| null | null | null | beeline/version.py | noam-stein/beeline-python | a5ae7b30d9abebc681524f1087c404fb2e2b915f | [
"Apache-2.0"
]
| null | null | null | VERSION = '2.11.2'
| 9.5 | 18 | 0.578947 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 | 0.421053 |
6ff5a9c769828d3ebb1466ef2f609d93dfafa30f | 2,527 | py | Python | genie/seg.py | karawoo/Genie | d39451655ec3632df6002c1d73b17dacba2a8720 | [
"MIT"
]
| null | null | null | genie/seg.py | karawoo/Genie | d39451655ec3632df6002c1d73b17dacba2a8720 | [
"MIT"
]
| null | null | null | genie/seg.py | karawoo/Genie | d39451655ec3632df6002c1d73b17dacba2a8720 | [
"MIT"
]
| 1 | 2022-01-20T16:33:19.000Z | 2022-01-20T16:33:19.000Z | import logging
import os
import pandas as pd
from .example_filetype_format import FileTypeFormat
from . import process_functions
logger = logging.getLogger(__name__)
class seg(FileTypeFormat):
_fileType = "seg"
_process_kwargs = ["newPath", "databaseSynId"]
def _validateFilename(self, filePath):
assert os.path.basename(filePath[0]) == "genie_data_cna_hg19_%s.%s" % (self.center, self._fileType)
def _process(self, seg):
seg.columns = [col.upper() for col in seg.columns]
newsamples = [process_functions.checkGenieId(i, self.center) for i in seg['ID']]
seg['ID'] = newsamples
seg = seg.drop_duplicates()
        seg = seg.rename(columns={'LOC.START': 'LOCSTART', 'LOC.END': 'LOCEND', 'SEG.MEAN': 'SEGMEAN', 'NUM.MARK': 'NUMMARK'})
seg['CHROM'] = [str(chrom).replace("chr","") for chrom in seg['CHROM']]
seg['CENTER'] = self.center
seg['LOCSTART'] = seg['LOCSTART'].astype(int)
seg['LOCEND'] = seg['LOCEND'].astype(int)
seg['NUMMARK'] = seg['NUMMARK'].astype(int)
return(seg)
def process_steps(self, seg, newPath, databaseSynId):
seg = self._process(seg)
process_functions.updateData(self.syn, databaseSynId, seg, self.center, toDelete=True)
seg.to_csv(newPath,sep="\t",index=False)
return(newPath)
def _validate(self, segDF):
total_error = ""
warning = ""
segDF.columns = [col.upper() for col in segDF.columns]
REQUIRED_HEADERS = pd.Series(['ID','CHROM','LOC.START','LOC.END','NUM.MARK','SEG.MEAN'])
if not all(REQUIRED_HEADERS.isin(segDF.columns)):
total_error += "Your seg file is missing these headers: %s.\n" % ", ".join(REQUIRED_HEADERS[~REQUIRED_HEADERS.isin(segDF.columns)])
else:
intCols = ['LOC.START','LOC.END','NUM.MARK']
nonInts = [col for col in intCols if segDF[col].dtype != int]
if len(nonInts) > 0:
total_error += "Seg: Only integars allowed in these column(s): %s.\n" % ", ".join(sorted(nonInts))
if not segDF['SEG.MEAN'].dtype in [float, int]:
total_error += "Seg: Only numerical values allowed in SEG.MEAN.\n"
checkNA = segDF.isna().apply(sum)
nullCols = [ind for ind in checkNA.index if checkNA[ind] > 0]
if len(nullCols) > 0:
total_error += "Seg: No null or empty values allowed in column(s): %s.\n" % ", ".join(sorted(nullCols))
return(total_error, warning)
| 40.111111 | 143 | 0.61852 | 2,355 | 0.931935 | 0 | 0 | 0 | 0 | 0 | 0 | 544 | 0.215275 |
6ff60de2cd7ad626ba0327a1b10d9c7e29a27101 | 3,150 | py | Python | serve/api/predict.py | HalleyYoung/musicautobot | 075afba70a57ebacfcd8d2bf9dc178a93c05a116 | [
"MIT"
]
| 402 | 2019-07-31T00:37:10.000Z | 2022-03-27T22:21:29.000Z | serve/api/predict.py | HalleyYoung/musicautobot | 075afba70a57ebacfcd8d2bf9dc178a93c05a116 | [
"MIT"
]
| 26 | 2019-08-20T13:44:30.000Z | 2022-01-27T10:42:28.000Z | serve/api/predict.py | HalleyYoung/musicautobot | 075afba70a57ebacfcd8d2bf9dc178a93c05a116 | [
"MIT"
]
| 81 | 2019-08-14T06:55:55.000Z | 2022-03-19T09:49:15.000Z | import sys
from . import app
sys.path.append(str(app.config['LIB_PATH']))
from musicautobot.music_transformer import *
from musicautobot.config import *
from flask import Response, send_from_directory, send_file, request, jsonify
from .save import to_s3
import torch
import traceback
torch.set_num_threads(4)
data = load_data(app.config['DATA_PATH'], app.config['DATA_SAVE_NAME'], num_workers=1)
learn = music_model_learner(data, pretrained_path=app.config['MUSIC_MODEL_PATH'])
if torch.cuda.is_available(): learn.model.cuda()
# learn.to_fp16(loss_scale=512) # fp16 not supported for cpu - https://github.com/pytorch/pytorch/issues/17699
@app.route('/predict/midi', methods=['POST'])
def predict_midi():
args = request.form.to_dict()
midi = request.files['midi'].read()
print('THE ARGS PASSED:', args)
bpm = float(args['bpm']) # (AS) TODO: get bpm from midi file instead
temperatures = (float(args.get('noteTemp', 1.2)), float(args.get('durationTemp', 0.8)))
n_words = int(args.get('nSteps', 200))
seed_len = int(args.get('seedLen', 12))
# debugging 1 - send exact midi back
# with open('/tmp/test.mid', 'wb') as f:
# f.write(midi)
# return send_from_directory('/tmp', 'test.mid', mimetype='audio/midi')
# debugging 2 - test music21 conversion
# stream = file2stream(midi) # 1.
# debugging 3 - test npenc conversion
# seed_np = midi2npenc(midi) # music21 can handle bytes directly
# stream = npenc2stream(seed_np, bpm=bpm)
# debugging 4 - midi in, convert, midi out
# stream = file2stream(midi) # 1.
# midi_in = Path(stream.write("musicxml"))
# print('Midi in:', midi_in)
# stream_sep = separate_melody_chord(stream)
# midi_out = Path(stream_sep.write("midi"))
# print('Midi out:', midi_out)
# s3_id = to_s3(midi_out, args)
# result = {
# 'result': s3_id
# }
# return jsonify(result)
# Main logic
try:
full = predict_from_midi(learn, midi=midi, n_words=n_words, seed_len=seed_len, temperatures=temperatures)
stream = separate_melody_chord(full.to_stream(bpm=bpm))
midi_out = Path(stream.write("midi"))
print('Wrote to temporary file:', midi_out)
except Exception as e:
traceback.print_exc()
return jsonify({'error': f'Failed to predict: {e}'})
s3_id = to_s3(midi_out, args)
result = {
'result': s3_id
}
return jsonify(result)
# return send_from_directory(midi_out.parent, midi_out.name, mimetype='audio/midi')
# @app.route('/midi/song/<path:sid>')
# def get_song_midi(sid):
# return send_from_directory(file_path/data_dir, htlist[sid]['midi'], mimetype='audio/midi')
@app.route('/midi/convert', methods=['POST'])
def convert_midi():
args = request.form.to_dict()
if 'midi' in request.files:
midi = request.files['midi'].read()
elif 'midi_path'in args:
midi = args['midi_path']
stream = file2stream(midi) # 1.
# stream = file2stream(midi).chordify() # 1.
stream_out = Path(stream.write('musicxml'))
return send_from_directory(stream_out.parent, stream_out.name, mimetype='xml')
| 35 | 113 | 0.672381 | 0 | 0 | 0 | 0 | 2,340 | 0.742857 | 0 | 0 | 1,476 | 0.468571 |
6ff6a8b26ae79b9bbc49d8449424fa04eace814f | 1,086 | py | Python | tests/test_user.py | meisnate12/trakt.py | 37da6f64a4f82c6600a61e388458190590f86f29 | [
"MIT"
]
| 147 | 2015-01-07T11:27:26.000Z | 2022-02-21T19:57:44.000Z | tests/test_user.py | meisnate12/trakt.py | 37da6f64a4f82c6600a61e388458190590f86f29 | [
"MIT"
]
| 90 | 2015-01-11T14:38:22.000Z | 2021-10-03T12:18:13.000Z | tests/test_user.py | meisnate12/trakt.py | 37da6f64a4f82c6600a61e388458190590f86f29 | [
"MIT"
]
| 61 | 2015-01-09T12:32:09.000Z | 2022-02-03T00:50:36.000Z | from __future__ import absolute_import, division, print_function
from tests.core import mock
from trakt import Trakt
from httmock import HTTMock
import pytest
def test_likes():
with HTTMock(mock.fixtures, mock.unknown):
with Trakt.configuration.auth('mock', 'mock'):
likes = Trakt['users'].likes()
assert likes is not None
likes = list(likes)
assert len(likes) == 3
assert likes[0].keys == [
('trakt', 1519)
]
assert likes[1].keys == [
('trakt', '1238362'),
('slug', 'star-wars-machete')
]
assert likes[2].keys == [
('trakt', '840781'),
('slug', 'star-wars-timeline')
]
def test_likes_invalid_response():
with HTTMock(mock.fixtures, mock.unknown):
likes = Trakt['users'].likes()
assert likes is None
def test_likes_invalid_type():
with HTTMock(mock.fixtures, mock.unknown):
with pytest.raises(ValueError):
likes = Trakt['users'].likes('invalid')
assert likes is not None
likes = list(likes)
| 22.163265 | 64 | 0.601289 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 131 | 0.120626 |
6ff6e195cda0b39ff8fca0cbb3168fe50f6be0ec | 12,983 | py | Python | main.py | mohamed-seyam/Image-Mixer- | f2a2c3d5808e1c8485c851f3a27cbed7249ad073 | [
"MIT"
]
| null | null | null | main.py | mohamed-seyam/Image-Mixer- | f2a2c3d5808e1c8485c851f3a27cbed7249ad073 | [
"MIT"
]
| null | null | null | main.py | mohamed-seyam/Image-Mixer- | f2a2c3d5808e1c8485c851f3a27cbed7249ad073 | [
"MIT"
]
| null | null | null | import sys
import PyQt5.QtWidgets as qtw
import PyQt5.QtCore as qtc
from Image import Image
from main_layout import Ui_MainWindow
import logging
import os
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(levelname)s:%(name)s:%(asctime)s - %(message)s')
file_handler = logging.FileHandler('log')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
class MainWindow(qtw.QMainWindow):
def __init__(self):
super().__init__()
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.show()
self.images = {
'1': {
'original': self.ui.image_1_original,
'filtered': self.ui.image_1_after_filter,
'picker': self.ui.image_1_pick
},
'2': {
'original': self.ui.image_2_original,
'filtered': self.ui.image_2_after_filter,
'picker': self.ui.image_2_pick
}
}
self.img = {}
self.modes = {'Output 1': '', 'Output 2': ''}
self.output_channels = {
'Output 1': self.ui.output_1,
'Output 2': self.ui.output_2
}
self.output_channels_controlers = {
'': {
'select1': '',
'select2': '',
'slider1': 0,
'slider2': 0,
'type1': '',
'type2': '',
'percentage1': 0,
'percentage2': 0,
},
'Output 1': {
'select1': '',
'select2': '',
'slider1': 0,
'slider2': 0,
'type1': '',
'type2': '',
'percentage1': 0,
'percentage2': 0,
},
'Output 2': {
'select1': '',
'select2': '',
'slider1': 0,
'slider2': 0,
'type1': '',
'type2': '',
'percentage1': 0,
'percentage2': 0,
},
}
self.output_complementary = {
'': ['', 'Magnitude', 'Phase', 'Real', 'Imaginary', 'Uniform Magnitude', 'Uniform Phase'],
'Magnitude': ['Phase', 'Uniform Phase'],
'Phase': ['Magnitude', 'Uniform Magnitude'],
'Real': ['Imaginary'],
'Imaginary': ['Real'],
'Uniform Magnitude': ['Phase', 'Uniform Phase'],
'Uniform Phase': ['Magnitude', 'Uniform Magnitude'],
}
self.available_images = {
'': ''
}
self.enables = {
'': [self.ui.component_1_select, self.ui.component_2_select, self.ui.component_1_percentage,
self.ui.component_1_slider, self.ui.component_1_type,
self.ui.component_2_percentage, self.ui.component_2_slider, self.ui.component_2_type],
'output-select': [self.ui.component_1_select, self.ui.component_2_select],
'select1': [self.ui.component_1_percentage, self.ui.component_1_type],
'select2': [self.ui.component_2_percentage, self.ui.component_2_type],
'type1': [self.ui.component_1_slider],
'type2': [self.ui.component_2_slider]
}
self.current_output_channel = None
self.ui.action_new.triggered.connect(self.new_instance)
self.ui.action_exit.triggered.connect(self.close)
self.ui.action_open_image_1.triggered.connect(lambda: self.open_image(self.images['1'], 1))
self.ui.action_open_image_2.triggered.connect(lambda: self.open_image(self.images['2'], 2))
self.ui.image_1_pick.currentIndexChanged.connect(lambda: self.display_component(self.img['Image 1']))
self.ui.image_2_pick.currentIndexChanged.connect(lambda: self.display_component(self.img['Image 2']))
self.ui.output_select.currentIndexChanged.connect(lambda: self.pick_mixer_output())
self.ui.component_1_select.currentIndexChanged.connect(lambda: self.select_enable('select1', self.ui.component_1_select.currentText()))
self.ui.component_2_select.currentIndexChanged.connect(lambda: self.select_enable('select2', self.ui.component_2_select.currentText()))
self.ui.component_1_slider.sliderReleased.connect(lambda: self.mixer('slider1', str(self.ui.component_1_slider.value())))
self.ui.component_2_slider.sliderReleased.connect(lambda: self.mixer('slider2', str(self.ui.component_2_slider.value())))
self.ui.component_1_percentage.valueChanged.connect(lambda: self.change_image('percentage1', str(self.ui.component_1_percentage.value())))
self.ui.component_2_percentage.valueChanged.connect(lambda: self.change_image('percentage2', str(self.ui.component_2_percentage.value())))
        self.ui.component_1_type.currentIndexChanged.connect(lambda: self.component_1_complementary())
self.ui.component_1_type.currentIndexChanged.connect(lambda: self.select_enable('type1', str(self.ui.component_1_type.currentText())))
self.ui.component_2_type.currentIndexChanged.connect(lambda: self.select_enable('type2', str(self.ui.component_2_type.currentText())))
def new_instance(self) -> None:
self.child_window = MainWindow()
self.child_window.show()
def open_image(self, imageWidget: dict, channel: int) -> None:
image = Image()
if not image.path:
return
if len(self.img) == 1:
if f'Image {2//channel}' in self.img:
if not image.compare(self.img[f'Image {2//channel}']['image']):
qtw.QMessageBox.warning(self, 'failed', 'The Two Images Must be of the same size')
return
                else:
self.img[f'Image {channel}'] = {'image': image, 'widgets': imageWidget}
if f'Image {channel}' not in self.available_images:
self.available_images[f'Image {channel}'] = f'Image {channel}'
self.append_outputs(isOneChanneled=False)
            else:
self.img[f'Image {channel}'] = {'image': image, 'widgets': imageWidget}
elif len(self.img) >= 2:
if not image.compare(self.img[f'Image {2//channel}']['image']):
qtw.QMessageBox.warning(self, 'failed', 'The Two Images Must be of the same size')
return
self.img[f'Image {channel}']["image"] = image
self.img[f'Image {channel}']["widgets"] = imageWidget
        else:
self.img[f'Image {channel}'] = {'image': image, 'widgets': imageWidget}
if f'Image {channel}' not in self.available_images:
self.available_images[f'Image {channel}'] = f'Image {channel}'
self.append_outputs(channel=self.available_images[f'Image {channel}'])
imageWidget['original'].setPixmap(image.get_pixmap().scaled(300,300, aspectRatioMode=qtc.Qt.KeepAspectRatio, transformMode=qtc.Qt.SmoothTransformation))
imageWidget['picker'].setDisabled(False)
self.ui.output_select.setDisabled(False)
def append_outputs(self, isOneChanneled: bool=True, channel: str='') -> None:
if isOneChanneled:
self.ui.component_1_select.addItem('')
self.ui.component_2_select.addItem('')
self.ui.component_1_select.setItemText(0, '')
self.ui.component_1_select.setItemText(1, channel)
self.ui.component_2_select.setItemText(0, '')
self.ui.component_2_select.setItemText(1, channel)
else:
self.ui.component_1_select.addItem('')
self.ui.component_2_select.addItem('')
self.ui.component_1_select.setItemText(0, '')
self.ui.component_1_select.setItemText(1, 'Image 1')
self.ui.component_1_select.setItemText(2, 'Image 2')
self.ui.component_2_select.setItemText(0, '')
self.ui.component_2_select.setItemText(1, 'Image 1')
self.ui.component_2_select.setItemText(2, 'Image 2')
def display_component(self, imageWidget: dict) -> None:
component = imageWidget['widgets']['picker'].currentText()
imageWidget['widgets']['filtered'].setPixmap(imageWidget['image'].get_component_pixmap(component).scaled(300,300, aspectRatioMode=qtc.Qt.KeepAspectRatio, transformMode=qtc.Qt.SmoothTransformation))
        try:
            os.remove('test.png')
        except OSError:
            # the temporary preview file may not exist yet
            pass
def pick_mixer_output(self) -> None:
self.current_output_channel = self.ui.output_select.currentText()
self.ui.component_1_slider.setValue(int(self.output_channels_controlers[self.ui.output_select.currentText()]['slider1']))
self.ui.component_1_percentage.setValue(int(self.output_channels_controlers[self.ui.output_select.currentText()]['percentage1']))
self.ui.component_1_select.setCurrentText(self.output_channels_controlers[self.ui.output_select.currentText()]['select1'])
self.ui.component_1_type.setCurrentText(self.output_channels_controlers[self.ui.output_select.currentText()]['type1'])
self.ui.component_2_slider.setValue(int(self.output_channels_controlers[self.ui.output_select.currentText()]['slider2']))
self.ui.component_2_percentage.setValue(int(self.output_channels_controlers[self.ui.output_select.currentText()]['percentage2']))
self.ui.component_2_select.setCurrentText(self.output_channels_controlers[self.ui.output_select.currentText()]['select2'])
self.ui.component_2_type.setCurrentText(self.output_channels_controlers[self.ui.output_select.currentText()]['type2'])
if self.ui.output_select.currentText() != '':
self.set_mixer_components_disabled(self.enables['output-select'] ,False)
else:
self.set_mixer_components_disabled(self.enables['output-select'], True)
def set_mixer_components_disabled(self, components: list, logic: bool) -> None:
for component in components:
component.setDisabled(logic)
def select_enable(self, component: str, value: str):
self.change_image(component, value)
if value != '':
self.set_mixer_components_disabled(self.enables[component], False)
else:
self.set_mixer_components_disabled(self.enables[component], True)
def change_image(self, component: str, value: str) -> None:
self.output_channels_controlers[self.current_output_channel][component] = value
    def component_1_complementary(self):
self.ui.component_2_type.clear()
self.ui.component_2_type.addItems(self.output_complementary[self.ui.component_1_type.currentText()])
self.ui.component_2_type.update()
self.change_image('type1', self.ui.component_1_type.currentText())
def mixer(self, slider: str, sliderValue: str) -> None:
self.change_image(slider, sliderValue)
channel_1_ratio = float(self.output_channels_controlers[self.current_output_channel]['slider1']) / 100
channel_2_ratio = float(self.output_channels_controlers[self.current_output_channel]['slider2']) / 100
image_1 = self.output_channels_controlers[self.current_output_channel]['select1']
image_2 = self.output_channels_controlers[self.current_output_channel]['select2']
type1 = self.output_channels_controlers[self.current_output_channel]['type1']
type2 = self.output_channels_controlers[self.current_output_channel]['type2']
if image_1 == "" or image_2 == "" or type1 == "" or type2 == "":
return
try:
if (type1 in ['Magnitude', 'Phase', 'Uniform Magnitude', 'Uniform Phase']
and type2 in ['Magnitude', 'Phase', 'Uniform Magnitude', 'Uniform Phase']):
self.modes[self.current_output_channel] = 'mag-phase'
elif (type1 in ['Real', 'Imaginary']and type2 in ['Real', 'Imaginary']):
self.modes[self.current_output_channel] = 'real-imag'
else:
print('Error')
return
self.outImage = self.img[image_1]['image'].mix(self.img[image_2]['image'], self.output_channels_controlers[self.current_output_channel]['type1'], self.output_channels_controlers[self.current_output_channel]['type2'], channel_1_ratio, channel_2_ratio, self.modes[self.current_output_channel])
self.output_channels[self.current_output_channel].setPixmap(self.outImage.scaled(300,300, aspectRatioMode=qtc.Qt.KeepAspectRatio, transformMode=qtc.Qt.SmoothTransformation))
        except Exception:
            # mixing can fail while parameters are mid-edit; keep the last render
            pass
        try:
            os.remove('test.png')
        except OSError:
            pass
def main_window():
app = qtw.QApplication(sys.argv)
app.setStyle("Fusion")
window = MainWindow()
sys.exit(app.exec_())
if __name__ == '__main__':
main_window() | 47.907749 | 303 | 0.632828 | 12,344 | 0.950782 | 0 | 0 | 0 | 0 | 0 | 0 | 1,829 | 0.140877 |
6ff8202663f6030215b86bce2a40c9dd2290ed53 | 1,029 | py | Python | main_project/tienda_lissa_app/forms.py | NahuelA/TiendaLissa_Django | 72599e7cb20e1ebe346b484836e2200b69389281 | [
"MIT"
]
| null | null | null | main_project/tienda_lissa_app/forms.py | NahuelA/TiendaLissa_Django | 72599e7cb20e1ebe346b484836e2200b69389281 | [
"MIT"
]
| null | null | null | main_project/tienda_lissa_app/forms.py | NahuelA/TiendaLissa_Django | 72599e7cb20e1ebe346b484836e2200b69389281 | [
"MIT"
]
| null | null | null | from django import forms
# Form for creating sales
class FormSales(forms.Form):
    # Attributes for the form
    # NOTE: the date_creation and total fields will be created dynamically
name = forms.CharField(label="Nombre",
max_length=50,
required=True,
help_text="Ingrese su nombre")
description = forms.CharField(label="Descripción",
max_length=120,
help_text="Ingrese una descripción de su producto",
widget=forms.Textarea())
count = forms.IntegerField(label="Cantidad",min_value=0,required=True)
price = forms.DecimalField(label="Precio",min_value=0,required=True)
# Defaul value is True
paid_out = forms.BooleanField(label="Pagado",
initial=True,
help_text="Si es verdadero, está pagado",
widget=forms.CheckboxInput()) | 42.875 | 85 | 0.543246 | 982 | 0.95155 | 0 | 0 | 0 | 0 | 0 | 0 | 274 | 0.265504 |
6ff8a222216fa1ac81945884dc109adff1cb0094 | 2,159 | py | Python | src/vargenpath/pipeline.py | AldisiRana/VarGenPath | ff7285f5165855f8427de9ec30ade2a8fb1f0ca3 | [
"MIT"
]
| null | null | null | src/vargenpath/pipeline.py | AldisiRana/VarGenPath | ff7285f5165855f8427de9ec30ade2a8fb1f0ca3 | [
"MIT"
]
| 3 | 2019-11-11T12:34:03.000Z | 2019-11-13T13:37:40.000Z | src/vargenpath/pipeline.py | AldisiRana/VarGenPath | ff7285f5165855f8427de9ec30ade2a8fb1f0ca3 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
"""Pipeline for VarGenPath"""
from typing import Optional
from .constants import LINKSET_PATH, FILE_TYPES
from .utils import (
get_cytoscape_connection, get_associated_genes, var_genes_network, extend_vargen_network, save_session, save_image,
save_network
)
def get_vargenpath_network(
*,
variant_list: list,
network_name: Optional[str] = 'VarGenPath network',
linkset_path: Optional[str] = LINKSET_PATH,
session_path: Optional[str] = None,
image_path: Optional[str] = None,
extend_network: bool = True,
image_type: Optional[str] = 'svg',
network_path: Optional[str] = None,
network_file_path: Optional[str] = 'cyjs',
) -> dict:
"""
Pipeline for creating vargenpath network.
:param network_file_path: the type of network file to be saved.
:param network_path: if input path, the cytoscape network will be saved to this path.
:param variant_list: list of variants.
:param network_name: the name of the network.
:param linkset_path: the path to the linkset to extend network.
:param session_path: if input path, the cytoscape session will be saved to this path.
:param image_path: if input path, the image of the network will be saved to this path.
:param extend_network: if true, the network will be extended.
:param image_type: the type of the image to be saved.
:return: cytoscape network
"""
try:
cy = get_cytoscape_connection()
except Exception:
raise Exception('Uh-oh! Make sure that cytoscape is open then try again.')
vargen_df = get_associated_genes(variant_list)
network = var_genes_network(variants_genes_df=vargen_df, client=cy, network_name=network_name)
if extend_network:
network = extend_vargen_network(linkset_path, client=cy)
if session_path is not None:
save_session(session_file=session_path, client=cy)
if image_path is not None:
save_image(network_image=image_path, image_type=FILE_TYPES[image_type])
if network_path is not None:
save_network(network_path=network_path, file_type=FILE_TYPES[network_file_path])
return network
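# A minimal usage sketch (hedged: the variant identifiers are illustrative and
# Cytoscape must already be running with cyREST available):
#
#     network = get_vargenpath_network(
#         variant_list=['rs1042522', 'rs1800566'],
#         session_path='vargenpath.cys',
#     )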
| 39.981481 | 119 | 0.727652 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 853 | 0.39509 |
6ff8f022a6fe57527d82ea4903beacbf84f7acb1 | 1,485 | py | Python | ott/examples/fairness/models.py | MUCDK/ott | f6c79d964d275aed12a7f9b66aa2b118423dff71 | [
"Apache-2.0"
]
| 232 | 2021-01-18T15:05:20.000Z | 2022-03-26T01:22:01.000Z | ott/examples/fairness/models.py | RamonYeung/ott | 800de80d6b2a0faf4fc7977b0673674468c70e3f | [
"Apache-2.0"
]
| 20 | 2021-02-25T04:38:34.000Z | 2022-01-24T16:21:25.000Z | ott/examples/fairness/models.py | RamonYeung/ott | 800de80d6b2a0faf4fc7977b0673674468c70e3f | [
"Apache-2.0"
]
| 29 | 2021-02-25T04:40:25.000Z | 2022-01-29T18:31:17.000Z | # coding=utf-8
# Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A model for to embed structured features."""
from typing import Any, Tuple
import flax.linen as nn
import jax.numpy as jnp
class FeaturesEncoder(nn.Module):
"""Encodes structured features."""
input_dims: Tuple[int]
embed_dim: int = 32
@nn.compact
def __call__(self, x):
result = []
index = 0
for d in self.input_dims:
arr = x[..., index:index+d]
result.append(arr if d == 1 else nn.Dense(self.embed_dim)(arr))
index += d
return jnp.concatenate(result, axis=-1)
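# Shape example (illustrative): with input_dims=(1, 3) and embed_dim=32, an
# input of shape (batch, 4) is split into a (batch, 1) slice kept as-is and a
# (batch, 3) slice projected to (batch, 32), concatenating to (batch, 33).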
class AdultModel(nn.Module):
"""A model to predict if the income is above 50k (adult dataset)."""
encoder_cls: Any
hidden: Tuple[int] = (64, 64)
@nn.compact
def __call__(self, x, train: bool = True):
x = self.encoder_cls()(x)
for h in self.hidden:
x = nn.Dense(h)(x)
x = nn.relu(x)
x = nn.Dense(1)(x)
x = nn.sigmoid(x)
return x[..., 0]
| 27.5 | 74 | 0.676768 | 760 | 0.511785 | 0 | 0 | 485 | 0.326599 | 0 | 0 | 725 | 0.488215 |
6ff9bf08b760f07cf353cf2bb85e608d4d99b8bc | 699 | py | Python | src/create_hbjsons.py | karim-daw/pollination-app | ed87f3dbc2d93d25568707d6a6ad0ee35c5109b8 | [
"MIT"
]
| null | null | null | src/create_hbjsons.py | karim-daw/pollination-app | ed87f3dbc2d93d25568707d6a6ad0ee35c5109b8 | [
"MIT"
]
| null | null | null | src/create_hbjsons.py | karim-daw/pollination-app | ed87f3dbc2d93d25568707d6a6ad0ee35c5109b8 | [
"MIT"
]
| null | null | null | from models import shoe_box
def createHBjsons() -> None:
for i in range(4):
# increment on shoebox dimensions
_width = 4.0 + i*0.5
_height = 3.5 + i*0.1
_depth = 4.0 + i*0.5
# init shoe_box
        sb = shoe_box.Shoebox(width=_width, height=_height, depth=_depth)
# set grid size and offset
sb.gridSize = 0.5
sb.gridOffset = 0.75
# set window to wall ratio
sb.wwr = 0.4
# create room
sb.createRoom()
# create model
sb.createModel()
# save to hbjson
sb.saveToHBJson()
# run
if __name__ == "__main__":
# create models
createHBjsons()
| 19.416667 | 76 | 0.53505 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 174 | 0.248927 |
6ffae9b25573e5f7348c89e03b62b498cbca2ea9 | 184 | py | Python | reikna/core/__init__.py | ringw/reikna | 0f27f86e35a9f06405de2d99580f766a1b504562 | [
"MIT"
]
| 122 | 2015-05-01T12:42:34.000Z | 2021-09-30T22:47:59.000Z | lib/python/reikna-0.7.5/reikna/core/__init__.py | voxie-viewer/voxie | d2b5e6760519782e9ef2e51f5322a3baa0cb1198 | [
"MIT"
]
| 42 | 2015-05-04T16:55:47.000Z | 2021-09-18T04:53:34.000Z | lib/python/reikna-0.7.5/reikna/core/__init__.py | voxie-viewer/voxie | d2b5e6760519782e9ef2e51f5322a3baa0cb1198 | [
"MIT"
]
| 14 | 2015-05-01T19:22:52.000Z | 2021-09-30T22:48:03.000Z | from reikna.core.signature import Type, Annotation, Parameter, Signature
from reikna.core.computation import Computation
from reikna.core.transformation import Transformation, Indices
| 46 | 72 | 0.858696 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
6ffbf82ddb4fd7b31865d27adb16c802d9e91417 | 1,234 | py | Python | docs/setup/mysite/models/__init__.py | pauleveritt/pyramid_sqltraversal | 1853b3b30fc9bdd453ce5c74b6a67668b21c5321 | [
"MIT"
]
| 6 | 2015-10-21T20:39:42.000Z | 2016-09-03T15:37:28.000Z | docs/setup/mysite/models/__init__.py | pauleveritt/pyramid_sqltraversal | 1853b3b30fc9bdd453ce5c74b6a67668b21c5321 | [
"MIT"
]
| 1 | 2015-11-30T19:18:29.000Z | 2015-12-01T08:23:08.000Z | docs/setup/mysite/models/__init__.py | pauleveritt/pyramid_sqltraversal | 1853b3b30fc9bdd453ce5c74b6a67668b21c5321 | [
"MIT"
]
| null | null | null | from sqlalchemy import engine_from_config
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.schema import MetaData
import zope.sqlalchemy
from .node import Node
NAMING_CONVENTION = {
"ix": 'ix_%(column_0_label)s',
"uq": "uq_%(table_name)s_%(column_0_name)s",
"ck": "ck_%(table_name)s_%(constraint_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
"pk": "pk_%(table_name)s"
}
metadata = MetaData(naming_convention=NAMING_CONVENTION)
Base = declarative_base(metadata=metadata)
def get_session(transaction_manager, dbmaker):
dbsession = dbmaker()
zope.sqlalchemy.register(dbsession,
transaction_manager=transaction_manager)
return dbsession
def get_engine(settings, prefix='sqlalchemy.'):
return engine_from_config(settings, prefix)
def get_dbmaker(engine):
dbmaker = sessionmaker()
dbmaker.configure(bind=engine)
return dbmaker
def includeme(config):
settings = config.get_settings()
dbmaker = get_dbmaker(get_engine(settings))
config.add_request_method(
lambda r: get_session(r.tm, dbmaker),
'dbsession',
reify=True
)
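# Typical wiring (a sketch; the dotted name depends on where this package is
# installed): config.include('mysite.models') during Pyramid startup, after
# which views can use `request.dbsession`.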
| 27.422222 | 72 | 0.723663 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 223 | 0.180713 |
6ffc37d2645887ce4dd7940f03465b3689f39751 | 458 | py | Python | nngen/onnx/shape.py | RyusukeYamano/nngen | 9ed1f7fb83908794aa94d70287d89545d45fe875 | [
"Apache-2.0"
]
| 207 | 2019-11-12T11:42:25.000Z | 2022-03-20T20:32:17.000Z | nngen/onnx/shape.py | RyusukeYamano/nngen | 9ed1f7fb83908794aa94d70287d89545d45fe875 | [
"Apache-2.0"
]
| 31 | 2019-11-25T07:33:30.000Z | 2022-03-17T12:34:34.000Z | nngen/onnx/shape.py | RyusukeYamano/nngen | 9ed1f7fb83908794aa94d70287d89545d45fe875 | [
"Apache-2.0"
]
| 29 | 2019-11-07T02:25:48.000Z | 2022-03-12T16:22:57.000Z | from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def Shape(visitor, node):
input = visitor.visit(node.input[0])
shape = input.shape
if (input.get_layout() is not None and input.get_onnx_layout() is not None and
input.get_layout() != input.get_onnx_layout()):
shape = [shape[input.get_layout().index(l)] for l in input.get_onnx_layout()]
return tuple(shape)
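# Layout note (example): if the tensor's internal layout is 'NHWC' while its
# ONNX layout is 'NCHW', the list comprehension above permutes the stored
# shape back into the ONNX order before returning it.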
| 28.625 | 85 | 0.713974 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
6fff99cc890883af4358d405c2662b44fb123105 | 17,072 | py | Python | nales/NDS/interfaces.py | Jojain/Nales | dc1073967ba1a8c5bbc226dbfe7b2fb8afd44bd9 | [
"MIT"
]
| 5 | 2022-03-07T23:21:42.000Z | 2022-03-29T17:40:36.000Z | nales/NDS/interfaces.py | Jojain/Nales | dc1073967ba1a8c5bbc226dbfe7b2fb8afd44bd9 | [
"MIT"
]
| 3 | 2022-03-29T17:33:27.000Z | 2022-03-29T17:40:10.000Z | nales/NDS/interfaces.py | Jojain/Nales | dc1073967ba1a8c5bbc226dbfe7b2fb8afd44bd9 | [
"MIT"
]
| null | null | null | import typing
from typing import (
Any,
Callable,
Dict,
Iterable,
List,
Literal,
Optional,
Set,
Tuple,
Union,
)
from ncadquery import Workplane
from OCP.Quantity import Quantity_NameOfColor
from OCP.TCollection import TCollection_ExtendedString
from OCP.TDataStd import TDataStd_Name
from OCP.TDF import TDF_Label, TDF_TagSource
from OCP.TNaming import TNaming_Builder, TNaming_NamedShape
from OCP.TopoDS import TopoDS_Shape
from OCP.TPrsStd import TPrsStd_AISPresentation
from PyQt5.QtCore import QPersistentModelIndex, Qt
from nales.nales_cq_impl import NALES_TYPES, CQMethodCall, Part
from nales.utils import TypeChecker
from nales.widgets.msg_boxs import StdErrorMsgBox
class NNode:
def __init__(self, name=None, parent=None):
self._parent = parent
self._columns_nb = 1
self._childs = []
if parent:
self._row = len(parent._childs)
parent._childs.append(self)
parent._columns_nb = max(self.column, parent.column)
self._label = TDF_TagSource.NewChild_s(parent._label)
self._name = name
TDataStd_Name.Set_s(self._label, TCollection_ExtendedString(name))
else:
self._label = TDF_Label()
self._name = "root"
self._row = 0
def _create_sublabel(self):
"""
Create an additionnal OCCT label that is needed if you want to display several shapes
(It's one shape per label)
"""
sublabel = TDF_TagSource.NewChild_s(self._label)
TDataStd_Name.Set_s(
sublabel, TCollection_ExtendedString(f"{self.name} subshape")
)
return sublabel
def walk(self, node: "NNode" = None) -> "NNode":
"""
Walks all the node starting from 'node'
If 'node' is None, starts from the called node
"""
base_node = node if node else self
yield base_node
for child in base_node.childs:
yield from self.walk(child)
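    # Example: list(root.walk()) yields `root` first, then its descendants in
    # depth-first, pre-order traversal.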
    def find(self, node_name: str, node_type=None) -> Optional["NNode"]:
for node in self.walk():
if node.name == node_name:
if node_type:
if isinstance(node, node_type):
return node
else:
return node
def data(self, column):
if column >= 0 and column < len(self._data):
return self._data[column]
@property
def column(self):
return self._columns_nb
def child_count(self):
return len(self._childs)
def child(self, row) -> "NNode":
if row >= 0 and row < self.child_count():
return self._childs[row]
def has_children(self):
if len(self._childs) != 0:
return True
else:
return False
@property
def parent(self):
return self._parent
@property
def childs(self):
return self._childs
@childs.setter
def childs(self, new_childs):
self._childs = new_childs
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def root_node(self):
root = self.parent
while True:
if root.parent:
root = root.parent
else:
return root
@property
def row(self):
return self._row
class NPart(NNode):
def __init__(self, name: str, parent):
super().__init__(name, parent=parent)
self.visible = True
self._solid = TopoDS_Shape()
self._active_shape = None
self.display()
@property
def part(self):
return self.childs[-1].part_obj
def _update_display_shapes(self):
try:
solid = self.part._findSolid().wrapped
except ValueError:
solid = TopoDS_Shape()
self._solid = solid
        if (active_shape := self.part._val().wrapped) is not solid and isinstance(
            active_shape, TopoDS_Shape
        ):
self._active_shape = active_shape
else:
self._active_shape = None
def hide(self):
self.visible = False
self.ais_solid.Erase(remove=True)
self.ais_active_shape.Erase(remove=True)
self.root_node._viewer.Update()
def display(self, update=False):
"""
Builds the display object and attach it to the OCAF tree
"""
if update:
self.ais_solid.Erase(remove=True)
if self._active_shape:
self.ais_active_shape.Erase(remove=True)
self._update_display_shapes()
# self.root_node._viewer.Update()
solid_bldr = TNaming_Builder(self._label) # _label is TDF_Label
solid_bldr.Generated(self._solid)
solid_shape_attr = solid_bldr.NamedShape()
self.ais_solid = TPrsStd_AISPresentation.Set_s(solid_shape_attr)
if self._active_shape:
active_shape_bldr = TNaming_Builder(self._create_sublabel())
active_shape_bldr.Generated(self._active_shape)
active_shape_attr = active_shape_bldr.NamedShape()
self.ais_active_shape = TPrsStd_AISPresentation.Set_s(active_shape_attr)
self.ais_active_shape.Display(update=True)
self.root_node._viewer.Update()
# There is color mixing due to overlapping, maybe this can help to solve the issue :
# https://dev.opencascade.org/doc/refman/html/class_a_i_s___interactive_context.html#a1e0f9550cc001adbb52329ac243bb3b2
# It's considered good enough for now
self.ais_solid.SetTransparency(0.9)
self.ais_solid.Display()
self.root_node._viewer.Update()
self.visible = True
def update(self):
"""
When called this method rebuild the entire Part, by calling each child Operation
"""
child_ops = self.childs
        for child_op in child_ops:
            child_op.update()  # NOperation.update() takes no positional argument
def remove_operation(self, row: int):
"""
Remove an operation from the operation tree
"""
ops: List[NOperation] = self.childs
ops.pop(row)
ops[row - 1].update_from_node()
class NShape(NNode):
def __init__(self, name, cq_shape, parent: NNode):
self._occt_shape = shape = cq_shape.wrapped
self.shape = cq_shape
self.visible = True
super().__init__(name, parent=parent)
self.bldr = TNaming_Builder(self._label) # _label is TDF_Label
self.bldr.Generated(shape)
named_shape = self.bldr.NamedShape()
self._label.FindAttribute(TNaming_NamedShape.GetID_s(), named_shape)
self.ais_shape = TPrsStd_AISPresentation.Set_s(named_shape)
self.ais_shape.SetTransparency(0.5)
self.ais_shape.SetColor(Quantity_NameOfColor.Quantity_NOC_ALICEBLUE)
self.ais_shape.Display(update=True)
def hide(self):
self.visible = False
self.ais_shape.Erase()
self.root_node._viewer.Update()
def display(self, update=False):
"""
Builds the display object and attach it to the OCAF tree
"""
if update:
self.ais_shape.Erase(remove=True)
self.root_node._viewer.Update()
self.bldr = TNaming_Builder(self._label) # _label is TDF_Label
self.bldr.Generated(self._occt_shape)
named_shape = self.bldr.NamedShape()
self._label.FindAttribute(TNaming_NamedShape.GetID_s(), named_shape)
self.ais_shape = TPrsStd_AISPresentation.Set_s(named_shape)
self.ais_shape.SetTransparency(0.5)
self.ais_shape.SetColor(Quantity_NameOfColor.Quantity_NOC_ALICEBLUE)
self.ais_shape.Display(update=True)
self.root_node._viewer.Update()
self.visible = True
def update(self):
"""
Update the shape object
"""
self._occt_shape = self.shape.wrapped
self.display(True)
class NShapeOperation(NNode):
def __init__(self, maker_method: Callable, shape_class, parent=None):
super().__init__(maker_method.__name__, parent)
self.maker_method = maker_method
self.shape_class = shape_class
def update(self) -> None:
args = [child.value for child in self.childs]
self.parent.shape = self.maker_method(self.shape_class, *args)
self.parent.update()
class NOperation(NNode):
def __init__(
self, method_name: str, part_obj: Part, parent: NNode, operation: CQMethodCall
):
super().__init__(method_name, parent=parent)
self.part_obj = part_obj
self.operation = operation
self.method = getattr(part_obj, method_name).__func__
if method_name == "Workplane":
self._root_operation = True
else:
self._root_operation = False
def update_from_node(self):
"""
Update the Part from this node
It recomputes every operation from this node to the end
"""
ops: List[NOperation] = self.parent.childs[self.row :]
for op in ops:
op.update()
self.parent.display(update=True)
def _update_init_part(self):
"""
This method is called when the user try to update __init__ method arguments
There is a special handling because it is a bit different from the regular methods
"""
args = [
child.value if not child.is_linked("obj") else child.linked_obj
for child in self.childs
]
try:
self.method(self.part_obj, *args, internal_call=True)
except Exception as exc:
StdErrorMsgBox(repr(exc))
def update(self) -> bool:
"""
Update the CQ objects stack from param modification in the GUI view
"""
# Special handling of __init__ method
if self.row == 0:
self._update_init_part()
return True
previous_operations: List[NOperation] = self.parent.childs[: self.row]
old_part_obj = previous_operations[-1].part_obj
args = [
child.value if not child.is_linked("obj") else child.linked_obj
for child in self.childs
]
try:
self.part_obj = self.method(old_part_obj, *args, internal_call=True)
return True
except ValueError as exc: # we update parent operations until pending wires have reset
if exc.args[0] == "No pending wires present":
tried_updates = [self]
# recursively call parent ops and store all the failed updates to update them again afterwards
while (tried_update := previous_operations[-1].update()) is False:
tried_updates.append(tried_update)
for tried_update in tried_updates:
tried_update.update()
else:
StdErrorMsgBox(repr(exc))
return False
except Exception as exc:
StdErrorMsgBox(repr(exc))
return False
def _restore_pending_wires(self):
index = 2
previous_ops = self.parent.childs[: self._row]
while len(self.parent.part.ctx.pendingWires) == 0:
op = previous_ops[-index]
            op.update()  # NOperation.update() takes no positional argument
index += 1
class NShapeArgument(NNode):
def __init__(self, name=None, parent=None):
super().__init__(name, parent)
class NArgument(NNode):
"""
The underlying data of an Argument is as follow :
name : cq argument name
value : value
linked_param : the name of the parameter linked to this arg, None if not connected to any
type: value type : a voir si je garde ca
If the Argument is linked to a Parameter, the Parameter name is displayed
"""
def __init__(self, arg_name: str, value, arg_type, parent: NNode, kwarg=False):
super().__init__(arg_name, parent=parent)
self._name = arg_name
self._type = arg_type
self._value = value
self._typechecker = TypeChecker(arg_type)
self._kwarg = kwarg # Boolean indicating if the arg is a kwarg or not
self._linked_param = None
self._linked_nobj_idx: QPersistentModelIndex = None
self._param_name_pidx = None
self._param_value_pidx = None
def link(
self,
by: Literal["param", "obj"],
value: Union[Tuple, QPersistentModelIndex, Any],
):
"""
Link this parameter to an object in available in the data model
"""
if by == "param":
raw_val = value[1]
if not self.is_type_compatible(raw_val):
raise TypeError("Couldn't link the param")
self._linked_param = value[0]
self._value = value[1]
self._param_name_pidx = value[2]
self._param_value_pidx = value[3]
else:
self._linked_nobj_idx = value
def unlink_param(self):
self._linked_param = None
self._param_name_pidx = None
self._param_value_pidx = None
def is_kwarg(self):
return self._kwarg
def is_linked(self, by: str = None):
if by == "obj":
return True if self._linked_nobj_idx else False
elif by == "param":
return True if self._linked_param else False
elif by is None:
if self._linked_param or self._linked_nobj_idx:
return True
else:
return False
else:
raise ValueError("Argument 'by' must be either 'obj' or 'param'")
def is_optional_type(self) -> bool:
"""
Indicates if the NArgument is optional, i.e the function signature looks something like :
method(nargument:Union[float,None] = None) or method(nargument:Optional[float] = None)
"""
if self.is_kwarg():
origin = typing.get_origin(self._type)
if origin == Optional:
return True
if origin == Union:
for allowed_type in typing.get_args(self._type):
if allowed_type == type(None):
return True
return False
else:
return False
else:
return False
def is_literal_type(self) -> bool:
origin = typing.get_origin(self.type)
if self.type == str or origin == Literal:
return True
if origin == Union:
possible_types = typing.get_args(self.type)
for possible_type in possible_types:
if possible_type == str or possible_type == Literal:
return True
return False
def is_type_compatible(self, value: str) -> bool:
return self._typechecker.check(value)
def _cast(self, value: Any):
if type(value) == self._type:
return value
return self._typechecker.cast(value)
@property
def type(self):
return self._type
@type.setter
def type(self, value):
self._type = value
@property
def linked_param(self):
if self.is_linked():
return self._linked_param
else:
raise ValueError("This argument is not linked to a param")
@property
def linked_node(self):
if not self._linked_nobj_idx:
raise ValueError("This argument isn't linked to any node")
else:
return self._linked_nobj_idx.data(Qt.EditRole)
@property
def linked_obj(self):
if self.is_linked(by="obj"):
if hasattr(self.linked_node, "part"):
return self.linked_node.part
elif hasattr(self.linked_node, "shape"):
return self.linked_node.shape
else:
raise NotImplementedError(
"This argument is linked to a object that is not supported yet"
)
else:
raise ValueError("This argument is not linked to an object")
@property
def columns_nb(self):
return 1
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def value(self):
if self.is_optional_type() and self._value is None:
return None
if self.is_linked(by="param"):
return self._cast(self._param_value_pidx.data())
elif self.is_linked(by="obj"):
return self.linked_obj
elif not isinstance(self._value, str):
# Upon argument creation self._value is already of the right type
return self._value
else:
# If self._value is a string, means the users modified the argument in the GUI
return self._cast(self._value)
@value.setter
def value(self, value):
self._value = value
@property
def linked_param(self):
return self._linked_param
| 30.594982 | 126 | 0.606666 | 16,335 | 0.95683 | 323 | 0.01892 | 2,647 | 0.155049 | 0 | 0 | 2,805 | 0.164304 |
b501613aec75d3cca56c1a86620e64aefbb4d375 | 5,134 | py | Python | plane_1.0/plane/hero.py | misaka46/Aircraft-war | f83cdc5237c01889d109c32b1e27aaaf2c118b94 | [
"MIT"
]
| null | null | null | plane_1.0/plane/hero.py | misaka46/Aircraft-war | f83cdc5237c01889d109c32b1e27aaaf2c118b94 | [
"MIT"
]
| null | null | null | plane_1.0/plane/hero.py | misaka46/Aircraft-war | f83cdc5237c01889d109c32b1e27aaaf2c118b94 | [
"MIT"
]
| null | null | null | from flyingObject import FlyingObject
from bullet import Bullet
import random
class Hero(FlyingObject):
"""英雄机"""
    index = 2  # animation frame marker
def __init__(self, screen, images):
# self.screen = screen
        self.images = images  # array of hero plane images (Surface instances)
# image = pygame.image.load(images[0])
image = images[0]
x = screen.get_rect().centerx
y = screen.get_rect().bottom
super(Hero, self).__init__(screen, x, y, image)
        self.life = 3  # life count is 3
        self.doubleFire = 100  # initial firepower value
self.RED = 1
self.PURPLE = 2
self.Fire_MOD = self.RED
self.Fire_speed_RED = 40
self.Fire_speed_PURPLE = 40
self.mod = 5
self.difficulty = 0
def addDifficulty(self):
self.difficulty+=1
def shoot_speed(self,x):
if x==self.RED:
return self.Fire_speed_RED
else:
return self.Fire_speed_PURPLE
def getFire_MOD(self):
"""返回武器类型,RED为1 PURPLE为2"""
return self.Fire_MOD
def setFire_MOD(self, x):
"""设置武器类型"""
if x == self.RED:
if self.getFire_MOD() == x:
self.addFire()
if self.shoot_speed(x) > 30:
self.Fire_speed_RED -= 10
else:
self.clearFire()
self.Fire_speed_PURPLE = 20
self.Fire_MOD = self.RED
if x == self.PURPLE:
if self.getFire_MOD() == x:
self.addFire()
if self.shoot_speed(x) > 20:
self.Fire_speed_PURPLE -= 10
else:
self.clearFire()
self.Fire_speed_RED = 20
self.Fire_MOD = self.PURPLE
def getFire(self):
"""获取火力值"""
return self.doubleFire
def setFire(self):
"""设置火力值"""
self.doubleFire = 400
def addFire(self):
"""增加火力值"""
self.doubleFire += 100
def clearFire(self):
"""清空火力值"""
self.doubleFire = 100
def addLife(self):
"""增命"""
self.life += 1
def sublife(self):
"""减命"""
self.life -= 1
def getLife(self):
"""获取生命值"""
return self.life
def reLife(self):
"""重置值"""
self.life = 3
self.clearFire()
def outOfBounds(self):
return False
def step(self):
"""动态显示飞机"""
if (len(self.images) > 0):
# fclock = pygame.time.Clock()
            # fps = 10  # frame rate
# fclock.tick(fps)
Hero.index += 0.3
Hero.index %= len(self.images)
            # self.image = pygame.image.load(self.images[int(Hero.index)])  # switch frame image
            self.image = self.images[int(Hero.index)]  # switch frame image
def move(self, x, y):
self.x = x - self.width / 2
self.y = y - self.height / 2
def shoot(self, image):
"""英雄机射击"""
xStep = int(self.width / 4 - 5)
yStep = 20
if self.mod > 0:
self.mod -= 1
else:
self.mod = 5
if self.getFire_MOD() == 1:
#print(self.getFire())
if self.doubleFire >= 400:
heroBullet = [Bullet(self.screen, image, (self.x - (self.mod - 1) * xStep), self.y - yStep),
Bullet(self.screen, image, (self.x + 2 * xStep), self.y - yStep),
Bullet(self.screen, image, (self.x + (self.mod + 3) * xStep), self.y - yStep)]
return heroBullet
elif self.doubleFire >= 200:
heroBullet = [Bullet(self.screen, image, (self.x - (5-self.mod - 1) * xStep), self.y - yStep),
Bullet(self.screen, image, (self.x + (5-self.mod + 3) * xStep), self.y - yStep)]
return heroBullet
else:
heroBullet = [Bullet(self.screen, image, self.x + 2 * xStep, self.y - yStep)]
return heroBullet
if self.getFire_MOD() == 2:
if self.doubleFire >= 400:
heroBullet = [Bullet(self.screen, image, self.x + 1 * xStep, self.y - yStep),
Bullet(self.screen, image, self.x + 2 * xStep, self.y - yStep),
Bullet(self.screen, image, self.x + 3 * xStep, self.y - yStep)]
return heroBullet
elif self.doubleFire >=200:
heroBullet = [Bullet(self.screen, image, self.x + 1 * xStep, self.y - yStep),
Bullet(self.screen, image, self.x + 3 * xStep, self.y - yStep)]
return heroBullet
else:
heroBullet = [Bullet(self.screen, image, self.x + 2 * xStep, self.y - yStep)]
return heroBullet
def hit(self, other):
"""英雄机和其他飞机"""
x1 = other.x - self.width / 2
x2 = other.x + self.width / 2 + other.width
y1 = other.y - self.height / 2
y2 = other.y + self.height / 2 + other.height
x = self.x + self.width / 2
y = self.y + self.height
return x > x1 and x < x2 and y > y1 and y < y2
| 31.304878 | 110 | 0.494351 | 5,255 | 0.98482 | 0 | 0 | 0 | 0 | 0 | 0 | 630 | 0.118066 |
b5017203cb6c1ccd2ec216b379d968a6634173ea | 21,118 | py | Python | chip9-emulator/emulator.py | Quphoria/CHIP9 | 1ead51ea717e30e927f9b2b811fadd0b0571d354 | [
"MIT"
]
| 1 | 2021-12-12T21:52:27.000Z | 2021-12-12T21:52:27.000Z | chip9-emulator/emulator.py | Quphoria/CHIP9 | 1ead51ea717e30e927f9b2b811fadd0b0571d354 | [
"MIT"
]
| null | null | null | chip9-emulator/emulator.py | Quphoria/CHIP9 | 1ead51ea717e30e927f9b2b811fadd0b0571d354 | [
"MIT"
]
| null | null | null | import sys
import pygame as pg
import numpy as np
import random
import time
pic = np.zeros(shape=(128,64))
width = 128
height = 64
refresh_rate = 60
interval = 1 / refresh_rate
bootrom_file = "bootrom0"
rom_file = "rom"
# rom_file = "hello_world"
debug = False
pg.display.init()
display = pg.display.set_mode((width*4, height*4), flags=0, depth=8)
screen = pg.Surface((width, height), flags=0, depth=8)
pg.transform.scale(screen, (width*4, height*4), display)
def screen_update(silent=True):
pg.transform.scale(screen, (width*4, height*4), display)
pg.display.flip()
if not silent:
print("Screen Update")
def screen_clear():
screen.fill((0,0,0))
#screen_update()
def screen_draw_line(x, y, pixels):
# print("----------DRAW----------")
# print("x:",x)
# print("y:",y)
# print("pix:",bin(pixels))
j = 0b10000000
for i in range(8):
x_pos = x + i
y_pos = y
if x_pos >= 0 and x_pos < width:
if y_pos >= 0 and y_pos < height:
if pixels & j:
pg.draw.rect(screen, 255, pg.Rect(x_pos,y_pos,1,1))
else:
pg.draw.rect(screen, 0, pg.Rect(x_pos,y_pos,1,1))
j = j >> 1
#screen_update()
screen_clear()
# screen_draw_line(0,0,0b10101011)
# input()
class memByte:
def __init__(self):
self.value = 0x00000000
def write(self, value):
self.value = value & 0xff
def readUpper(self):
return (self.value & 0b11110000) >> 4
def readLower(self):
return self.value & 0b1111
class Flags:
def __init__(self):
self.z = 0
self.n = 0
self.h = 0
self.c = 0
def setZero(self):
self.z = 1
def clearZero(self):
self.z = 0
def setNeg(self):
self.n = 1
def clearNeg(self):
self.n = 0
def setHalf(self):
self.h = 1
def clearHalf(self):
self.h = 0
def setCarry(self):
self.c = 1
def clearCarry(self):
self.c = 0
def clearFlags(self):
self.z = 0
self.n = 0
self.h = 0
self.c = 0
class reg:
    def __init__(self):
        # registers power up holding pseudo-random garbage, like real hardware
        self.value = random.randint(0, 255)
def send(self):
sys.stdout.write(chr(self.value))
sys.stdout.flush()
class Dreg:
def __init__(self, r1, r2):
self.r1 = r1
self.r2 = r2
def getvalue(self):
self.value = (self.r1.value << 8) + self.r2.value
def setvalue(self):
self.r1.value = self.value >> 8
self.r2.value = self.value & 0xff
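# Dreg presents two 8-bit registers as a single 16-bit pair: callers must use
# getvalue() before reading .value and setvalue() after writing it so the
# combined value and the underlying registers stay in sync.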
class regPC:
def __init__(self):
self.value = 0x0
def inc(self, length=1):
self.value += length
self.value = self.value & 0xffff
def jump(self, address):
self.value = address & 0xffff
class regSP:
def __init__(self):
self.value = 0xfffe
def inc(self):
self.value += 2
self.value = self.value & 0xffff
def dec(self):
self.value -= 2
def setvalue(self):
#print("SPSET:",hex(self.value))
pass # JUST TO MAKE LDX SIMPLER
ONE_REG = reg()
ONE_REG.value = 1
FL = Flags()
halt = False
A = reg()
B = reg()
C = reg()
D = reg()
E = reg()
H = reg()
L = reg()
BC = Dreg(B, C)
DE = Dreg(D, E)
HL = Dreg(H, L)
#E.value = 0x1 # Randomness loop
PC = regPC()
SP = regSP()
memory = []
jumped = False
print("RESERVING MEMORY...")
for i in range(0x10000):
memory.append(memByte())
print("MEMORY RESERVED.")
print("LOADING MEMORY...")
f = open(bootrom_file, "rb")
rom_data = f.read()
f.close()
for i in range(len(rom_data)):
memory[i+0x0].value = rom_data[i]
f = open(rom_file, "rb")
rom_data = f.read()
f.close()
for i in range(len(rom_data)):
memory[i+0x597].value = rom_data[i]
print("MEMORY LOADED.")
def LDI(R, mem=False):
PC.inc()
if not mem:
R.value = memory[PC.value].value
else:
R.getvalue()
memory[R.value].value = memory[PC.value].value
def LDX(R):
PC.inc()
low = memory[PC.value].value
PC.inc()
R.value = low + (memory[PC.value].value << 8)
R.setvalue()
def PUSH_R(R, mem=False):
if not mem:
memory[SP.value].value = R.value
else:
R.getvalue()
memory[SP.value].value = memory[R.value].value
SP.dec()
def PUSH_RR(RR):
RR.getvalue()
memory[SP.value].value = RR.value & 0xff
memory[SP.value + 1].value = RR.value >> 8
SP.dec()
def POP_R(R, mem=False):
SP.inc()
if not mem:
#print(hex(SP.value))
R.value = memory[SP.value].value
else:
R.getvalue()
memory[R.value].value = memory[SP.value].value
def POP_RR(RR):
SP.inc()
RR.value = memory[SP.value].value + (memory[SP.value + 1].value << 8)
RR.setvalue()
MOV_REGISTERS = [B, C, D, E, H, L, HL, A]
MOVB_OPCODES = [0x09, 0x19, 0x29, 0x39, 0x49, 0x59, 0x69, 0x79]
MOVC_OPCODES = [0x89, 0x99, 0xA9, 0xB9, 0xC9, 0xD9, 0xE9, 0xF9]
MOVD_OPCODES = [0x0A, 0x1A, 0x2A, 0x3A, 0x4A, 0x5A, 0x6A, 0x7A]
MOVE_OPCODES = [0x8A, 0x9A, 0xAA, 0xBA, 0xCA, 0xDA, 0xEA, 0xFA]
MOVH_OPCODES = [0x0B, 0x1B, 0x2B, 0x3B, 0x4B, 0x5B, 0x6B, 0x7B]
MOVL_OPCODES = [0x8B, 0x9B, 0xAB, 0xBB, 0xCB, 0xDB, 0xEB, 0xFB]
MOVMHL_OPCODES = [0x0C, 0x1C, 0x2C, 0x3C, 0x4C, 0x5C, 0x6C, 0x7C]
MOVA_OPCODES = [0x8C, 0x9C, 0xAC, 0xBC, 0xCC, 0xDC, 0xEC, 0xFC]
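# Each MOV*_OPCODES row lists the opcodes for one destination; the column
# index selects the source from MOV_REGISTERS (B, C, D, E, H, L, (HL), A).
# Example: 0x19 is at index 1 of MOVB_OPCODES, so it encodes "MOV B, C".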
def MOV(R1, R2index, mem=False):
R2 = MOV_REGISTERS[R2index]
if not mem:
if R2index == 6:
R2.getvalue()
R1.value = memory[R2.value].value
else:
R1.value = R2.value
else:
memory[R1.value].value = R2.value
R1.setvalue()
def MOV_RR(RR1, RR2):
RR2.getvalue()
RR1.value = RR2.value
RR1.setvalue()
def ADD_8(value1, value2):
nib = (value1 & 0xf) + (value2 & 0xf)
value = value1 + value2
FL.clearFlags()
if value & 0xff == 0:
FL.setZero()
if value & 0b10000000:
FL.setNeg()
if nib & 0xf0:
FL.setHalf()
if value >> 8:
FL.setCarry()
return value & 0xff
def ADD_R(R, mem=False):
if not mem:
value = ADD_8(A.value, R.value)
R.value = value
else:
R.getvalue()
value = ADD_8(A.value, memory[R.value].value)
memory[R.value].value = value
def ADD_16(value1, value2):
nib = (value1 & 0xf) + (value2 & 0xf)
value = value1 + value2
FL.clearFlags()
if value & 0xffff == 0:
FL.setZero()
if value & 0b1000000000000000:
FL.setNeg()
if nib & 0xf0:
FL.setHalf()
if value >> 16:
FL.setCarry()
return value & 0xffff
def ADDX_RR(RR):
RR.getvalue()
value = ADD_16(A.value, RR.value)
RR.value = value
RR.setvalue()
def SUB_8(value1, value2):
value = value1 - value2
if value < 0:
value += 0x100
FL.clearFlags()
if value == 0:
FL.setZero()
if value & 0b10000000:
FL.setNeg()
if (value1 & 0xf) <= (value2 & 0xf):
FL.setHalf()
if value1 <= value2:
FL.setCarry()
return value & 0xff
def SUB_R(R, compare_only, mem=False):
if not mem:
value = SUB_8(R.value, A.value)
if not compare_only:
R.value = value
else:
R.getvalue()
value = SUB_8(memory[R.value].value, A.value)
if not compare_only:
memory[R.value].value = value
def INC(R, mem=False):
if not mem:
value = ADD_8(ONE_REG.value, R.value)
R.value = value
else:
R.getvalue()
value = ADD_8(ONE_REG.value, memory[R.value].value)
memory[R.value].value = value
def DEC(R, mem=False):
if not mem:
value = SUB_8(R.value, ONE_REG.value)
R.value = value
else:
R.getvalue()
value = SUB_8(memory[R.value].value, ONE_REG.value)
memory[R.value].value = value
def AND_8(value1, value2):
value = value1 & value2
FL.clearFlags()
if value == 0:
FL.setZero()
if value & 0b10000000:
FL.setNeg()
return value & 0xff
def AND_R(R, mem=False):
if not mem:
value = AND_8(A.value, R.value)
R.value = value
else:
R.getvalue()
value = AND_8(A.value, memory[R.value].value)
memory[R.value].value = value
def OR_8(value1, value2):
value = value1 | value2
FL.clearFlags()
if value == 0:
FL.setZero()
if value & 0b10000000:
FL.setNeg()
return value & 0xff
def OR_R(R, mem=False):
if not mem:
value = OR_8(A.value, R.value)
R.value = value
else:
R.getvalue()
value = OR_8(A.value, memory[R.value].value)
memory[R.value].value = value
def XOR_8(value1, value2):
value = value1 ^ value2
FL.clearFlags()
if value == 0:
FL.setZero()
if value & 0b10000000:
FL.setNeg()
return value & 0xff
def XOR_R(R, mem=False):
if not mem:
value = XOR_8(A.value, R.value)
R.value = value
else:
R.getvalue()
value = XOR_8(A.value, memory[R.value].value)
memory[R.value].value = value
def CMPS(R, mem=False):
if not mem:
Rval = R.value
if Rval & 0b10000000:
Rval = - ((0x100 - Rval) & 0xff)
Aval = A.value
if Aval & 0b10000000:
Aval = - ((0x100 - Aval) & 0xff)
FL.clearFlags()
if Rval == Aval:
FL.setZero()
elif Rval < Aval:
FL.setNeg()
else:
R.getvalue()
Rval = memory[R.value].value
if Rval & 0b10000000:
Rval = - ((0x100 - Rval) & 0xff)
Aval = A.value
if Aval & 0b10000000:
Aval = - ((0x100 - Aval) & 0xff)
FL.clearFlags()
if Rval == Aval:
FL.setZero()
elif Rval < Aval:
FL.setNeg()
def JUMP():
PC.inc()
low = memory[PC.value].value
PC.inc()
high = memory[PC.value].value
global jumped
jumped = True
PC.value = (high << 8) + low
print("JUMP:",hex((high << 8) + low))
def REL_JUMP():
PC.inc()
value = memory[PC.value].value
if value & 0b10000000:
value = - ((0x100 - value) & 0xff)
# ACCORDING TO DOCUMENTATION RELATIVE JUMPS USE THE +2 PC INC
PC.value += value
screen_update()
last_update = time.time()
while not halt:
b_up = memory[PC.value].readUpper()
b_down = memory[PC.value].readLower()
b_val = memory[PC.value].value
jumped = False
if time.time() > last_update + interval:
screen_update()
last_update = time.time()
# Handle pygame events
for event in pg.event.get():
# print("EVENT:",event.type)
# input()
pass
    if debug:
        pass  # input()  # uncomment to single-step while debugging
if debug or False:
print(hex(PC.value), hex(b_val))
# if b_val in [0x86, 0x96, 0xA6, 0xB6, 0xC6, 0xD6, 0xE6, 0xF6]:
# print("CMP R")
# input()
# if b_val == 0xF7:
# print("CMPI")
# input()
# HCF (HALT)
if b_val == 0x6C:
halt = True
# LDI R, xx
if b_val == 0x20:
LDI(B)
elif b_val == 0x30:
LDI(C)
elif b_val == 0x40:
LDI(D)
elif b_val == 0x50:
LDI(E)
elif b_val == 0x60:
LDI(H)
elif b_val == 0x70:
LDI(L)
elif b_val == 0x80:
LDI(HL, mem=True)
elif b_val == 0x90:
LDI(A)
# LDX RR, xxyy
elif b_val == 0x21:
LDX(BC)
elif b_val == 0x31:
LDX(DE)
elif b_val == 0x41:
LDX(HL)
elif b_val == 0x22:
LDX(SP)
# PUSH R
elif b_val == 0x81:
PUSH_R(B)
elif b_val == 0x91:
PUSH_R(C)
elif b_val == 0xA1:
PUSH_R(D)
elif b_val == 0xB1:
PUSH_R(E)
elif b_val == 0xC1:
PUSH_R(H)
elif b_val == 0xD1:
PUSH_R(L)
elif b_val == 0xC0:
PUSH_R(HL, mem=True)
elif b_val == 0xD0:
PUSH_R(A)
# PUSH RR
elif b_val == 0x51:
PUSH_RR(BC)
elif b_val == 0x61:
PUSH_RR(DE)
elif b_val == 0x71:
PUSH_RR(HL)
# POP R
elif b_val == 0x82:
POP_R(B)
elif b_val == 0x92:
POP_R(C)
elif b_val == 0xA2:
POP_R(D)
elif b_val == 0xB2:
POP_R(E)
elif b_val == 0xC2:
POP_R(H)
elif b_val == 0xD2:
POP_R(L)
elif b_val == 0xC3:
POP_R(HL, mem=True)
elif b_val == 0xD3:
POP_R(A)
# POP RR
elif b_val == 0x52:
POP_RR(BC)
elif b_val == 0x62:
POP_RR(DE)
elif b_val == 0x72:
POP_RR(HL)
# MOV R1, R2
elif b_val in MOVB_OPCODES:
MOV(B, MOVB_OPCODES.index(b_val))
elif b_val in MOVC_OPCODES:
MOV(C, MOVC_OPCODES.index(b_val))
elif b_val in MOVD_OPCODES:
MOV(D, MOVD_OPCODES.index(b_val))
elif b_val in MOVE_OPCODES:
MOV(E, MOVE_OPCODES.index(b_val))
elif b_val in MOVH_OPCODES:
MOV(H, MOVH_OPCODES.index(b_val))
elif b_val in MOVL_OPCODES:
MOV(L, MOVL_OPCODES.index(b_val))
elif b_val in MOVMHL_OPCODES:
MOV(HL, MOVMHL_OPCODES.index(b_val), mem=True)
elif b_val in MOVA_OPCODES:
MOV(A, MOVA_OPCODES.index(b_val))
# MOV RR1, RR2
elif b_val == 0xED:
MOV_RR(HL, BC)
elif b_val == 0xFD:
MOV_RR(HL, DE)
# CLRFLAG
elif b_val == 0x08:
FL.clearFlags()
# SETFLAG f, x
elif b_val == 0x18:
FL.setZero()
elif b_val == 0x28:
FL.clearZero()
elif b_val == 0x38:
FL.setNeg()
elif b_val == 0x48:
FL.clearNeg()
elif b_val == 0x58:
FL.setHalf()
elif b_val == 0x68:
FL.clearHalf()
elif b_val == 0x78:
FL.setCarry()
elif b_val == 0x88:
FL.clearCarry()
# ADD R
elif b_val == 0x04:
ADD_R(B)
elif b_val == 0x14:
ADD_R(C)
elif b_val == 0x24:
ADD_R(D)
elif b_val == 0x34:
ADD_R(E)
elif b_val == 0x44:
ADD_R(H)
elif b_val == 0x54:
ADD_R(L)
elif b_val == 0x64:
ADD_R(HL, mem=True)
elif b_val == 0x74:
ADD_R(A)
# ADDI xx
elif b_val == 0xA7:
PC.inc()
value = ADD_8(A.value, memory[PC.value].value)
A.value = value
# ADDX RR
elif b_val == 0x83:
ADDX_RR(BC)
elif b_val == 0x93:
ADDX_RR(DE)
elif b_val == 0xA3:
ADDX_RR(HL)
# SUB R | CMP R
elif b_val == 0x84 or b_val == 0x86:
SUB_R(B, b_val == 0x86)
elif b_val == 0x94 or b_val == 0x96:
SUB_R(C, b_val == 0x96)
elif b_val == 0xA4 or b_val == 0xA6:
SUB_R(D, b_val == 0xA6)
elif b_val == 0xB4 or b_val == 0xB6:
SUB_R(E, b_val == 0xB6)
elif b_val == 0xC4 or b_val == 0xC6:
SUB_R(H, b_val == 0xC6)
elif b_val == 0xD4 or b_val == 0xD6:
SUB_R(L, b_val == 0xD6)
elif b_val == 0xE4 or b_val == 0xE6:
SUB_R(HL, b_val == 0xE6, mem=True)
elif b_val == 0xF4 or b_val == 0xF6:
SUB_R(A, b_val == 0xF6)
# SUBI xx | CMPI xx
elif b_val == 0xB7 or b_val == 0xF7:
PC.inc()
value = SUB_8(A.value, memory[PC.value].value)
if b_val == 0xB7: # SUBI xx
A.value = value
# INC R
elif b_val == 0x03:
INC(B)
elif b_val == 0x13:
INC(C)
elif b_val == 0x23:
INC(D)
elif b_val == 0x33:
INC(E)
elif b_val == 0x43:
INC(H)
elif b_val == 0x53:
INC(L)
elif b_val == 0x63:
INC(HL, mem=True)
elif b_val == 0x73:
INC(A)
# INX RR
    elif b_val == 0xA8:
        BC.getvalue()
        BC.value = (BC.value + 1) & 0xffff
        BC.setvalue()
    elif b_val == 0xB8:
        DE.getvalue()
        DE.value = (DE.value + 1) & 0xffff
        DE.setvalue()
    elif b_val == 0xC8:
        HL.getvalue()
        HL.value = (HL.value + 1) & 0xffff
        HL.setvalue()
# DEC R
elif b_val == 0x07:
DEC(B)
elif b_val == 0x17:
DEC(C)
elif b_val == 0x27:
DEC(D)
elif b_val == 0x37:
DEC(E)
elif b_val == 0x47:
DEC(H)
elif b_val == 0x57:
DEC(L)
elif b_val == 0x67:
DEC(HL, mem=True)
elif b_val == 0x77:
DEC(A)
# AND R
elif b_val == 0x05:
AND_R(B)
elif b_val == 0x15:
AND_R(C)
elif b_val == 0x25:
AND_R(D)
elif b_val == 0x35:
AND_R(E)
elif b_val == 0x45:
AND_R(H)
elif b_val == 0x55:
AND_R(L)
elif b_val == 0x65:
AND_R(HL, mem=True)
elif b_val == 0x75:
AND_R(A)
# ANDI xx
elif b_val == 0xC7:
PC.inc()
value = AND_8(memory[PC.value].value, A.value)
A.value = value
# OR R
elif b_val == 0x85:
OR_R(B)
elif b_val == 0x95:
OR_R(C)
elif b_val == 0xA5:
OR_R(D)
elif b_val == 0xB5:
OR_R(E)
elif b_val == 0xC5:
OR_R(H)
elif b_val == 0xD5:
OR_R(L)
elif b_val == 0xE5:
OR_R(HL, mem=True)
elif b_val == 0xF5:
OR_R(A)
# ORI xx
elif b_val == 0xD7:
PC.inc()
value = OR_8(memory[PC.value].value, A.value)
A.value = value
# XOR R
elif b_val == 0x06:
XOR_R(B)
elif b_val == 0x16:
XOR_R(C)
elif b_val == 0x26:
XOR_R(D)
elif b_val == 0x36:
XOR_R(E)
elif b_val == 0x46:
XOR_R(H)
elif b_val == 0x56:
XOR_R(L)
elif b_val == 0x66:
XOR_R(HL, mem=True)
elif b_val == 0x76:
XOR_R(A)
# XORI xx
elif b_val == 0xE7:
PC.inc()
value = XOR_8(memory[PC.value].value, A.value)
A.value = value
# CMPS R
elif b_val == 0x0D:
CMPS(B)
elif b_val == 0x1D:
CMPS(C)
elif b_val == 0x2D:
CMPS(D)
elif b_val == 0x3D:
CMPS(E)
elif b_val == 0x4D:
CMPS(H)
elif b_val == 0x5D:
CMPS(L)
elif b_val == 0x6D:
CMPS(HL, mem=True)
elif b_val == 0x7D:
CMPS(A)
    # SIN
    elif b_val == 0xE0:
        A.value = ord(sys.stdin.buffer.read(1)) & 0xff
    # SOUT
    elif b_val == 0xE1:
        print(chr(A.value), end="", flush=True)
        if A.value == 7:
            print("[BELL]")
# CLRSCR
elif b_val == 0xF0:
screen_clear()
# DRAW
elif b_val == 0xF1:
x = C.value
if x & 0b10000000:
x = - ((0x100 - x) & 0xff)
y = B.value
if y & 0b10000000:
y = - ((0x100 - y) & 0xff)
screen_draw_line(x, y, A.value & 0xff)
# JMP xxyy
elif b_val == 0x0F:
JUMP()
# JMPcc xxyy
elif b_val == 0x1F:
if FL.z:
JUMP()
else:
PC.inc(2)
elif b_val == 0x2F:
if not FL.z:
JUMP()
else:
PC.inc(2)
elif b_val == 0x3F:
if FL.n:
JUMP()
else:
PC.inc(2)
elif b_val == 0x4F:
if not FL.n:
JUMP()
else:
PC.inc(2)
    elif b_val == 0x5F:
        if FL.h:
            JUMP()
        else:
            PC.inc(2)
elif b_val == 0x6F:
if not FL.h:
JUMP()
else:
PC.inc(2)
elif b_val == 0x7F:
if FL.c:
JUMP()
else:
PC.inc(2)
elif b_val == 0x8F:
if not FL.c:
JUMP()
else:
PC.inc(2)
# JMP xx
elif b_val == 0x9F:
REL_JUMP()
# JMPcc xx
elif b_val == 0xAF:
if FL.z:
REL_JUMP()
else:
PC.inc()
elif b_val == 0xBF:
if not FL.z:
REL_JUMP()
else:
PC.inc()
elif b_val == 0xCF:
if FL.n:
REL_JUMP()
else:
PC.inc()
elif b_val == 0xDF:
if not FL.n:
REL_JUMP()
else:
PC.inc()
elif b_val == 0xEF:
if FL.h:
REL_JUMP()
else:
PC.inc()
elif b_val == 0xFF:
if not FL.h:
REL_JUMP()
else:
PC.inc()
elif b_val == 0xEE:
if FL.c:
REL_JUMP()
else:
PC.inc()
elif b_val == 0xFE:
if not FL.c:
REL_JUMP()
else:
PC.inc()
# CALL xxyy
elif b_val == 0x1E:
memory[SP.value].value = (PC.value+3) & 0xff
memory[SP.value + 1].value = (PC.value+3) >> 8
SP.dec()
JUMP()
# RET
elif b_val == 0x0E:
SP.inc()
PC.value = memory[SP.value].value + (memory[SP.value + 1].value << 8)
jumped = True
# NOP
elif b_val == 0x00:
pass
    else:
        print("UNKNOWN:", hex(b_val), "@", hex(PC.value))
if debug:
BC.getvalue()
DE.getvalue()
HL.getvalue()
print("A:",hex(A.value),"B:",hex(B.value),"C:",hex(C.value),"D:",hex(D.value),"E:",hex(E.value),"H:",hex(H.value),
"L:",hex(L.value),"BC:",hex(BC.value),"DE:",hex(DE.value),"HL:",hex(HL.value),"PC:",hex(PC.value),"SP:",hex(SP.value))
    if not jumped:
        PC.inc()
# ==== Burp/lib/data.py | wisdark/HUNT | Apache-2.0 ====
from __future__ import print_function
import json
import os
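# Data uses the Borg pattern: every instance shares one __dict__
# (shared_state), so checklist and issue data loaded once are visible to all
# Data() instances.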
class Data():
shared_state = {}
def __init__(self):
self.__dict__ = self.shared_state
self.set_checklist(None)
self.set_issues()
def set_checklist(self, file_name):
is_empty = file_name is None
if is_empty:
file_name = os.getcwd() + os.sep + "conf" + os.sep + "checklist.json"
try:
with open(file_name) as data_file:
data = json.load(data_file)
self.checklist = data["checklist"]
except Exception as e:
print(e)
def get_checklist(self):
return self.checklist
def set_issues(self):
file_name = os.getcwd() + os.sep + "conf" + os.sep + "issues.json"
try:
with open(file_name) as data_file:
self.issues = json.load(data_file)
except Exception as e:
print(e)
def get_issues(self):
return self.issues
def set_bugs(self, functionality_name, test_name, request, response):
bug = {
"request": request,
"response": response
}
self.checklist["Functionality"][functionality_name]["tests"][test_name]["bugs"].append(bug)
def set_notes(self, functionality_name, test_name, notes):
self.checklist["Functionality"][functionality_name]["tests"][test_name]["notes"] = notes
# ==== twisted/test/stdio_test_write.py | engdan77/otis_app | MIT ====
# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTests.test_write -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Main program for the child process run by
L{twisted.test.test_stdio.StandardInputOutputTests.test_write} to test that
ITransport.write() works for process transports.
"""
__import__('_preamble')
import sys
from twisted.internet import stdio, protocol
from twisted.python import reflect
class WriteChild(protocol.Protocol):
def connectionMade(self):
for ch in 'ok!':
self.transport.write(ch)
self.transport.loseConnection()
def connectionLost(self, reason):
reactor.stop()
if __name__ == '__main__':
reflect.namedAny(sys.argv[1]).install()
from twisted.internet import reactor
stdio.StandardIO(WriteChild())
reactor.run()
# ==== pyweb/overload.py | DrDaveD/wlcg-wpad | BSD-3-Clause ====
# calculate the load on each org
import threading
# cannot use from wpad_dispatch here, have to import whole module,
# because of circular dependency
import wpad_dispatch
from wpad_utils import *
from wlcg_wpad import getiporg
orgcleanminutes = 5
orgcleantime = 0
# Minute records keep track of the number of requests in each minute
class MinuteRecord:
def __init__(self, now, older):
self.minute = now # minute of the record
self.requests = 0 # number of requests this minute
self.next = None # next MinuteRecord
if older != None:
older.next = self # point older record to this one
class OrgData:
def __init__(self):
self.lock = threading.Lock() # lock for this org
self.overloadminute = 0 # minute last overload triggered
self.total = 0 # total number of requests tracked
self.newest = None # newest MinuteRecord
self.oldest = None # oldest MinuteRecord
orgdata = {}
# lock for adding, deleting, and looking up an org
orgdatalock = threading.Lock()
# return triple of org name, minutes remaining in an overload,
# and percent of limit in the last minutes being tracked
def orgload(remoteip, limit, minutes, persist, now):
global orgcleantime
org = getiporg(remoteip)
if org == None:
return None, 0, 0
# See if this org is excluded
# wlcgwpadconf is occasionally replaced, so use a local variable for it
conf = wpad_dispatch.wlcgwpadconf
if 'overload' in conf and 'excludes' in conf['overload'] and \
org in conf['overload']['excludes']:
return None, 0, 0
now = now / 60 # this function deals only with minutes
orgdatalock.acquire()
if orgcleantime <= now - orgcleanminutes:
# clean out orgs that have had no updates in minutes or overload in
# persist minutes, except current org
orgcleantime = now
numorgs = 0
delorgs = 0
for oldorg in list(orgdata):
numorgs += 1
if org == oldorg:
continue
data = orgdata[oldorg]
if persist < now - data.overloadminute and \
data.newest != None and data.newest.minute < now - minutes:
# Note that there is a race condition where this could
# delete an org from orgdata at the same time as another
# request comes in to another thread to prolong it, but
# that would only result in the loss of one count, it would
# not be fatal. The only way data.newest can equal None
# is if the organization is in the process of being created
# by another thread, leave that one alone.
del orgdata[oldorg]
delorgs += 1
if delorgs > 0:
orgdatalock.release()
logmsg('-', '-', '', 'cleaned load data from ' + str(delorgs) + ' orgs, ' + str(numorgs-delorgs) + ' still active')
orgdatalock.acquire()
# get the data for this org
if org in orgdata:
data = orgdata[org]
else:
data = OrgData()
orgdata[org] = data
orgdatalock.release()
data.lock.acquire()
# remove any minute records that are too old
record = data.oldest
while record != None and record.minute <= now - minutes:
data.total -= record.requests
record = record.next
data.oldest = record
record = data.newest
if record == None or record.minute != now:
# add new minute record
record = MinuteRecord( now, record )
data.newest = record
if data.oldest == None:
data.oldest = record
# add one to this minute and the total
record.requests += 1
data.total = data.total + 1
percent = int(data.total * 100.0 / limit)
if percent > 100:
data.overloadminute = now
overloadminute = data.overloadminute
data.lock.release()
return ( org, persist - (now - overloadminute), percent )
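# Illustrative use (hypothetical parameters): a dispatcher allowing 600
# requests per org over the last 10 minutes, with overloads persisting for
# 30 minutes, would call
#     orgload(remoteip, 600, 10, 30, time.time())
# and treat the org as overloaded while the returned "minutes remaining"
# value is positive; the third element reports recent load as a percent of
# the limit.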
# ==== test/bulk_insert.py | jfcarter2358/Ceres | MIT ====
import requests
from datetime import datetime
import time
headers = {
'Content-Type': 'application/json'
}
with open('test/logs.txt', 'r') as f:
logs = f.read().split('\n')
levels = ['INFO', 'WARN', 'DEBUG', 'ERROR', 'TRACE']
messages = []
for i in range(0, len(logs)):
if i % 100 == 0:
print('{} of {}'.format(i, len(logs)))
data = {
'messages': messages
}
requests.post('http://localhost:9090/insert', json=data, headers=headers)
messages = []
now = datetime.now()
message = {
'year': now.year,
'month': now.month,
'day': now.day,
'hour': now.hour,
'minute': now.minute,
'second': now.second,
'service': 'foobar',
'message': logs[i],
'level': levels[i % 5]
}
time.sleep(0.001)
    messages.append(message)

# send any leftover messages that did not fill a complete batch
if messages:
    requests.post('http://localhost:9090/insert', json={'messages': messages}, headers=headers)

# ==== tests/test_jsons.py | jrinks/greater-chicago-food-despository | MIT ====
import os
import sys
sys.path.append(os.path.abspath(''))
# Raises linting error because not at top of file
# Not sure how to resolve this with the pathing
from src import uploadJson # noqa: E402
import src.config as config # noqa: E402
# Out of commission until the newly requested geojson format is developed
# def test_main():
# from src import main
# import json
# import geojson
# #from src import main
# main_dict = main.main(['county'])
# for v in main_dict.values():
# v_str = json.dumps(v)
# v_geojson = geojson.loads(v_str)
# assert v_geojson.is_valid == True
def test_requirements():
import pkg_resources
requirements_path = "requirements.txt"
with open(requirements_path) as f:
requirements = pkg_resources.parse_requirements(f)
for r in requirements:
r = str(r)
pkg_resources.require(r)
# breakpoint()
def test_auth():
db = uploadJson.auth_firebase()
cook = db.reference('/county_data/17031').get()
assert cook['NAME'] == 'Cook County, Illinois'
def test_secrets():
assert type(config.CENSUS_KEY) == str
assert type(config.FIREBASE_SERVICE_KEY) == str
assert config.CENSUS_KEY != ''
assert config.FIREBASE_SERVICE_KEY != ''
| 28.488889 | 71 | 0.670047 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 572 | 0.446178 |
b5068529d2a14e7fea802599be5ae03e8acda2e9 | 12,939 | py | Python | tetris.py | AkshayRaul/Python-Scripts | 52f1f9d329634830507fa7eeb885cb8d9c8afd88 | [
"MIT"
]
| null | null | null | tetris.py | AkshayRaul/Python-Scripts | 52f1f9d329634830507fa7eeb885cb8d9c8afd88 | [
"MIT"
]
| null | null | null | tetris.py | AkshayRaul/Python-Scripts | 52f1f9d329634830507fa7eeb885cb8d9c8afd88 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
"""
Code is personal property of the owner.Rights reserved by the user only.
Codes on https://github.com/kt97679/tetris
Attempt to spread the open source knowledge :)
"""
import sys
import select
import tty
import termios
import random
import os
import time
import fcntl
if (sys.hexversion >> 16) >= 0x202:
FCNTL = fcntl
else:
import FCNTL
import contextlib
PLAYFIELD_W = 10
PLAYFIELD_H = 20
PLAYFIELD_X = 30
PLAYFIELD_Y = 1
BORDER_COLOR = 'yellow'
HELP_X = 58
HELP_Y = 1
HELP_COLOR = 'cyan'
SCORE_X = 1
SCORE_Y = 2
SCORE_COLOR = 'green'
NEXT_X = 14
NEXT_Y = 11
GAMEOVER_X = 1
GAMEOVER_Y = PLAYFIELD_H + 3
INITIAL_MOVE_DOWN_DELAY = 1.0
DELAY_FACTOR = 0.8
LEVEL_UP = 20
NEXT_EMPTY_CELL = " "
PLAYFIELD_EMPTY_CELL = " ."
FILLED_CELL = "[]"
class TetrisScreen:
def __init__(self):
self.s = ''
self.no_color = False
self.color = {
'red': 1,
'green': 2,
'yellow': 3,
'blue': 4,
'fuchsia': 5,
'cyan': 6,
'white': 7
}
def xyprint(self, x, y, s):
self.s += "\x1b[{0};{1}H{2}".format(y, x, s)
def flush(self):
sys.stdout.write(self.s)
sys.stdout.flush()
self.s = ''
def puts(self, s):
self.s += s
def clear_screen(self):
self.s += "\x1b[2J"
def show_cursor(self):
self.s += "\x1b[?25h"
def hide_cursor(self):
self.s += "\x1b[?25l"
def set_fg(self, c):
if self.no_color:
return
self.s += "\x1b[3{0}m".format(self.color.get(c, 7))
def set_bg(self, c):
if self.no_color:
return
self.s += "\x1b[4{0}m".format(self.color.get(c, 7))
def reset_colors(self):
self.s += "\x1b[0m"
def set_bold(self):
self.s += "\x1b[1m"
def get_random_color(self):
k = self.color.keys()
random.shuffle(k)
return k[0]
def toggle_color(self):
self.no_color ^= True
class TetrisScreenItem(object):
def __init__(self, screen):
self.visible = True
self.screen = screen
def show(self):
if self.visible:
self.draw(True)
def hide(self):
if self.visible:
self.draw(False)
def toggle(self):
self.visible ^= True
self.draw(self.visible)
class TetrisHelp(TetrisScreenItem):
def __init__(self, screen):
super(TetrisHelp, self).__init__(screen)
self.color = HELP_COLOR
self.text = [
" Use cursor keys",
" or",
" s: rotate",
"a: left, d: right",
" space: drop",
" q: quit",
" c: toggle color",
"n: toggle show next",
"h: toggle this help"
]
def draw(self, visible):
self.screen.set_bold()
self.screen.set_fg(self.color)
i = 0
for s in self.text:
if not visible:
s = ' ' * len(s)
self.screen.xyprint(HELP_X, HELP_Y + i, s)
i += 1
self.screen.reset_colors()
class TetrisPlayField:
def __init__(self, screen):
self.screen = screen
self.cells = [[None] * PLAYFIELD_W for i in range(0, PLAYFIELD_H)]
def show(self):
y = 0
for row in self.cells:
self.screen.xyprint(PLAYFIELD_X, PLAYFIELD_Y + y, "")
y += 1
for cell in row:
if cell == None:
self.screen.puts(PLAYFIELD_EMPTY_CELL)
else:
self.screen.set_fg(cell)
self.screen.set_bg(cell)
self.screen.puts(FILLED_CELL)
self.screen.reset_colors()
def flatten_piece(self, piece):
for cell in piece.get_cells():
self.cells[cell[1]][cell[0]] = piece.color
def process_complete_lines(self):
cells = [row for row in self.cells if None in row]
complete_lines = PLAYFIELD_H - len(cells)
if complete_lines > 0:
self.cells = [[None] * PLAYFIELD_W for i in range(0, complete_lines)] + cells
return complete_lines
def draw_border(self):
self.screen.set_bold()
self.screen.set_fg(BORDER_COLOR)
for y in range(0, PLAYFIELD_H):
# 2 because border is 2 characters thick
self.screen.xyprint(PLAYFIELD_X - 2, PLAYFIELD_Y + y, "<|")
# 2 because each cell on play field is 2 characters wide
self.screen.xyprint(PLAYFIELD_X + PLAYFIELD_W * 2, PLAYFIELD_Y+ y, "|>")
y = 0
for s in ['==', '\/']:
self.screen.xyprint(PLAYFIELD_X, PLAYFIELD_Y + PLAYFIELD_H + y, s * PLAYFIELD_W)
y += 1
self.screen.reset_colors()
def position_ok(self, cells):
return all(
(0 <= x < PLAYFIELD_W) and
(0 <= y < PLAYFIELD_H) and
self.cells[y][x] is None
for x, y in cells
)
class TetrisPiece(TetrisScreenItem):
configurations = [
# 0123
# 4567
# 89ab
# cdef
[0x1256], # square
[0x159d, 0x4567], # line
[0x4512, 0x0459], # s
[0x0156, 0x1548], # z
[0x159a, 0x8456, 0x0159, 0x2654], # l
[0x1598, 0x0456, 0x2159, 0xa654], # inverted l
[0x1456, 0x1596, 0x4569, 0x4159] # t
]
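    # Each hex digit of a configuration names one occupied cell in the 4x4
    # grid sketched above (low 2 bits = x, high 2 bits = y).  Example: the
    # square 0x1256 fills grid cells 1, 2, 5 and 6, i.e. (x, y) offsets
    # (1,0), (2,0), (1,1) and (2,1).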
def __init__(self, screen, origin, visible):
super(TetrisPiece, self).__init__(screen)
self.color = screen.get_random_color()
self.data = random.choice(self.configurations)
self.symmetry = len(self.data)
self.position = 0, 0, random.randint(0, self.symmetry - 1)
self.origin = origin
self.visible = visible
self.empty_cell = NEXT_EMPTY_CELL
def get_cells(self, new_position=None):
x, y, z = new_position or self.position
data = self.data[z]
return [[x + ((data >> (i * 4)) & 3), y + ((data >> (i * 4 + 2)) & 3)] for i in range(0, 4)]
def draw(self, visible):
if visible:
self.screen.set_fg(self.color)
self.screen.set_bg(self.color)
s = FILLED_CELL
else:
s = self.empty_cell
for cell in self.get_cells():
self.screen.xyprint(self.origin[0] + cell[0] * 2, self.origin[1] + cell[1], s)
self.screen.reset_colors()
def set_xy(self, x, y):
self.position = x, y, self.position[2]
def new_position(self, dx, dy, dz):
x, y, z = self.position
return x + dx, y + dy, (z + dz) % self.symmetry
class TetrisScore:
def __init__(self, screen, tetris_input_processor):
self.screen = screen
self.tetris_input_processor = tetris_input_processor
self.score = 0
self.level = 1
self.lines_completed = 0
def update(self, complete_lines):
self.lines_completed += complete_lines
self.score += (complete_lines * complete_lines)
if self.score > LEVEL_UP * self.level:
self.level += 1
self.tetris_input_processor.decrease_delay()
self.show()
def show(self):
self.screen.set_bold()
self.screen.set_fg(SCORE_COLOR)
self.screen.xyprint(SCORE_X, SCORE_Y, "Lines completed: {0}".format(self.lines_completed))
self.screen.xyprint(SCORE_X, SCORE_Y + 1, "Level: {0}".format(self.level))
self.screen.xyprint(SCORE_X, SCORE_Y + 2, "Score: {0}".format(self.score))
self.screen.reset_colors()
class TetrisController:
def __init__(self, screen, tetris_input_processor):
self.screen = screen
self.next_piece_visible = True
self.running = True
self.help = TetrisHelp(screen)
self.score = TetrisScore(screen, tetris_input_processor)
self.play_field = TetrisPlayField(screen)
self.get_next_piece()
self.get_current_piece()
self.redraw_screen()
screen.flush()
def get_current_piece(self):
self.next_piece.hide()
self.current_piece = self.next_piece
        self.current_piece.set_xy((PLAYFIELD_W - 4) // 2, 0)  # integer division keeps screen coordinates integral
if not self.play_field.position_ok(self.current_piece.get_cells()):
self.cmd_quit()
return
self.current_piece.visible = True
self.current_piece.empty_cell = PLAYFIELD_EMPTY_CELL
self.current_piece.origin = (PLAYFIELD_X, PLAYFIELD_Y)
self.current_piece.show()
self.get_next_piece()
def get_next_piece(self):
self.next_piece = TetrisPiece(
self.screen,
(NEXT_X, NEXT_Y),
self.next_piece_visible,
)
self.next_piece.show()
def redraw_screen(self):
self.screen.clear_screen()
self.screen.hide_cursor()
self.play_field.draw_border()
for o in [self.help, self.play_field, self.score, self.next_piece, self.current_piece]:
o.show()
def cmd_quit(self):
self.running = False
self.screen.xyprint(GAMEOVER_X, GAMEOVER_Y, "Game over!")
self.screen.xyprint(GAMEOVER_X, GAMEOVER_Y + 1, "")
self.screen.show_cursor()
def process_fallen_piece(self):
self.play_field.flatten_piece(self.current_piece)
complete_lines = self.play_field.process_complete_lines()
if complete_lines > 0:
self.score.update(complete_lines)
self.play_field.show()
def move(self, dx, dy, dz):
position = self.current_piece.new_position(dx, dy, dz)
if self.play_field.position_ok(self.current_piece.get_cells(position)):
self.current_piece.hide()
self.current_piece.position = position
self.current_piece.show()
return True
return (dy == 0)
def cmd_right(self):
self.move(1, 0, 0)
def cmd_left(self):
self.move(-1, 0, 0)
def cmd_rotate(self):
self.move(0, 0, 1)
def cmd_down(self):
if self.move(0, 1, 0):
return True
self.process_fallen_piece()
self.get_current_piece()
return False
def cmd_drop(self):
while self.cmd_down():
pass
def toggle_help(self):
self.help.toggle()
def toggle_next(self):
self.next_piece_visible ^= True
self.next_piece.toggle()
def toggle_color(self):
self.screen.toggle_color()
self.redraw_screen()
@contextlib.contextmanager
def nonblocking_input():
fd = sys.stdin
try:
flags = fcntl.fcntl(fd, FCNTL.F_GETFL)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(fd, FCNTL.F_SETFL, flags)
yield
finally:
flags = fcntl.fcntl(fd, FCNTL.F_GETFL)
flags = flags & ~os.O_NONBLOCK
fcntl.fcntl(fd, FCNTL.F_SETFL, flags)
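# stdin is switched to non-blocking mode so the read() issued after select()
# can never stall the game loop; the saved flags are restored on exit.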
@contextlib.contextmanager
def tcattr():
try:
old_settings = termios.tcgetattr(sys.stdin)
yield
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)
class TetrisInputProcessor:
delay = INITIAL_MOVE_DOWN_DELAY
def decrease_delay(self):
self.delay *= DELAY_FACTOR
def run():
input_processor = TetrisInputProcessor()
with nonblocking_input(), tcattr():
# tty.setcbreak(sys.stdin.fileno())
tty.setraw(sys.stdin.fileno())
key = [0, 0, 0]
ts = TetrisScreen()
ts.clear_screen()
tc = TetrisController(ts, input_processor)
commands = {
"\x03": tc.cmd_quit,
"q": tc.cmd_quit,
"C": tc.cmd_right,
"d": tc.cmd_right,
"D": tc.cmd_left,
"a": tc.cmd_left,
"A": tc.cmd_rotate,
"s": tc.cmd_rotate,
" ": tc.cmd_drop,
"h": tc.toggle_help,
"n": tc.toggle_next,
"c": tc.toggle_color
}
last_move_down_time = time.time()
while tc.running:
cmd = None
now = time.time()
select_timeout = input_processor.delay - (now - last_move_down_time)
if select_timeout < 0:
tc.cmd_down()
ts.flush()
last_move_down_time = now
select_timeout = input_processor.delay
            if select.select([sys.stdin], [], [], select_timeout)[0]:
s = sys.stdin.read(16)
for c in s:
key[2] = key[1]
key[1] = key[0]
key[0] = c
if key[2] == '\x1b' and key[1] == '[': # x1b is ESC
cmd = commands.get(key[0], None)
else:
cmd = commands.get(key[0].lower(), None)
if cmd:
cmd()
ts.flush()
if __name__ == '__main__':
run()
# ==== cucm.py | PresidioCode/cucm-exporter | MIT ====
import time
import json
def export_users(ucm_axl):
"""
retrieve users from ucm
"""
try:
user_list = ucm_axl.get_users(
tagfilter={
"userid": "",
"firstName": "",
"lastName": "",
"directoryUri": "",
"telephoneNumber": "",
"enableCti": "",
"mailid": "",
"primaryExtension": {"pattern": "", "routePartitionName": ""},
"enableMobility": "",
"homeCluster": "",
"associatedPc": "",
"enableEmcc": "",
"imAndPresenceEnable": "",
"serviceProfile": {"_value_1": ""},
"status": "",
"userLocale": "",
"title": "",
"subscribeCallingSearchSpaceName": "",
}
)
all_users = []
for user in user_list:
# print(user)
user_details = {}
user_details['userid'] = user.userid
user_details['firstName'] = user.firstName
user_details['lastName'] = user.lastName
user_details['telephoneNumber'] = user.telephoneNumber
user_details['primaryExtension'] = user.primaryExtension.pattern
user_details['directoryUri'] = user.directoryUri
user_details['mailid'] = user.mailid
all_users.append(user_details)
print(
f"{user_details.get('userid')} -- {user_details.get('firstName')} {user_details.get('lastName')}: {user_details.get('primaryExtension')}"
)
print("-" * 35)
print(f"number of users: {len(all_users)}")
# print(user_list)
# print(json.dumps(all_users, indent=2))
return all_users
except Exception as e:
print(e)
return []
def export_phones(ucm_axl):
"""
Export Phones
"""
try:
phone_list = ucm_axl.get_phones(
tagfilter={
"name": "",
"description": "",
"product": "",
"model": "",
"class": "",
"protocol": "",
"protocolSide": "",
"callingSearchSpaceName": "",
"devicePoolName": "",
"commonDeviceConfigName": "",
"commonPhoneConfigName": "",
"networkLocation": "",
"locationName": "",
"mediaResourceListName": "",
"networkHoldMohAudioSourceId": "",
"userHoldMohAudioSourceId": "",
"loadInformation": "",
"securityProfileName": "",
"sipProfileName": "",
"cgpnTransformationCssName": "",
"useDevicePoolCgpnTransformCss": "",
"numberOfButtons": "",
"phoneTemplateName": "",
"primaryPhoneName": "",
"loginUserId": "",
"defaultProfileName": "",
"enableExtensionMobility": "",
"currentProfileName": "",
"loginTime": "",
"loginDuration": "",
# "currentConfig": "",
"ownerUserName": "",
"subscribeCallingSearchSpaceName": "",
"rerouteCallingSearchSpaceName": "",
"allowCtiControlFlag": "",
"alwaysUsePrimeLine": "",
"alwaysUsePrimeLineForVoiceMessage": "",
}
)
all_phones = []
for phone in phone_list:
# print(phone)
phone_details = {
"name": phone.name,
"description": phone.description,
"product": phone.product,
"model": phone.model,
"protocol": phone.protocol,
"protocolSide": phone.protocolSide,
"callingSearchSpaceName": phone.callingSearchSpaceName._value_1,
"devicePoolName": phone.defaultProfileName._value_1,
"commonDeviceConfigName": phone.commonDeviceConfigName._value_1,
"commonPhoneConfigName": phone.commonPhoneConfigName._value_1,
"networkLocation": phone.networkLocation,
"locationName": phone.locationName._value_1,
"mediaResourceListName": phone.mediaResourceListName._value_1,
"networkHoldMohAudioSourceId": phone.networkHoldMohAudioSourceId,
"userHoldMohAudioSourceId": phone.userHoldMohAudioSourceId,
"loadInformation": phone.loadInformation,
"securityProfileName": phone.securityProfileName._value_1,
"sipProfileName": phone.sipProfileName._value_1,
"cgpnTransformationCssName": phone.cgpnTransformationCssName._value_1,
"useDevicePoolCgpnTransformCss": phone.useDevicePoolCgpnTransformCss,
"numberOfButtons": phone.numberOfButtons,
"phoneTemplateName": phone.phoneTemplateName._value_1,
"primaryPhoneName": phone.primaryPhoneName._value_1,
"loginUserId": phone.loginUserId,
"defaultProfileName": phone.defaultProfileName._value_1,
"enableExtensionMobility": phone.enableExtensionMobility,
"currentProfileName": phone.currentProfileName._value_1,
"loginTime": phone.loginTime,
"loginDuration": phone.loginDuration,
# "currentConfig": phone.currentConfig,
"ownerUserName": phone.ownerUserName._value_1,
"subscribeCallingSearchSpaceName": phone.subscribeCallingSearchSpaceName._value_1,
"rerouteCallingSearchSpaceName": phone.rerouteCallingSearchSpaceName._value_1,
"allowCtiControlFlag": phone.allowCtiControlFlag,
"alwaysUsePrimeLine": phone.alwaysUsePrimeLine,
"alwaysUsePrimeLineForVoiceMessage": phone.alwaysUsePrimeLineForVoiceMessage,
}
line_details = ucm_axl.get_phone(name=phone.name)
# print(line_details.lines.line)
try:
for line in line_details.lines.line:
# print(line)
phone_details[f"line_{line.index}_dirn"] = line.dirn.pattern
phone_details[f"line_{line.index}_routePartitionName"] = line.dirn.routePartitionName._value_1
phone_details[f"line_{line.index}_display"] = line.display
phone_details[f"line_{line.index}_e164Mask"] = line.e164Mask
except Exception as e:
print(e)
all_phones.append(phone_details)
print(
f"exporting: {phone.name}: {phone.model} - {phone.description}")
print("-" * 35)
print(f"number of phones: {len(all_phones)}")
return all_phones
except Exception as e:
print(e)
return []
def export_siptrunks(ucm_axl):
try:
all_sip_trunks = []
sip_trunks = ucm_axl.get_sip_trunks(
tagfilter={
"name": "",
"description": "",
"devicePoolName": "",
"callingSearchSpaceName": "",
"sipProfileName": "",
"mtpRequired": "",
"sigDigits": "",
"destAddrIsSrv": "",
}
)
for siptrunk in sip_trunks:
trunk = {}
trunk["name"] = siptrunk.name
trunk["description"] = siptrunk.description
trunk["devicePoolName"] = siptrunk.devicePoolName._value_1
trunk["sipProfileName"] = siptrunk.sipProfileName._value_1
trunk["callingSearchSpace"] = siptrunk.callingSearchSpaceName._value_1
trunk["mtpRequired"] = siptrunk.mtpRequired
trunk["sigDigits"] = siptrunk.sigDigits._value_1
# TODO: get_siptrunk details for destinations
trunk_details = ucm_axl.get_sip_trunk(name=siptrunk.name)
destinations = trunk_details['return']['sipTrunk']['destinations']['destination']
# print(destinations)
for count, destination in enumerate(destinations):
trunk[f'addressIpv4_{count}'] = destination.addressIpv4
trunk[f'port_{count}'] = destination.port
trunk[f'sortOrder_{count}'] = destination.sortOrder
all_sip_trunks.append(trunk)
# print(siptrunk)
print(f"exporting: {siptrunk.name}: {siptrunk.description}")
print("-" * 35)
print(f"number of siptrunks: {len(all_sip_trunks)}")
return all_sip_trunks
except Exception as e:
print(e)
return []
def export_phone_registrations(ucm_axl, ucm_ris):
"""
Export Phone Registrations
"""
nodes = ucm_axl.list_process_nodes()
del nodes[0] # remove EnterpriseWideData node
subs = []
for node in nodes:
subs.append(node.name)
phones = ucm_axl.get_phones(tagfilter={"name": ""})
all_phones = []
    phone_reg = []
for phone in phones:
all_phones.append(phone.name)
def limit(all_phones, n=1000): return [
all_phones[i: i + n] for i in range(0, len(all_phones), n)
]
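    # device names are chunked because a single RIS query accepts a limited
    # number of devices (commonly 1000, matching the default above)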
groups = limit(all_phones)
for group in groups:
registered = ucm_ris.checkRegistration(group, subs)
if registered["TotalDevicesFound"] < 1:
print("no devices found!")
        else:
            reg = {}  # fresh record per result so appended entries don't alias
            reg["user"] = registered["LoginUserId"]
reg["regtime"] = time.strftime(
"%Y-%m-%d %H:%M:%S", time.localtime(registered["TimeStamp"]))
for item in registered["IPAddress"]:
reg["ip"] = item[1][0]["IP"]
for item in registered["LinesStatus"]:
reg["primeline"] = item[1][0]["DirectoryNumber"]
reg["name"] = registered["Name"]
print(f"exporting: {reg['name']}: {reg['ip']} - {reg['regtime']}")
phone_reg.append(reg)
print("-" * 35)
print(f"number of registered phones: {len(phone_reg)}")
return phone_reg
def export_translations(ucm_axl):
try:
all_translations = []
translations = ucm_axl.get_translations()
for translation in translations:
# print(translation)
xlate = {}
xlate["pattern"] = translation.pattern
xlate["routePartition"] = translation.routePartitionName._value_1
xlate["description"] = translation.description
xlate["callingSearchSpace"] = translation.callingSearchSpaceName._value_1
xlate["callingPartyTransformationMask"] = translation.callingPartyTransformationMask
xlate["digitDiscardInstructionName"] = translation.digitDiscardInstructionName._value_1
xlate["prefixDigitsOut"] = translation.prefixDigitsOut
xlate["calledPartyTransformationMask"] = translation.calledPartyTransformationMask
all_translations.append(xlate)
print(
f"exporting: {xlate['pattern']}: {xlate['routePartition']} - {xlate['description']} --> {xlate['calledPartyTransformationMask']}")
print("-" * 35)
print(f"number of translations: {len(all_translations)}")
return all_translations
    except Exception as e:
        print(e)
        return []
# ==== web.backend/config.py | Forevka/YTDownloader | MIT ====
host = "localhost"
port = 9999
dboptions = {
"host": "194.67.198.163",
"user": "postgres",
"password": "werdwerd2012",
"database": "zno_bot",
'migrate': True
}
API_PATH = '/api/'
API_VERSION = 'v1'
API_URL = API_PATH + API_VERSION
# ==== ml_train.py | ElvinOuyang/LearningFlask | MIT ====
import pandas as pd
import numpy as np
from sklearn.datasets import load_iris
from sklearn.naive_bayes import GaussianNB
import pickle
def train_iris_nb():
"""Train a GaussianNB model on iris dataset."""
X, y_train = load_iris(return_X_y=True, as_frame=True)
colnames = X.columns
X_train = X.values
model = GaussianNB()
model.fit(X_train, y_train)
return model
def dump_model(model_path, model):
"""Save model as binary pickle file."""
with open(model_path, 'wb') as file:
pickle.dump(model, file)
def load_model(model_path):
"""Load model to return for future use."""
with open(model_path, 'rb') as file:
model = pickle.load(file)
return model
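# Illustrative round trip (file name and sample values are arbitrary):
#     model = train_iris_nb()
#     dump_model('model.pickle', model)
#     clf = load_model('model.pickle')
#     clf.predict([[5.1, 3.5, 1.4, 0.2]])  # a classic setosa sample -> class 0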
def main():
model = train_iris_nb()
dump_model('model.pickle', model)
if __name__ == '__main__':
    main()

# ==== UFOscrapy/UFOscrapy/pipelines.py | anaheino/Ufo-sightings-map | MIT ====
# -*- coding: utf-8 -*-
from .parserules import parseAll
class UfoscrapyPipeline(object):
    def process_item(self, item, spider):
        """
        Parse the scraped fields into the correct format using regex and datetime.
        """
        # each field arrives as a list; keep its first element, or fall back
        # to an empty string when the field is missing
        for field in ('loc', 'shape', 'state', 'duration'):
            try:
                item[field] = item[field][0]
            except Exception:
                item[field] = ""
        parseAll(item)
        return item
| 22.606061 | 66 | 0.430295 | 654 | 0.875502 | 0 | 0 | 0 | 0 | 0 | 0 | 198 | 0.26506 |
b50e4ff7686cf0dbd4629ae146083491535bb7be | 1,028 | py | Python | precompressed/context_processors.py | EightMedia/django-precompressed | 1b135b4784a96948237f93bf0648d3ab747fcfb3 | [
"MIT"
]
| 4 | 2015-05-05T06:58:35.000Z | 2018-10-15T18:53:50.000Z | precompressed/context_processors.py | EightMedia/django-precompressed | 1b135b4784a96948237f93bf0648d3ab747fcfb3 | [
"MIT"
]
| 2 | 2015-02-04T10:48:00.000Z | 2020-01-27T15:36:39.000Z | precompressed/context_processors.py | EightMedia/django-precompressed | 1b135b4784a96948237f93bf0648d3ab747fcfb3 | [
"MIT"
]
| 1 | 2019-02-20T20:40:04.000Z | 2019-02-20T20:40:04.000Z | # *****************************************************************************
# precompressed/context_processors.py
# *****************************************************************************
"""
A set of request processors that return dictionaries to be merged into a
template context. Each function takes the request object as its only parameter
and returns a dictionary to add to the context.
These are referenced from the setting TEMPLATE_CONTEXT_PROCESSORS and used by
RequestContext.
"""
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals
from precompressed import utils
# *****************************************************************************
# accepts_gzip
# *****************************************************************************
def accepts_gzip(request):
"""
defines ACCEPTS_GZIP -- a boolean which reflects whether
the request accepts Content-Type: gzip.
"""
return {'ACCEPTS_GZIP': utils.accepts_gzip(request)}
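# To activate this processor, reference it from the TEMPLATE_CONTEXT_PROCESSORS
# setting mentioned in the module docstring, e.g.:
#     TEMPLATE_CONTEXT_PROCESSORS += (
#         'precompressed.context_processors.accepts_gzip',
#     )
# Templates rendered with RequestContext can then test {{ ACCEPTS_GZIP }}.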
# ==== Code/pytorchFwdModel.py | mChataign/smileCompletion | BSD-3-Clause ====
#Import modules
import os
import pandas as pd
import numpy as np
from pandas import DatetimeIndex
import dask
import scipy
import time
import glob
import torch
import torch.nn as nn
from live_plotter import live_plotter
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d
from functools import partial
from abc import ABCMeta, abstractmethod
import plottingTools
import pytorchModel
import loadData
class pytorchFwdModel(pytorchModel.pytorchModel) :
#######################################################################################################
#Construction functions
#######################################################################################################
def __init__(self,
learningRate,
hyperParameters,
nbUnitsPerLayer,
nbFactors,
modelName = "./bestPyTorchFwdModel"):
super().__init__(learningRate, hyperParameters, nbUnitsPerLayer, nbFactors,
modelName = modelName)
def buildModel(self):
self.fe = pytorchModel.Functional_encoder(self.nbFactors + 1) #Neural network architecture
return
#######################################################################################################
#Evaluation functions
#######################################################################################################
def evalBatch(self, batch, code):
batchLogMoneyness = self.getLogMoneyness(batch)
scaledMoneyness = (batchLogMoneyness.values - self.MeanLogMoneyness) / self.StdLogMoneyness
logMoneynessTensor = torch.Tensor(np.expand_dims(scaledMoneyness, 1)).float() #Log moneyness
# for j in np.random.choice(len(test[k]), 10):
# filt = test[k].nBizDays >= 10
batchLogMat = self.getLogMaturities(batch)
scaledMat = (batchLogMat.values - self.MeanLogMaturity) / self.StdLogMaturity
logMaturity = torch.tensor( np.expand_dims(scaledMat, 1) , requires_grad=True).float()
scaledFwd = (batch[2].values - self.MeanFwd) / self.StdFwd
fwdTensor = torch.tensor( np.expand_dims(scaledFwd, 1) ).float()
codeTensor = code.repeat(batch[0].shape[0], 1).float()
refVol = torch.tensor(batch[0].values)
inputTensor = torch.cat((logMoneynessTensor, logMaturity, fwdTensor, codeTensor), dim=1)
outputTensor = self.fe( inputTensor )[:, 0]
loss = torch.mean( (outputTensor - refVol)[~torch.isnan(outputTensor)] ** 2 )#torch.nanmean( (outputTensor - refVol) ** 2 )
return inputTensor, outputTensor, loss, logMaturity, codeTensor, logMoneynessTensor
def commonEvalSingleDayWithoutCalibration(self,
initialValueForFactors,
dataSetList,
computeSensi = False):
#Rebuild tensor graph
self.restoringGraph()
#Build tensor for reconstruction
nbObs = 1 if initialValueForFactors.ndim == 1 else initialValueForFactors.shape[0]
nbPoints = dataSetList[1].shape[0] if dataSetList[1].ndim == 1 else dataSetList[1].shape[1]
nbFactors = self.nbFactors
reshapedValueForFactors = np.reshape([initialValueForFactors],
(nbObs,nbFactors))
self.code = pytorchModel.Code(nbObs, self.nbFactors, initialValue = reshapedValueForFactors) #Latent variables
        # single-day evaluation: broadcast the first row of factors to every point
        codeTensor = self.code.code[0, :].repeat(nbPoints, 1)
batchLogMoneyness = self.getLogMoneyness(dataSetList)
scaledMoneyness = (batchLogMoneyness.values - self.MeanLogMoneyness) / self.StdLogMoneyness
        logMoneynessTensor = torch.Tensor(np.expand_dims(scaledMoneyness, 1)).float() #Log moneyness
scaledFwd = (dataSetList[2].values - self.MeanFwd) / self.StdFwd
fwdTensor = torch.tensor( np.expand_dims(scaledFwd, 1) ).float()
# for j in np.random.choice(len(test[k]), 10):
# filt = test[k].nBizDays >= 10
batchLogMat = self.getLogMaturities(dataSetList)
scaledMat = (batchLogMat.values - self.MeanLogMaturity) / self.StdLogMaturity
logMaturity = torch.tensor( np.expand_dims(scaledMat, 1) ).float()
inputTensor = torch.cat((logMoneynessTensor, logMaturity, fwdTensor, codeTensor), dim=1)
outputTensor = self.fe( inputTensor )[:, 0]
self.restoreWeights()
#Build tensor for reconstruction
# print("nbPoints : ", nbPoints)
# print("initialValueForFactors : ", initialValueForFactors)
# print("inputFeatures : ", inputFeatures)
# print("outputFeatures : ", outputFeatures)
# print("outputTensor : ", self.outputTensor)
        reconstructedSurface = outputTensor.detach().numpy().reshape(dataSetList[0].shape)
        inputTensor = torch.cat((logMoneynessTensor, logMaturity, fwdTensor, codeTensor), dim=1)
        #if computeSensi :
        #    inputTensor.requires_grad = True
        outputTensor = self.fe( inputTensor )[:, 0]
reshapedJacobian = None
if computeSensi :
reshapedJacobian = np.ones((nbObs, nbPoints, nbFactors)) if initialValueForFactors.ndim != 1 else np.ones((nbPoints, nbFactors))
#for p in range(nbPoints) :
# output.backward()
# jacobian = input.grad.data
# reshapedJacobian = tf.reshape(jacobian, shape = [nbObs, nbPoints, nbFactors])
# if self.verbose :
# print(reshapedJacobian)
calibratedSurfaces = outputTensor
factorSensi = None
if initialValueForFactors.ndim == 1 :
calibratedSurfaces = np.reshape(reconstructedSurface, (nbPoints))
if reshapedJacobian is not None :
factorSensi = np.reshape(reshapedJacobian, (nbPoints, nbFactors))
elif initialValueForFactors.ndim == 2 :
calibratedSurfaces = np.reshape(reconstructedSurface, (nbObs,nbPoints))
if reshapedJacobian is not None :
factorSensi = np.reshape(reshapedJacobian, (nbObs, nbPoints, nbFactors))
return calibratedSurfaces, factorSensi
| 43.788079 | 141 | 0.57078 | 6,160 | 0.931639 | 0 | 0 | 0 | 0 | 0 | 0 | 1,376 | 0.208106 |
b5122d95a7d1bf2036d53b90080cc31fd62623ab | 911 | py | Python | quickstart.py | s-samarth/Learning-Tensorflow | a46d8be1a741de5a0f366af83b8534cefcf0b615 | [
"MIT"
]
| null | null | null | quickstart.py | s-samarth/Learning-Tensorflow | a46d8be1a741de5a0f366af83b8534cefcf0b615 | [
"MIT"
]
| null | null | null | quickstart.py | s-samarth/Learning-Tensorflow | a46d8be1a741de5a0f366af83b8534cefcf0b615 | [
"MIT"
]
| null | null | null | import numpy as np
import tensorflow as tf
from tensorflow import keras
import warnings
warnings.filterwarnings('ignore')
mnist = tf.keras.datasets.mnist
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train, X_test = X_train / 255.0, X_test / 255.0
X_train.shape = (60000, 28, 28)  # no-op here: X_train already has this shape
model = tf.keras.Sequential([
tf.keras.layers.Flatten(input_shape=(28, 28)),
tf.keras.layers.Dense(128, activation='relu'),
tf.keras.layers.Dropout(0.2),
tf.keras.layers.Dense(10)
])
# output shape is (None, 10), i.e. (batch size, 10)
# logits are raw, unnormalized class scores; softmax maps them to probabilities
logits = model(X_train[:1]).numpy()
loss_fn = keras.losses.SparseCategoricalCrossentropy(from_logits=True)
model.compile(optimizer='adam', loss=loss_fn, metrics=['accuracy'])
model.fit(X_train, y_train, epochs=1)
model.evaluate(X_test, y_test, verbose=2)
probab_model = keras.Sequential([
model,
keras.layers.Softmax()
])
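# Usage sketch (the variables below are defined earlier in this script):
#   probs = probab_model(X_test[:5]).numpy()  # shape (5, 10), rows sum to 1
#   predicted = probs.argmax(axis=1)          # predicted class per sample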
| 27.606061 | 70 | 0.728869 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 110 | 0.120746 |
b512b2de6527126b947a320b79c117609580ec75 | 114 | py | Python | pyquil/__init__.py | ftripier/pyquil | 573d5ae64bbc594917ad46885fca0d8f5f3fe0e9 | [
"Apache-2.0"
]
| null | null | null | pyquil/__init__.py | ftripier/pyquil | 573d5ae64bbc594917ad46885fca0d8f5f3fe0e9 | [
"Apache-2.0"
]
| null | null | null | pyquil/__init__.py | ftripier/pyquil | 573d5ae64bbc594917ad46885fca0d8f5f3fe0e9 | [
"Apache-2.0"
]
| null | null | null | __version__ = "2.1.0.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
| 22.8 | 53 | 0.807018 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.105263 |
b513b3e8cf7e95cf1452eb5dac904886fb0f42d5 | 1,546 | py | Python | equation_solution.py | free-free/Algorithm | 174b6bc7a73f5ec1393149d238fc4496b2baedaa | [
"Apache-2.0"
]
| 7 | 2015-11-15T05:34:33.000Z | 2021-04-29T00:39:08.000Z | equation_solution.py | free-free/Algorithm | 174b6bc7a73f5ec1393149d238fc4496b2baedaa | [
"Apache-2.0"
]
| null | null | null | equation_solution.py | free-free/Algorithm | 174b6bc7a73f5ec1393149d238fc4496b2baedaa | [
"Apache-2.0"
]
| null | null | null | #!/bin/env python3.5
import math
def test_func(x):
return x*x*x+1.1*x*x+0.9*x-1.4
def df_test_func(x):
return 3*x*x+2.2*x+0.9
# binary separation (bisection method)
def binary_seperation(func, x, ap=0.001):
    x = sorted(x)  # sort a copy; the original list(x).sort() discarded its result
x2 = x.pop()
x1 = x.pop()
y1 = func(x1)
y2 = func(x2)
if y1 == 0:
return x1
if y2 == 0:
return x2
if y1*y2 > 0:
return ""
    while True:
        avg_x = (x1 + x2)/2
        avg_y = func(avg_x)
        if avg_y == 0:
            return avg_x
        if avg_y*y1 > 0:
            x1 = avg_x
        else:
            x2 = avg_x
        # convergence test after halving the interval; in the original elif
        # chain this test was unreachable, so the loop never terminated
        if abs(x1 - x2) < ap:
            return avg_x
# tangent line method (Newton's method): x_{n+1} = x_n - f(x_n)/f'(x_n)
def tanline(func, dfunc, x, ap=0.0001):
    x = sorted(x)  # sort a copy; the original list(x).sort() discarded its result
x2 = x.pop()
x1 = x.pop()
y1 = func(x1)
y2 = func(x2)
if y1 == 0:
return x1
if y2 == 0:
return x2
if y1*y2 > 0:
return ""
mid_y1 = func((x1+x2)/2)
mid_y2 = (y1+y2)/2
if mid_y1 < mid_y2:
convex = -1
else:
convex = 1
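    # choose the Newton starting endpoint according to the convexity estimate
    # above (comparing f at the midpoint of the bracket against the chord)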
if y1*convex > 0:
delta_x = x1
elif y2*convex > 0:
delta_x = x2
while True:
delta_y = func(delta_x)
if abs(delta_y) < ap:
return delta_x
delta_x = delta_x - func(delta_x)/dfunc(delta_x)
if __name__ == '__main__':
print(binary_seperation(test_func,[0,1] ))
print(tanline(test_func, df_test_func, [0,1]))
| 20.342105 | 57 | 0.467012 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 82 | 0.05304 |
b5141515336b431125ee2d8bfb2d41e31fd83729 | 3,428 | py | Python | test/test_sparql/test_sparql_parser.py | trishnaguha/rdfextras | c66b30de4a3b9cb67090add06cb8a9cf05d2c545 | [
"BSD-3-Clause"
]
| 3 | 2015-05-15T02:18:21.000Z | 2019-02-12T03:14:46.000Z | test/test_sparql/test_sparql_parser.py | trishnaguha/rdfextras | c66b30de4a3b9cb67090add06cb8a9cf05d2c545 | [
"BSD-3-Clause"
]
| 1 | 2015-11-05T15:18:36.000Z | 2015-11-05T16:44:04.000Z | test/test_sparql/test_sparql_parser.py | trishnaguha/rdfextras | c66b30de4a3b9cb67090add06cb8a9cf05d2c545 | [
"BSD-3-Clause"
]
| 4 | 2015-11-05T07:24:41.000Z | 2022-01-18T07:54:43.000Z | import unittest
from rdflib import Graph
def buildQueryArgs(q):
return dict(select="", where="", optional="")
class SPARQLParserTest(unittest.TestCase):
known_issue = True
def setUp(self):
self.graph = Graph()
pass
def tearDown(self):
pass
tests = [
("basic",
"""\
SELECT ?name
WHERE { ?a <http://xmlns.com/foaf/0.1/name> ?name }"""),
("simple_prefix",
"""\
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
SELECT ?name
WHERE { ?a foaf:name ?name }"""),
("base_statement",
"""\
BASE <http://xmlns.com/foaf/0.1/>
SELECT ?name
WHERE { ?a <name> ?name }"""),
("prefix_and_colon_only_prefix",
"""\
PREFIX : <http://xmlns.com/foaf/0.1/>
PREFIX vcard: <http://www.w3.org/2001/vcard-rdf/3.0#>
SELECT ?name ?title
WHERE {
?a :name ?name .
?a vcard:TITLE ?title
}"""),
("predicate_object_list_notation",
"""\
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
SELECT ?name ?mbox
WHERE {
?x foaf:name ?name ;
foaf:mbox ?mbox .
}"""),
("object_list_notation",
"""\
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
SELECT ?x
WHERE {
?x foaf:nick "Alice" ,
"Alice_" .
}
"""),
("escaped_literals",
"""\
PREFIX tag: <http://xmlns.com/foaf/0.1/>
PREFIX vcard: <http://www.w3.org/2001/vcard-rdf/3.0#>
SELECT ?name
WHERE {
?a tag:name ?name ;
vcard:TITLE "escape test vcard:TITLE " ;
<tag://test/escaping> "This is a ''' Test \"\"\"" ;
<tag://test/escaping> ?d
}
"""),
("key_word_as_variable",
"""\
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
SELECT ?PREFIX ?WHERE
WHERE {
?x foaf:name ?PREFIX ;
foaf:mbox ?WHERE .
}"""),
("key_word_as_prefix",
"""\
PREFIX WHERE: <http://xmlns.com/foaf/0.1/>
SELECT ?name ?mbox
WHERE {
?x WHERE:name ?name ;
WHERE:mbox ?mbox .
}"""),
("some_test_cases_from_grammar_py_1",
"""\
SELECT ?title
WHERE {
<http://example.org/book/book1>
<http://purl.org/dc/elements/1.1/title>
?title .
}"""),
("some_test_cases_from_grammar_py_2",
"""\
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
SELECT ?name ?mbox
WHERE { ?person foaf:name ?name .
OPTIONAL { ?person foaf:mbox ?mbox}
}"""),
("some_test_cases_from_grammar_py_3",
"""\
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
SELECT ?name ?name2
WHERE { ?person foaf:name ?name .
OPTIONAL { ?person foaf:knows ?p2 . ?p2 foaf:name ?name2 . }
}"""),
("some_test_cases_from_grammar_py_4",
"""\
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
#PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
SELECT ?name ?mbox
WHERE
{
{ ?person rdf:type foaf:Person } .
OPTIONAL { ?person foaf:name ?name } .
OPTIONAL {?person foaf:mbox ?mbox} .
}""")
]
def _buildQueryArg(q):
res = buildQueryArgs(q)
if res.get('select', False):
assert res["select"] is not None
if res.get('where', False):
assert res["where"] is not None
if res.get('optional', False):
assert res["optional"] is not None
# result = sparqlGr.query(select, where, optional)
# self.assert_(self.graph.query(q) is not None)
| 25.774436 | 66 | 0.540548 | 167 | 0.048716 | 0 | 0 | 0 | 0 | 0 | 0 | 2,714 | 0.791715 |
b514465b893f2b24e5e74f092847f939b1d7ff56 | 1,228 | py | Python | day1.py | derek-elliott/aoc2018 | b7fed374e94665cb70eff413297b2e3865f835c4 | [
"MIT"
]
| null | null | null | day1.py | derek-elliott/aoc2018 | b7fed374e94665cb70eff413297b2e3865f835c4 | [
"MIT"
]
| null | null | null | day1.py | derek-elliott/aoc2018 | b7fed374e94665cb70eff413297b2e3865f835c4 | [
"MIT"
]
| null | null | null | class Frequency:
def __init__(self):
self.frequency = 0
def increment(self, i):
self.frequency = self.frequency + i
def decrement(self, i):
self.frequency = self.frequency - i
def __str__(self):
return str(self.frequency)
def __repr__(self):
return str(self.frequency)
def get_steps(filename):
with open(f'data/{filename}', 'r') as f:
raw_steps = f.readlines()
steps = []
for i in raw_steps:
steps.append([i[0], int(i[1:])])
return steps
def part_one():
freq = Frequency()
steps = get_steps('day1-1.txt')
ops = {'+': freq.increment, '-': freq.decrement}
for i in steps:
ops[i[0]](i[1])
return freq
def part_two():
    freq = Frequency()
    steps = get_steps('day1-1.txt')
    ops = {'+': freq.increment, '-': freq.decrement}
    current = 0
    already_seen = set()  # set membership is O(1), unlike the original list
    while current not in already_seen:
        for i in steps:
            if current in already_seen:
                break
            already_seen.add(freq.frequency)
            ops[i[0]](i[1])
            current = freq.frequency  # read the value directly instead of int(str(freq))
    return freq
if __name__ == '__main__':
print(f'Part 1: {part_one()}\nPart 2: {part_two()}')
| 25.583333 | 56 | 0.566775 | 328 | 0.267101 | 0 | 0 | 0 | 0 | 0 | 0 | 112 | 0.091205 |
b5155fe82e40d2b63e9f83d54603790af7163224 | 5,122 | py | Python | file_sync_tool/transfer/process.py | jackd248/file-sync-tool | 68fbca562f232c2bc064f546d9eade20a2ae456f | [
"MIT"
]
| null | null | null | file_sync_tool/transfer/process.py | jackd248/file-sync-tool | 68fbca562f232c2bc064f546d9eade20a2ae456f | [
"MIT"
]
| null | null | null | file_sync_tool/transfer/process.py | jackd248/file-sync-tool | 68fbca562f232c2bc064f546d9eade20a2ae456f | [
"MIT"
]
| null | null | null | #!/usr/bin/env python3
# -*- coding: future_fstrings -*-
from db_sync_tool.utility import mode, system, output, helper
from db_sync_tool.remote import client as remote_client
from file_sync_tool.transfer import utility
def transfer_files():
"""
    Transferring configured files between clients
:return:
"""
if 'files' in system.config:
for config in system.config['files']['config']:
output.message(
output.Subject.INFO,
f'Starting rsync file transfer'
)
if 'exclude' not in config:
config['exclude'] = []
if mode.get_sync_mode() == mode.SyncMode.PROXY:
# Proxy mode: Transferring from origin to local and from local to target
utility.generate_temp_dir_name()
helper.check_and_create_dump_dir(mode.Client.LOCAL, utility.temp_data_dir)
synchronize(
origin_path=config[mode.Client.ORIGIN],
target_path=utility.temp_data_dir,
exclude=config['exclude'],
pseudo_client=mode.Client.ORIGIN
)
synchronize(
origin_path=f'{utility.temp_data_dir}/*',
target_path=config[mode.Client.TARGET],
exclude=config['exclude'],
pseudo_client=mode.Client.TARGET
)
utility.remove_temporary_dir()
elif mode.get_sync_mode() == mode.SyncMode.SYNC_REMOTE:
synchronize(
origin_path=config[mode.Client.ORIGIN],
target_path=config[mode.Client.TARGET],
exclude=config['exclude'],
client=mode.Client.ORIGIN,
force_remote=True
)
else:
synchronize(
origin_path=config[mode.Client.ORIGIN],
target_path=config[mode.Client.TARGET],
exclude=config['exclude']
)
    else:
        output.message(
            output.Subject.WARNING,
            'No file sync configuration provided'
        )
def synchronize(origin_path, target_path, exclude, client=mode.Client.LOCAL, pseudo_client=None, force_remote=False):
"""
Using rsync command to synchronize files between systems
:param origin_path: String
:param target_path: String
:param exclude: List
:param client: String
:param pseudo_client: String Client, which will be forced as remote client. Necessary for proxy transfer.
:param force_remote: Boolean
:return:
"""
_remote_client = None
if force_remote:
remote_client.load_ssh_client_origin()
_origin_subject = f'{output.Subject.ORIGIN}{output.CliFormat.BLACK}[REMOTE]{output.CliFormat.ENDC} '
_target_subject = f'{output.Subject.TARGET}{output.CliFormat.BLACK}[REMOTE]{output.CliFormat.ENDC} '
elif mode.is_remote(mode.Client.ORIGIN) and pseudo_client != mode.Client.TARGET:
_remote_client = mode.Client.ORIGIN
_origin_subject = f'{output.Subject.ORIGIN}{output.CliFormat.BLACK}[REMOTE]{output.CliFormat.ENDC} '
_target_subject = f'{output.Subject.TARGET}{output.CliFormat.BLACK}[LOCAL]{output.CliFormat.ENDC} '
elif mode.is_remote(mode.Client.TARGET) and pseudo_client != mode.Client.ORIGIN:
_remote_client = mode.Client.TARGET
_origin_subject = f'{output.Subject.ORIGIN}{output.CliFormat.BLACK}[LOCAL]{output.CliFormat.ENDC} '
_target_subject = f'{output.Subject.TARGET}{output.CliFormat.BLACK}[REMOTE]{output.CliFormat.ENDC} '
elif not mode.is_remote(mode.Client.TARGET) and not mode.is_remote(mode.Client.ORIGIN):
_origin_subject = f'{output.Subject.ORIGIN}{output.CliFormat.BLACK}[LOCAL]{output.CliFormat.ENDC} '
_target_subject = f'{output.Subject.TARGET}{output.CliFormat.BLACK}[LOCAL]{output.CliFormat.ENDC} '
_origin_name = helper.get_ssh_host_name(mode.Client.ORIGIN, True) if _remote_client == mode.Client.ORIGIN else ''
_target_name = helper.get_ssh_host_name(mode.Client.TARGET, True) if _remote_client == mode.Client.TARGET else ''
if not system.config['mute']:
print(
f'{_origin_subject}'
f'{_origin_name}'
f'{output.CliFormat.BLACK}{origin_path}{output.CliFormat.ENDC}'
)
print(
f'{_target_subject}'
f'{_target_name}'
f'{output.CliFormat.BLACK}{target_path}{output.CliFormat.ENDC}'
)
_origin_user_host = utility.get_host(mode.Client.ORIGIN) if _remote_client == mode.Client.ORIGIN else ''
_target_user_host = utility.get_host(mode.Client.TARGET) if _remote_client == mode.Client.TARGET else ''
_output = mode.run_command(
f'{utility.get_password_environment(_remote_client)}rsync {utility.get_options()} '
f'{utility.get_authorization(_remote_client)} {utility.get_excludes(exclude)}'
f'{_origin_user_host}{origin_path} {_target_user_host}{target_path}',
client,
True
)
utility.read_stats(_output)
| 44.53913 | 117 | 0.639594 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,834 | 0.358063 |
82ecef2ff3628afa85b6185c93f911d25a8014e1 | 5,635 | py | Python | src/main/python/Training+Data+Analysis.py | sully90/dp-search-service | efc6a94dad1c5b3fc898da9ced1606aa345c7ecd | [
"MIT"
]
| null | null | null | src/main/python/Training+Data+Analysis.py | sully90/dp-search-service | efc6a94dad1c5b3fc898da9ced1606aa345c7ecd | [
"MIT"
]
| null | null | null | src/main/python/Training+Data+Analysis.py | sully90/dp-search-service | efc6a94dad1c5b3fc898da9ced1606aa345c7ecd | [
"MIT"
]
| null | null | null |
# coding: utf-8
# # Training Set Analysis
# The purpose of this notebook is to compute the kernel density estimate of the PDF between the judgement and each feature in a training set, in order to estimate how each feature is performing.
#
# # TODO
# Modify features to use custom function score queries, and use ML to optimise features given the below plot (plot ideal relationship between judgement and features, and optimise to get as close as possible).
# In[51]:
import sys
import numpy as np
import scipy
from scipy import stats
import matplotlib.pylab as plt
models_dir = "/Users/sullid/ONS/dp-search-service/src/main/resources/elastic.ltr/models"
model_name = sys.argv[1]
# In[52]:
class TrainingData(object):
"""
Class to handle the loading of training sets
"""
def __init__(self, model_dir, model_name):
self.model_dir = model_dir
self.model_name = model_name
self.data = {}
self.load()
def load(self):
fname = "%s/%s/ons_train.txt" % (self.model_dir, self.model_name)
with open(fname, 'r') as f:
lines = f.readlines()
qid_dict = {}
# First collect the qids
for line in lines:
parts = line.split("\t")
qid_part = parts[1]
qid = int(qid_part.split(":")[1])
if (qid not in qid_dict):
qid_dict[qid] = []
qid_dict[qid].append(line)
# Process each line by the qid
for qid in qid_dict.keys():
lines = qid_dict[qid]
self.data[qid] = {}
for line in lines:
if (line.startswith("#")):
continue
parts = line.split("\t")
if (len(parts) > 0):
for part in parts:
if ('#' in part):
part = part[0:part.index("#")].strip()
key = "J"
val = 0.0
if (":" in part):
key = part.split(":")[0]
val = float(part.split(":")[-1])
else:
val = float(part)
if (key not in self.data[qid]):
self.data[qid][key] = []
self.data[qid][key].append(val)
def get(self, qid, item):
return np.array(self.data[qid][item])
def qids(self):
return self.data.keys()
def keys(self, qid):
return self.data[qid].keys()
def min(self, qid, item):
return min(self.get(qid, item))
    def max(self, qid, item):
        return max(self.get(qid, item))
def size(self, qid):
return len(self.get(qid, "J"))
def numFeatures(self, qid):
return len(self.keys(qid)) - 2 # - 2 to account for judgement (J) and qid
trainingData = TrainingData(models_dir, model_name)
# In[56]:
import matplotlib.gridspec as gridspec
import matplotlib.pylab as pylab
fs=25
params = {'legend.fontsize': 'x-large',
'figure.figsize': (15, 5),
'axes.labelsize': fs,
'axes.titlesize':fs,
'xtick.labelsize':fs,
'ytick.labelsize':fs}
pylab.rcParams.update(params)
def getValues(trainingData, qid, i):
if (i == 0):
return "Judgement", trainingData.get(qid, "J")
# elif (i == 1 or i == 9):
# return str(i), np.log10(trainingData[str(i)])
else:
return "Feature %d" % i, trainingData.get(qid, str(i))
def fitKernel(x,y,n=100j):
xmin,xmax,ymin,ymax=(x.min(),x.max(),y.min(),y.max())
X, Y = np.mgrid[xmin:xmax:n, ymin:ymax:n]
positions = np.vstack([X.ravel(), Y.ravel()])
values = np.vstack([x, y])
kernel = stats.gaussian_kde(values)
Z = np.reshape(kernel(positions).T, X.shape)
return np.rot90(Z), (xmin, xmax, ymin, ymax)
def forceAspect(ax,aspect=1):
im = ax.get_images()
extent = im[0].get_extent()
ax.set_aspect(abs((extent[1]-extent[0])/(extent[3]-extent[2]))/aspect)
print "QIDS: ", trainingData.qids()
for qid in trainingData.qids():
numFeatures = trainingData.numFeatures(qid) + 1
fig = plt.figure(figsize=(50, 50))
plt.suptitle('qid:%d' % qid, fontsize=fs*1.5)
gs = gridspec.GridSpec(numFeatures, numFeatures)
for i in range(numFeatures):
rowLabel, rowValues = getValues(trainingData, qid, i)
labelRow = True
for j in range(i+1, numFeatures):
colLabel, colValues = getValues(trainingData, qid, j)
ax = plt.subplot(gs[i,j-numFeatures])
# ax.text(0.25, 0.5, "ax %d:%d" % (i,j))
if (labelRow):
ax.set_ylabel(rowLabel)
labelRow = False
if (j == (i+1)):
ax.set_xlabel(colLabel)
try:
Z, (xmin,xmax,ymin,ymax) = fitKernel(colValues, rowValues, 200j)
extent = [xmin,xmax,ymin,ymax]
ax.imshow(Z, cmap=plt.cm.gist_stern_r, extent=extent)
ax.set_xlim([xmin, xmax])
ax.set_ylim([ymin, ymax])
forceAspect(ax)
except:
pass
# ax.imshow(np.rot90(Z), cmap=plt.cm.gist_earth_r,
# extent=[xmin, xmax, ymin, ymax], aspect=50)
# ax.plot(x, y, 'k.', markersize=2)
# ax.set_xlim([xmin, xmax])
# ax.set_ylim([ymin, ymax])
plt.show()
| 31.836158 | 208 | 0.525998 | 2,389 | 0.423957 | 0 | 0 | 0 | 0 | 0 | 0 | 1,246 | 0.221118 |
82ef5f165b2867c26c9995bbb6a7009f9e7374ac | 3,623 | py | Python | src/utility_lib/collection_utilities/collection_utilities.py | DonalChilde/utility_lib | 9cf1cc142e5fcbf99f9f2e9bf6099520cc3eb545 | [
"MIT"
]
| null | null | null | src/utility_lib/collection_utilities/collection_utilities.py | DonalChilde/utility_lib | 9cf1cc142e5fcbf99f9f2e9bf6099520cc3eb545 | [
"MIT"
]
| null | null | null | src/utility_lib/collection_utilities/collection_utilities.py | DonalChilde/utility_lib | 9cf1cc142e5fcbf99f9f2e9bf6099520cc3eb545 | [
"MIT"
]
| null | null | null | from operator import attrgetter, itemgetter
from typing import (
Sequence,
Any,
Tuple,
Union,
List,
Iterable,
Dict,
Callable,
NamedTuple,
)
SortSpec = NamedTuple("SortSpec", [("sort_key", Union[str, int]), ("reversed", bool)])
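# Example (illustrative): primary key "age" descending, then "name" ascending.
# The sort functions below apply specs in reverse, so specs[0] is primary:
#   specs = [SortSpec("age", True), SortSpec("name", False)]
#   sort_in_place(people, specs, use_get_item=False)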
def optional_collection(argument, collection_factory):
if argument is None:
return collection_factory()
return argument
def sort_in_place(
xs: List[Union[Sequence, Any]], specs: Sequence[SortSpec], use_get_item: bool,
):
if not hasattr(xs, "sort"):
raise AttributeError(
"Sortable must be a mutable type with a sort function, e.g. List"
)
if use_get_item:
for key, reverse in reversed(specs):
xs.sort(key=itemgetter(key), reverse=reverse)
return xs
for key, reverse in reversed(specs):
xs.sort(key=attrgetter(key), reverse=reverse) # type: ignore
return xs
def sort_to_new_list(
    xs: Iterable[Union[Sequence, Any]],
    specs: Sequence[Tuple[Union[str, int], bool]],
    use_get_item: bool,
):
    # Stable multi-pass sort: applying the specs in reverse order makes
    # specs[0] the primary key. The previous version double-sorted on the
    # first pass and, in the attrgetter branch, seeded that pass with
    # itemgetter by mistake.
    getter = itemgetter if use_get_item else attrgetter
    sorted_list = list(xs)
    for key, reverse in reversed(specs):
        sorted_list = sorted(sorted_list, key=getter(key), reverse=reverse)  # type: ignore
    return sorted_list
def index_list_of_dicts(
data: Sequence[Dict[str, Any]], key_field: str
) -> Dict[str, Dict[str, Any]]:
# TODO save to utilities
result = {}
for item in data:
key_field_value = item[key_field] # will error if field not found
result[str(key_field_value)] = item
return result
def index_list_of_objects(
data: Iterable[Union[Sequence, Any]],
key_field,
use_get_item: bool,
cast_index: Callable = None,
):
"""
Will index a list of objects based on key_field.
Returns a dict with key based on key_field of object
Parameters
----------
data : Iterable[Union[Sequence, Any]]
[description]
key : [type]
[description]
use_get_item : bool
[description]
cast_index : Callable, optional
[description], by default None
Returns
-------
[type]
[description]
"""
if use_get_item:
indexer = itemgetter(key_field)
else:
indexer = attrgetter(key_field)
result = {}
for item in data:
key_field_value = indexer(item)
if cast_index is not None:
key_field_value = cast_index(key_field_value)
result[key_field_value] = item
return result
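# Example (illustrative):
#   rows = [{"id": "a", "v": 1}, {"id": "b", "v": 2}]
#   index_list_of_objects(rows, "id", use_get_item=True)
#   -> {"a": {"id": "a", "v": 1}, "b": {"id": "b", "v": 2}}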
def index_list_of_objects_multiple(
data: Iterable[Union[Sequence, Any]],
key_field,
use_get_item: bool,
cast_index: Callable = None,
) -> Dict[Any, List[Any]]:
if use_get_item:
indexer = itemgetter(key_field)
else:
indexer = attrgetter(key_field)
result: Dict[Any, List[Any]] = {}
for item in data:
key_field_value = indexer(item)
if cast_index is not None:
key_field_value = cast_index(key_field_value)
indexed_field = result.get(key_field_value, [])
indexed_field.append(item)
result[key_field_value] = indexed_field
return result
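# Unlike index_list_of_objects, the _multiple variant groups collisions:
#   index_list_of_objects_multiple(rows + rows, "id", use_get_item=True)
#   -> {"a": [<item>, <item>], "b": [<item>, <item>]}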
| 27.44697 | 95 | 0.629865 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 627 | 0.173061 |
82efe2ac2a9247e0f62ea4de8994ddbcc198a68c | 368 | py | Python | manage_it/catalog/admin.py | ShangShungInstitute/django-manage-it | 13cb23b57ce3577db7f69250741bcbfe82b69a57 | [
"MIT",
"Unlicense"
]
| 1 | 2015-01-20T14:34:32.000Z | 2015-01-20T14:34:32.000Z | manage_it/catalog/admin.py | ShangShungInstitute/django-manage-it | 13cb23b57ce3577db7f69250741bcbfe82b69a57 | [
"MIT",
"Unlicense"
]
| null | null | null | manage_it/catalog/admin.py | ShangShungInstitute/django-manage-it | 13cb23b57ce3577db7f69250741bcbfe82b69a57 | [
"MIT",
"Unlicense"
]
| null | null | null | from django.contrib import admin
from models import Location, ItemTemplate, Log, Inventory, Supplier
class ItemTemplateAdmin(admin.ModelAdmin):
filter_horizontal = ('supplies', 'suppliers')
admin.site.register(Location)
admin.site.register(ItemTemplate, ItemTemplateAdmin)
admin.site.register(Log)
admin.site.register(Inventory)
admin.site.register(Supplier)
| 24.533333 | 67 | 0.807065 | 92 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 21 | 0.057065 |
82f077676382d093577f0c067da14ea95e4000cd | 1,557 | py | Python | core_get/actions/module.py | core-get/core-get | 8fb960e4e51d0d46b5e3b2f4832eb4a39e0e60f7 | [
"MIT"
]
| null | null | null | core_get/actions/module.py | core-get/core-get | 8fb960e4e51d0d46b5e3b2f4832eb4a39e0e60f7 | [
"MIT"
]
| null | null | null | core_get/actions/module.py | core-get/core-get | 8fb960e4e51d0d46b5e3b2f4832eb4a39e0e60f7 | [
"MIT"
]
| null | null | null | from typing import Dict, Type
from injector import Module, Binder
from core_get.actions.action import Action
from core_get.actions.init.init import Init
from core_get.actions.init.init_options import InitOptions
from core_get.actions.install.install import Install
from core_get.actions.install.install_options import InstallOptions
from core_get.actions.login.login import Login
from core_get.actions.login.login_options import LoginOptions
from core_get.actions.package.package import Package
from core_get.actions.package.package_options import PackageOptions
from core_get.actions.publish.publish import Publish
from core_get.actions.publish.publish_options import PublishOptions
from core_get.actions.remove.remove import Remove
from core_get.actions.remove.remove_options import RemoveOptions
from core_get.actions.test.test import Test
from core_get.actions.test.test_options import TestOptions
from core_get.actions.yank.yank import Yank
from core_get.actions.yank.yank_options import YankOptions
from core_get.options.options import Options
from core_get.utils.injection import MultiMapClassProvider
class ActionsModule(Module):
def configure(self, binder: Binder) -> None:
binder.multibind(Dict[Type[Options], Action], to=MultiMapClassProvider({
InitOptions: Init,
InstallOptions: Install,
LoginOptions: Login,
PackageOptions: Package,
PublishOptions: Publish,
RemoveOptions: Remove,
TestOptions: Test,
YankOptions: Yank,
}))
| 39.923077 | 80 | 0.787412 | 443 | 0.284522 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
82f09047154f2ff625d401ca2b201a78ede23eab | 8,773 | py | Python | venv/Lib/site-packages/direct/particles/ParticleEffect.py | ferris77/pacman | 9d793146189630b4305af0bc7af65ce822b3998f | [
"MIT"
]
| null | null | null | venv/Lib/site-packages/direct/particles/ParticleEffect.py | ferris77/pacman | 9d793146189630b4305af0bc7af65ce822b3998f | [
"MIT"
]
| 20 | 2021-05-03T18:02:23.000Z | 2022-03-12T12:01:04.000Z | Lib/site-packages/direct/particles/ParticleEffect.py | fochoao/cpython | 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | [
"bzip2-1.0.6",
"0BSD"
]
| 1 | 2021-04-09T00:02:59.000Z | 2021-04-09T00:02:59.000Z |
from panda3d.core import *
# Leave these imports in, they may be used by ptf files.
from panda3d.physics import *
from . import Particles
from . import ForceGroup
from direct.directnotify import DirectNotifyGlobal
class ParticleEffect(NodePath):
notify = DirectNotifyGlobal.directNotify.newCategory('ParticleEffect')
pid = 1
def __init__(self, name=None, particles=None):
if name is None:
name = 'particle-effect-%d' % ParticleEffect.pid
ParticleEffect.pid += 1
NodePath.__init__(self, name)
# Record particle effect name
self.name = name
# Enabled flag
self.fEnabled = 0
# Dictionary of particles and forceGroups
self.particlesDict = {}
self.forceGroupDict = {}
# The effect's particle system
if particles is not None:
self.addParticles(particles)
self.renderParent = None
def cleanup(self):
self.removeNode()
self.disable()
if self.__isValid():
for f in self.forceGroupDict.values():
f.cleanup()
for p in self.particlesDict.values():
p.cleanup()
del self.forceGroupDict
del self.particlesDict
del self.renderParent
def getName(self):
# override NodePath.getName()
return self.name
def reset(self):
self.removeAllForces()
self.removeAllParticles()
self.forceGroupDict = {}
self.particlesDict = {}
def start(self, parent=None, renderParent=None):
assert self.notify.debug('start() - name: %s' % self.name)
self.renderParent = renderParent
self.enable()
if parent is not None:
self.reparentTo(parent)
def enable(self):
# band-aid added for client crash - grw
if self.__isValid():
if self.renderParent:
for p in self.particlesDict.values():
p.setRenderParent(self.renderParent.node())
for f in self.forceGroupDict.values():
f.enable()
for p in self.particlesDict.values():
p.enable()
self.fEnabled = 1
def disable(self):
self.detachNode()
# band-aid added for client crash - grw
if self.__isValid():
for p in self.particlesDict.values():
p.setRenderParent(p.node)
for f in self.forceGroupDict.values():
f.disable()
for p in self.particlesDict.values():
p.disable()
self.fEnabled = 0
def isEnabled(self):
"""
Note: this may be misleading if enable(), disable() not used
"""
return self.fEnabled
def addForceGroup(self, forceGroup):
forceGroup.nodePath.reparentTo(self)
forceGroup.particleEffect = self
self.forceGroupDict[forceGroup.getName()] = forceGroup
# Associate the force group with all particles
for i in range(len(forceGroup)):
self.addForce(forceGroup[i])
def addForce(self, force):
for p in list(self.particlesDict.values()):
p.addForce(force)
def removeForceGroup(self, forceGroup):
# Remove forces from all particles
for i in range(len(forceGroup)):
self.removeForce(forceGroup[i])
forceGroup.nodePath.removeNode()
forceGroup.particleEffect = None
self.forceGroupDict.pop(forceGroup.getName(), None)
def removeForce(self, force):
for p in list(self.particlesDict.values()):
p.removeForce(force)
def removeAllForces(self):
for fg in list(self.forceGroupDict.values()):
self.removeForceGroup(fg)
def addParticles(self, particles):
particles.nodePath.reparentTo(self)
self.particlesDict[particles.getName()] = particles
# Associate all forces in all force groups with the particles
for fg in list(self.forceGroupDict.values()):
for i in range(len(fg)):
particles.addForce(fg[i])
def removeParticles(self, particles):
if particles is None:
self.notify.warning('removeParticles() - particles == None!')
return
particles.nodePath.detachNode()
self.particlesDict.pop(particles.getName(), None)
# Remove all forces from the particles
for fg in list(self.forceGroupDict.values()):
for f in fg:
particles.removeForce(f)
def removeAllParticles(self):
for p in list(self.particlesDict.values()):
self.removeParticles(p)
def getParticlesList(self):
return list(self.particlesDict.values())
def getParticlesNamed(self, name):
return self.particlesDict.get(name, None)
def getParticlesDict(self):
return self.particlesDict
def getForceGroupList(self):
return list(self.forceGroupDict.values())
def getForceGroupNamed(self, name):
return self.forceGroupDict.get(name, None)
def getForceGroupDict(self):
return self.forceGroupDict
def saveConfig(self, filename):
filename = Filename(filename)
with open(filename.toOsSpecific(), 'w') as f:
# Add a blank line
f.write('\n')
# Make sure we start with a clean slate
f.write('self.reset()\n')
pos = self.getPos()
hpr = self.getHpr()
scale = self.getScale()
f.write('self.setPos(%0.3f, %0.3f, %0.3f)\n' %
(pos[0], pos[1], pos[2]))
f.write('self.setHpr(%0.3f, %0.3f, %0.3f)\n' %
(hpr[0], hpr[1], hpr[2]))
f.write('self.setScale(%0.3f, %0.3f, %0.3f)\n' %
(scale[0], scale[1], scale[2]))
# Save all the particles to file
num = 0
for p in list(self.particlesDict.values()):
target = 'p%d' % num
num = num + 1
f.write(target + ' = Particles.Particles(\'%s\')\n' % p.getName())
p.printParams(f, target)
f.write('self.addParticles(%s)\n' % target)
# Save all the forces to file
num = 0
for fg in list(self.forceGroupDict.values()):
target = 'f%d' % num
num = num + 1
f.write(target + ' = ForceGroup.ForceGroup(\'%s\')\n' % \
fg.getName())
fg.printParams(f, target)
f.write('self.addForceGroup(%s)\n' % target)
def loadConfig(self, filename):
vfs = VirtualFileSystem.getGlobalPtr()
data = vfs.readFile(filename, 1)
data = data.replace(b'\r', b'')
try:
exec(data)
except:
self.notify.warning('loadConfig: failed to load particle file: '+ repr(filename))
raise
def accelerate(self,time,stepCount = 1,stepTime=0.0):
for particles in self.getParticlesList():
particles.accelerate(time,stepCount,stepTime)
def clearToInitial(self):
for particles in self.getParticlesList():
particles.clearToInitial()
def softStop(self):
for particles in self.getParticlesList():
particles.softStop()
def softStart(self, firstBirthDelay=None):
if self.__isValid():
for particles in self.getParticlesList():
if firstBirthDelay is not None:
particles.softStart(br=-1, first_birth_delay=firstBirthDelay)
else:
particles.softStart()
else:
# Not asserting here since we want to crash live clients for more expedient bugfix
# (Sorry, live clients)
self.notify.error('Trying to start effect(%s) after cleanup.' % (self.getName(),))
def __isValid(self):
return hasattr(self, 'forceGroupDict') and \
hasattr(self, 'particlesDict')
# Snake-case aliases.
is_enabled = isEnabled
add_force_group = addForceGroup
add_force = addForce
remove_force_group = removeForceGroup
remove_force = removeForce
remove_all_forces = removeAllForces
add_particles = addParticles
remove_particles = removeParticles
remove_all_particles = removeAllParticles
get_particles_list = getParticlesList
get_particles_named = getParticlesNamed
get_particles_dict = getParticlesDict
get_force_group_list = getForceGroupList
get_force_group_named = getForceGroupNamed
get_force_group_dict = getForceGroupDict
save_config = saveConfig
load_config = loadConfig
clear_to_initial = clearToInitial
soft_stop = softStop
soft_start = softStart
| 33.484733 | 94 | 0.596831 | 8,553 | 0.974923 | 0 | 0 | 0 | 0 | 0 | 0 | 1,270 | 0.144762 |
82f2ce225147eef11b48a4a976040f87c859485c | 7,566 | py | Python | orchestra/contrib/payments/models.py | udm88/django-orchestra | 49c84f13a8f92427b01231615136549fb5be3a78 | [
"Unlicense"
]
| 68 | 2015-02-09T10:28:44.000Z | 2022-03-12T11:08:36.000Z | orchestra/contrib/payments/models.py | ferminhg/django-orchestra | 49c84f13a8f92427b01231615136549fb5be3a78 | [
"Unlicense"
]
| 17 | 2015-05-01T18:10:03.000Z | 2021-03-19T21:52:55.000Z | orchestra/contrib/payments/models.py | ferminhg/django-orchestra | 49c84f13a8f92427b01231615136549fb5be3a78 | [
"Unlicense"
]
| 29 | 2015-03-31T04:51:03.000Z | 2022-02-17T02:58:50.000Z | from django.core.exceptions import ValidationError
from django.db import models
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from jsonfield import JSONField
from orchestra.models.fields import PrivateFileField
from orchestra.models.queryset import group_by
from . import settings
from .methods import PaymentMethod
class PaymentSourcesQueryset(models.QuerySet):
def get_default(self):
return self.filter(is_active=True).first()
class PaymentSource(models.Model):
account = models.ForeignKey('accounts.Account', verbose_name=_("account"),
related_name='paymentsources')
method = models.CharField(_("method"), max_length=32,
choices=PaymentMethod.get_choices())
    data = JSONField(_("data"), default=dict)  # callable default avoids sharing one mutable dict
is_active = models.BooleanField(_("active"), default=True)
objects = PaymentSourcesQueryset.as_manager()
def __str__(self):
return "%s (%s)" % (self.label, self.method_class.verbose_name)
@cached_property
def method_class(self):
return PaymentMethod.get(self.method)
@cached_property
def method_instance(self):
""" Per request lived method_instance """
return self.method_class(self)
@cached_property
def label(self):
return self.method_instance.get_label()
@cached_property
def number(self):
return self.method_instance.get_number()
def get_bill_context(self):
method = self.method_instance
return {
'message': method.get_bill_message(),
}
def get_due_delta(self):
return self.method_instance.due_delta
def clean(self):
self.data = self.method_instance.clean_data()
class TransactionQuerySet(models.QuerySet):
group_by = group_by
def create(self, **kwargs):
source = kwargs.get('source')
if source is None or not hasattr(source.method_class, 'process'):
# Manual payments don't need processing
kwargs['state'] = self.model.WAITTING_EXECUTION
amount = kwargs.get('amount')
if amount == 0:
kwargs['state'] = self.model.SECURED
return super(TransactionQuerySet, self).create(**kwargs)
def secured(self):
return self.filter(state=Transaction.SECURED)
def exclude_rejected(self):
return self.exclude(state=Transaction.REJECTED)
def amount(self):
return next(iter(self.aggregate(models.Sum('amount')).values())) or 0
def processing(self):
return self.filter(state__in=[Transaction.EXECUTED, Transaction.WAITTING_EXECUTION])
class Transaction(models.Model):
WAITTING_PROCESSING = 'WAITTING_PROCESSING' # CREATED
WAITTING_EXECUTION = 'WAITTING_EXECUTION' # PROCESSED
EXECUTED = 'EXECUTED'
SECURED = 'SECURED'
REJECTED = 'REJECTED'
STATES = (
        (WAITTING_PROCESSING, _("Waiting processing")),
        (WAITTING_EXECUTION, _("Waiting execution")),
(EXECUTED, _("Executed")),
(SECURED, _("Secured")),
(REJECTED, _("Rejected")),
)
STATE_HELP = {
WAITTING_PROCESSING: _("The transaction is created and requires processing by the "
"specific payment method."),
        WAITTING_EXECUTION: _("The transaction is processed and is pending execution on "
                               "the related financial institution."),
EXECUTED: _("The transaction is executed on the financial institution."),
        SECURED: _("The transaction amount is secured."),
        REJECTED: _("The transaction has failed and the amount is lost; a new transaction "
            "should be created for recharging."),
}
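    # Typical lifecycle (derived from the states above, not an enforced
    # state machine): WAITTING_PROCESSING -> WAITTING_EXECUTION -> EXECUTED
    # -> SECURED, with REJECTED as the failure terminal state.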
bill = models.ForeignKey('bills.bill', verbose_name=_("bill"),
related_name='transactions')
source = models.ForeignKey(PaymentSource, null=True, blank=True, on_delete=models.SET_NULL,
verbose_name=_("source"), related_name='transactions')
process = models.ForeignKey('payments.TransactionProcess', null=True, blank=True,
on_delete=models.SET_NULL, verbose_name=_("process"), related_name='transactions')
state = models.CharField(_("state"), max_length=32, choices=STATES,
default=WAITTING_PROCESSING)
amount = models.DecimalField(_("amount"), max_digits=12, decimal_places=2)
currency = models.CharField(max_length=10, default=settings.PAYMENT_CURRENCY)
created_at = models.DateTimeField(_("created"), auto_now_add=True)
modified_at = models.DateTimeField(_("modified"), auto_now=True)
objects = TransactionQuerySet.as_manager()
def __str__(self):
return "#%i" % self.id
@property
def account(self):
return self.bill.account
def clean(self):
if not self.pk:
amount = self.bill.transactions.exclude(state=self.REJECTED).amount()
if amount >= self.bill.total:
raise ValidationError(
_("Bill %(number)s already has valid transactions that cover bill total amount (%(amount)s).") % {
'number': self.bill.number,
'amount': amount,
}
)
def get_state_help(self):
if self.source:
return self.source.method_instance.state_help.get(self.state) or self.STATE_HELP.get(self.state)
return self.STATE_HELP.get(self.state)
def mark_as_processed(self):
self.state = self.WAITTING_EXECUTION
self.save(update_fields=('state', 'modified_at'))
def mark_as_executed(self):
self.state = self.EXECUTED
self.save(update_fields=('state', 'modified_at'))
def mark_as_secured(self):
self.state = self.SECURED
self.save(update_fields=('state', 'modified_at'))
def mark_as_rejected(self):
self.state = self.REJECTED
self.save(update_fields=('state', 'modified_at'))
class TransactionProcess(models.Model):
"""
Stores arbitrary data generated by payment methods while processing transactions
"""
CREATED = 'CREATED'
EXECUTED = 'EXECUTED'
ABORTED = 'ABORTED'
COMMITED = 'COMMITED'
STATES = (
(CREATED, _("Created")),
(EXECUTED, _("Executed")),
(ABORTED, _("Aborted")),
(COMMITED, _("Commited")),
)
data = JSONField(_("data"), blank=True)
file = PrivateFileField(_("file"), blank=True)
state = models.CharField(_("state"), max_length=16, choices=STATES, default=CREATED)
created_at = models.DateTimeField(_("created"), auto_now_add=True, db_index=True)
updated_at = models.DateTimeField(_("updated"), auto_now=True)
class Meta:
verbose_name_plural = _("Transaction processes")
def __str__(self):
return '#%i' % self.id
def mark_as_executed(self):
self.state = self.EXECUTED
for transaction in self.transactions.all():
transaction.mark_as_executed()
self.save(update_fields=('state', 'updated_at'))
def abort(self):
self.state = self.ABORTED
for transaction in self.transactions.all():
transaction.mark_as_rejected()
self.save(update_fields=('state', 'updated_at'))
def commit(self):
self.state = self.COMMITED
for transaction in self.transactions.processing():
transaction.mark_as_secured()
self.save(update_fields=('state', 'updated_at'))
| 35.85782 | 118 | 0.648163 | 7,171 | 0.947793 | 0 | 0 | 463 | 0.061195 | 0 | 0 | 1,381 | 0.182527 |
82f6b63b42d651692733eea279e00b78a2b3c076 | 1,264 | py | Python | src/visualisation/draw_map.py | Aluriak/24hducode2016 | 629a3225c5a426d3deb472d67f0e0091904d1547 | [
"Unlicense"
]
| null | null | null | src/visualisation/draw_map.py | Aluriak/24hducode2016 | 629a3225c5a426d3deb472d67f0e0091904d1547 | [
"Unlicense"
]
| null | null | null | src/visualisation/draw_map.py | Aluriak/24hducode2016 | 629a3225c5a426d3deb472d67f0e0091904d1547 | [
"Unlicense"
]
| null | null | null | # -*- coding: utf-8 -*-
##########
# IMPORT #
##########
import plotly.offline as py
import plotly.tools as tls
tls.set_credentials_file(username='ducktypers', api_key='fd81wnx3lh')
########
# MAIN #
########
def draw_map(lon, lat, text, titre='NO TITLE'):
"""
Take 3 list as input, and the title of the map.
"""
py.plot({ # use `py.iplot` inside the ipython notebook
"data": [{
'type': 'scattergeo',
            # 'locationmode' is only used with 'locations'; it is omitted here
            # because lon/lat are passed directly ('france' is not a valid value)
'lon': lon,
'lat': lat,
'text': text,
'mode': 'markers',
'marker': dict(
size = 8,
opacity = 0.8,
line = dict(width=1, color="rgb(102,102,102)")
)
}],
"layout": {
'title': str(titre),
'geo': dict(scope='europe',
                        projection=dict( type='azimuthal equal area' ),  # 'albers europe' is not a valid Plotly projection type
showland = True,
landcolor = "rgb(200, 200, 200)",
subunitcolor = "rgb(217, 217, 217)",
countrycolor = "rgb(217, 217, 217)",
countrywidth = 1,
subunitwidth = 1)
}
}, filename='interactive_map', # name of the file as saved in your plotly account
#sharing='public'
)
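# Example (illustrative coordinates):
#   draw_map([2.35, 5.37], [48.85, 43.30], ['Paris', 'Marseille'], 'Two cities')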
| 25.28 | 90 | 0.476266 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 509 | 0.402372 |
82f77b388ad6652721586ee5ac9c7649d9fe0b48 | 89,313 | py | Python | config/custom_components/mbapi2020/proto/acp_pb2.py | mhaack/home-assistant-conf | 7cb856bee67906ba066adffe2151f6b50b6b73ce | [
"MIT"
]
| 28 | 2019-05-31T12:30:15.000Z | 2022-03-10T18:54:57.000Z | config/custom_components/mbapi2020/proto/acp_pb2.py | mhaack/home-assistant-conf | 7cb856bee67906ba066adffe2151f6b50b6b73ce | [
"MIT"
]
| 2 | 2020-04-15T20:02:42.000Z | 2021-03-09T19:45:21.000Z | config/custom_components/mbapi2020/proto/acp_pb2.py | mhaack/home-assistant-conf | 7cb856bee67906ba066adffe2151f6b50b6b73ce | [
"MIT"
]
| 2 | 2021-03-31T08:27:19.000Z | 2021-04-30T15:13:24.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: acp.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import custom_components.mbapi2020.proto.gogo_pb2 as gogo__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='acp.proto',
package='proto',
syntax='proto3',
serialized_options=b'\n\032com.daimler.mbcarkit.proto\220\001\001\320\341\036\001',
create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n\tacp.proto\x12\x05proto\x1a\ngogo.proto\"\xa5\x02\n\x03VVA\"v\n\x0c\x43ommandState\x12\x19\n\x15UNKNOWN_COMMAND_STATE\x10\x00\x12\x0c\n\x07\x43REATED\x10\xf2\x07\x12\r\n\x08\x45NQUEUED\x10\xf8\x07\x12\x0f\n\nPROCESSING\x10\xf4\x07\x12\x0e\n\tSUSPENDED\x10\xf9\x07\x12\r\n\x08\x46INISHED\x10\xfa\x07\"\xa5\x01\n\x10\x43ommandCondition\x12\x1d\n\x19UNKNWON_COMMAND_CONDITION\x10\x00\x12\t\n\x04NONE\x10\xe8\x07\x12\r\n\x08\x41\x43\x43\x45PTED\x10\xe9\x07\x12\r\n\x08REJECTED\x10\xea\x07\x12\x0e\n\tTERMINATE\x10\xeb\x07\x12\x0c\n\x07SUCCESS\x10\xf3\x07\x12\x0b\n\x06\x46\x41ILED\x10\xf5\x07\x12\x10\n\x0bOVERWRITTEN\x10\xf6\x07\x12\x0c\n\x07TIMEOUT\x10\xf7\x07\"\x8f\x0b\n\nVehicleAPI\"~\n\x0c\x43ommandState\x12\x19\n\x15UNKNOWN_COMMAND_STATE\x10\x00\x12\x0e\n\nINITIATION\x10\x01\x12\x0c\n\x08\x45NQUEUED\x10\x02\x12\x0e\n\nPROCESSING\x10\x03\x12\x0b\n\x07WAITING\x10\x04\x12\x0c\n\x08\x46INISHED\x10\x05\x12\n\n\x06\x46\x41ILED\x10\x06\"S\n\x0f\x41ttributeStatus\x12\r\n\tVALUE_SET\x10\x00\x12\x11\n\rVALUE_NOT_SET\x10\x01\x12\x0b\n\x07INVALID\x10\x03\x12\x11\n\rNOT_AVAILABLE\x10\x04\"\xab\t\n\tQueueType\x12\x1b\n\x17UNKNOWNCOMMANDQUEUETYPE\x10\x00\x12\t\n\x05\x44OORS\x10\n\x12\x0b\n\x07\x41UXHEAT\x10\x0b\x12\x0b\n\x07PRECOND\x10\x0c\x12\r\n\tCHARGEOPT\x10\r\x12\x0f\n\x0bMAINTENANCE\x10\x0e\x12\x07\n\x03TCU\x10\x0f\x12\x08\n\x04\x46\x45\x45\x44\x10\x10\x12\x15\n\x11SERVICEACTIVATION\x10\x11\x12\x07\n\x03\x41TP\x10\x12\x12\x0e\n\nASSISTANCE\x10\x13\x12\x08\n\x04RACP\x10\x14\x12\x0f\n\x0bWEEKPROFILE\x10\x15\x12\x13\n\x0fREMOTEDIAGNOSIS\x10\x16\x12\x08\n\x04\x46LSH\x10\x17\x12\x0f\n\x0bTEMPERATURE\x10\x18\x12\x0c\n\x08TRIPCOMP\x10\x19\x12\n\n\x06\x45NGINE\x10\x1a\x12\x0e\n\nTHEFTALARM\x10\x1b\x12\n\n\x06WINDOW\x10\x1c\x12\x0c\n\x08HEADUNIT\x10\x1d\x12\n\n\x06MECALL\x10\x1f\x12\x0f\n\x0bIMMOBILIZER\x10 \x12\x10\n\x0cRENTALSIGNAL\x10!\x12\x07\n\x03\x42\x43\x46\x10\"\x12\x11\n\rPLUGANDCHARGE\x10#\x12\x14\n\x10\x43\x41RSHARINGMODULE\x10$\x12\x0b\n\x07\x42\x41TTERY\x10%\x12\x11\n\rONBOARDFENCES\x10&\x12\x0f\n\x0bSPEEDFENCES\x10\'\x12\x13\n\x0f\x43HARGINGTARIFFS\x10(\x12\r\n\tRTMCONFIG\x10)\x12\x17\n\x13MAINTENANCECOMPUTER\x10*\x12\x0b\n\x07MECALL2\x10+\x12\x19\n\x15\x41UTOMATEDVALETPARKING\x10,\x12\x11\n\rCHARGECONTROL\x10-\x12\x0e\n\nSPEEDALERT\x10.\x12\x1b\n\x17unknowncommandqueuetype\x10\x00\x12\t\n\x05\x64oors\x10\n\x12\x0b\n\x07\x61uxheat\x10\x0b\x12\x0b\n\x07precond\x10\x0c\x12\r\n\tchargeopt\x10\r\x12\x0f\n\x0bmaintenance\x10\x0e\x12\x07\n\x03tcu\x10\x0f\x12\x08\n\x04\x66\x65\x65\x64\x10\x10\x12\x15\n\x11serviceactivation\x10\x11\x12\x07\n\x03\x61tp\x10\x12\x12\x0e\n\nassistance\x10\x13\x12\x08\n\x04racp\x10\x14\x12\x0f\n\x0bweekprofile\x10\x15\x12\x13\n\x0fremotediagnosis\x10\x16\x12\x08\n\x04\x66lsh\x10\x17\x12\x0f\n\x0btemperature\x10\x18\x12\x0c\n\x08tripcomp\x10\x19\x12\n\n\x06\x65ngine\x10\x1a\x12\x0e\n\ntheftalarm\x10\x1b\x12\n\n\x06window\x10\x1c\x12\x0c\n\x08headunit\x10\x1d\x12\n\n\x06mecall\x10\x1f\x12\x0f\n\x0bimmobilizer\x10 \x12\x10\n\x0crentalsignal\x10!\x12\x07\n\x03\x62\x63\x66\x10\"\x12\x11\n\rplugandcharge\x10#\x12\x14\n\x10\x63\x61rsharingmodule\x10$\x12\x0b\n\x07\x62\x61ttery\x10%\x12\x11\n\ronboardfences\x10&\x12\x0f\n\x0bspeedfences\x10\'\x12\x13\n\x0f\x63hargingtariffs\x10(\x12\r\n\trtmconfig\x10)\x12\x17\n\x13maintenancecomputer\x10*\x12\x0b\n\x07mecall2\x10+\x12\x19\n\x15\x61utomatedvaletparking\x10,\x12\x11\n\rchargecontrol\x10-\x12\x0e\n\nspeedalert\x10.\x1a\x02\x10\x01\"\xa9\x31\n\x03\x41\x43P\"\xa1\x31\n\x0b\x43ommandType\x12\x16\n\x12UNKNOWNCOMMANDTYPE\x10\x00\x12\r\n\tDOORSLOCK\x10\x64\x12\x0f\n\x0b\x44OORSUNLOCK\x10n\x12\x0f\n\x0bTRUNKUNLOCK\x10s\x12\x12\n\x0e\x46UELFLAPUNLOCK\x10t\x12\x14\n\x10\x43HARGEFLAPUNLOCK\x10u\x12\x17\n\x13\x43HARGECOUPLERUNLOCK\x10v\x12\x16\n\x12\x44OORSPREPARERENTAL\x10x\x12\x17\n\x12\x44OORSSECUREVEHICLE\x10\x82\x01\x12\x11\n\x0c\x41UXHEATSTART\x10\xac\x02\x12\x10\n\x0b\x41UXHEATSTOP\x10\xb6\x02\x12\x15\n\x10\x41UXHEATCONFIGURE\x10\xc0\x02\x12\x19\n\x14TEMPERATURECONFIGURE\x10\xde\x02\x12\x19\n\x14WEEKPROFILECONFIGURE\x10\xe8\x02\x12\x1b\n\x16WEEKPROFILEV2CONFIGURE\x10\xf2\x02\x12\x11\n\x0cPRECONDSTART\x10\x90\x03\x12\x10\n\x0bPRECONDSTOP\x10\x9a\x03\x12\x15\n\x10PRECONDCONFIGURE\x10\xa4\x03\x12\x1a\n\x15PRECONDCONFIGURESEATS\x10\xa9\x03\x12\x17\n\x12\x43HARGEOPTCONFIGURE\x10\xae\x03\x12\x13\n\x0e\x43HARGEOPTSTART\x10\xb8\x03\x12\x12\n\rCHARGEOPTSTOP\x10\xc2\x03\x12\x0c\n\x07\x46\x45\x45\x44POI\x10\xf4\x03\x12\x11\n\x0c\x46\x45\x45\x44\x46REETEXT\x10\xfe\x03\x12\x10\n\x0b\x45NGINESTART\x10\xa6\x04\x12\x0f\n\nENGINESTOP\x10\xb0\x04\x12\x13\n\x0e\x45NGINEAVPSTART\x10\xba\x04\x12\x0e\n\tTCUWAKEUP\x10\xd8\x04\x12\x10\n\x0bTCUSWUPDATE\x10\xe2\x04\x12\x10\n\x0bTCURCSRESET\x10\xec\x04\x12\x15\n\x10TCUINTERROGATION\x10\xf6\x04\x12\x14\n\x0fSPEEDALERTSTART\x10\xc6\x05\x12\x13\n\x0eSPEEDALERTSTOP\x10\xd0\x05\x12\x0e\n\tFLSHSTART\x10\xee\x05\x12\r\n\x08\x46LSHSTOP\x10\xf8\x05\x12\x10\n\x0bSIGPOSSTART\x10\x82\x06\x12\x16\n\x11\x43ONTRACTCONFIGURE\x10\xa0\x06\x12\x13\n\x0e\x43ONTRACTREMOVE\x10\xaa\x06\x12\x12\n\rROOTCONFIGURE\x10\xb4\x06\x12\x0f\n\nROOTREMOVE\x10\xbe\x06\x12\r\n\x08TRIPCOMP\x10\xd2\x06\x12\x19\n\x14MAINTENANCECONFIGURE\x10\xa2\x07\x12\x1e\n\x19MAINTENANCECOMPUTEROFFSET\x10\xa3\x07\x12\x15\n\x10SHORTTESTEXECUTE\x10\xa7\x07\x12\x1f\n\x1aSERVICEACTIVATIONCONFIGURE\x10\xac\x07\x12\"\n\x1d\x44\x43\x32SERVICEACTIVATIONCONFIGURE\x10\xb1\x07\x12\x13\n\x0e\x44\x43\x32RAWDOWNLOAD\x10\xb6\x07\x12\x1d\n\x18\x41PPLICATIONCONFIGURATION\x10\xbb\x07\x12\x15\n\x10\x44\x43\x32STARTTRACKING\x10\xc0\x07\x12\x10\n\x0b\x41TPSEQUENCE\x10\xde\x07\x12\x1d\n\x18THEFTALARMTOGGLEINTERIOR\x10\xe8\x07\x12\x18\n\x13THEFTALARMTOGGLETOW\x10\xf2\x07\x12 \n\x1bTHEFTALARMSELECTINTERIORTOW\x10\xfc\x07\x12\"\n\x1dTHEFTALARMDESELECTINTERIORTOW\x10\x86\x08\x12\x13\n\x0eTHEFTALARMSTOP\x10\x90\x08\x12\x0f\n\nWINDOWOPEN\x10\xcc\x08\x12\x10\n\x0bWINDOWCLOSE\x10\xd6\x08\x12\x14\n\x0fWINDOWVENTILATE\x10\xe0\x08\x12\x0f\n\nWINDOWMOVE\x10\xe1\x08\x12\r\n\x08ROOFOPEN\x10\xea\x08\x12\x0e\n\tROOFCLOSE\x10\xf4\x08\x12\r\n\x08ROOFLIFT\x10\xfe\x08\x12\r\n\x08ROOFMOVE\x10\xff\x08\x12\x12\n\rBATTERYMAXSOC\x10\xd0\x0f\x12\x19\n\x14\x42\x41TTERYCHARGEPROGRAM\x10\xda\x0f\x12\x1b\n\x16\x43HARGEPROGRAMCONFIGURE\x10\xe4\x0f\x12\x18\n\x13ONBOARDFENCESCREATE\x10\xb4\x10\x12\x18\n\x13ONBOARDFENCESUPDATE\x10\xbe\x10\x12\x18\n\x13ONBOARDFENCESDELETE\x10\xc8\x10\x12\x16\n\x11SPEEDFENCESCREATE\x10\x98\x11\x12\x16\n\x11SPEEDFENCESUPDATE\x10\xa2\x11\x12\x16\n\x11SPEEDFENCESDELETE\x10\xac\x11\x12\x1a\n\x15\x43HARGINGTARIFFSCREATE\x10\xfc\x11\x12\x1a\n\x15\x43HARGINGTARIFFSUPDATE\x10\x86\x12\x12\x1a\n\x15\x43HARGINGTARIFFSDELETE\x10\x90\x12\x12\x14\n\x0fTHEFTALARMSTART\x10\xc4\x13\x12\x1d\n\x18THEFTALARMSELECTINTERIOR\x10\xce\x13\x12\x1f\n\x1aTHEFTALARMDESELECTINTERIOR\x10\xd8\x13\x12\x18\n\x13THEFTALARMSELECTTOW\x10\xe2\x13\x12\x1a\n\x15THEFTALARMDESELECTTOW\x10\xec\x13\x12$\n\x1fTHEFTALARMSELECTDAMAGEDETECTION\x10\xf6\x13\x12&\n!THEFTALARMDESELECTDAMAGEDETECTION\x10\x80\x14\x12%\n THEFTALARMCONFIRMDAMAGEDETECTION\x10\x8a\x14\x12\x11\n\x0cMECALL2START\x10\xa8\x14\x12\x1e\n\x19UDXTRIGGERSYNCHRONIZATION\x10\xb0\t\x12\x19\n\x14UDXACTIVEUSERPROFILE\x10\xba\t\x12\x15\n\x10UDXRESETUSERDATA\x10\xc4\t\x12\x12\n\rUSERPROFSYNCH\x10\xce\t\x12\x12\n\rUSERDATARESET\x10\xd8\t\x12\x17\n\x12PROFACTIVATIONSNAP\x10\xe2\t\x12\x19\n\x14PROFACTIVATIONDIRECT\x10\xe7\t\x12\x13\n\x0eSOFTWAREUPDATE\x10\xec\t\x12\x15\n\x10PUSHNOTIFICATION\x10\xf6\t\x12\x12\n\rMECALLCOMMAND\x10\x9e\n\x12\x14\n\x0fPRECONDSTARTRCS\x10\xf8\n\x12\x13\n\x0ePRECONDSTOPRCS\x10\x82\x0b\x12\x18\n\x13PRECONDCONFIGURERCS\x10\x8c\x0b\x12\x11\n\x0cTCUCONFIGURE\x10\x96\x0b\x12\x1c\n\x17\x45\x44ISONSERVICEACTIVATION\x10\x97\x0b\x12\x11\n\x0cTESTSEQUENCE\x10\x98\x0b\x12\x19\n\x14PRECONDCONFIGURERACP\x10\x99\x0b\x12\x1b\n\x16\x43HARGEOPTCONFIGURERACP\x10\x9a\x0b\x12\x18\n\x13TARIFFTABLEDOWNLOAD\x10\x9b\x0b\x12\x15\n\x10PRECONDSTARTRACP\x10\x9c\x0b\x12\x14\n\x0fPRECONDSTOPRACP\x10\x9d\x0b\x12\x1a\n\x15ROOTCERTIFICATEREMOVE\x10\x9e\x0b\x12\x19\n\x14ONREQUESTPROBEUPLOAD\x10\x9f\x0b\x12\x1c\n\x17ROOTCERTIFICATEDOWNLOAD\x10\xa0\x0b\x12\x1e\n\x19\x43ONTRACTCERTIFICATEREMOVE\x10\xa1\x0b\x12 \n\x1b\x43ONTRACTCERTIFICATEDOWNLOAD\x10\xa2\x0b\x12\x1d\n\x18PROBECONFIGURATIONUPDATE\x10\xa3\x0b\x12\x13\n\x0eRDIAGDELETEECU\x10\xdc\x0b\x12\x16\n\x11RDIAGSTATUSREPORT\x10\xdd\x0b\x12\x13\n\x0eRDIAGEXECUTION\x10\xde\x0b\x12\x19\n\x14IMMOBILIZERCHALLENGE\x10\xc0\x0c\x12\x1d\n\x18IMMOBILIZERSEARCHKEYLINE\x10\xca\x0c\x12\x1e\n\x19IMMOBILIZERRELEASEKEYLINE\x10\xd4\x0c\x12\x1b\n\x16IMMOBILIZERLOCKKEYLINE\x10\xde\x0c\x12\x1b\n\x16IMMOBILIZERLOCKVEHICLE\x10\xdf\x0c\x12\x1e\n\x19IMMOBILIZERRELEASEVEHICLE\x10\xd5\x0c\x12\x14\n\x0fSETRENTALSIGNAL\x10\xa4\r\x12\x19\n\x14\x42LACKCHANNELDOWNLOAD\x10\x88\x0e\x12\x17\n\x12\x42LACKCHANNELUPLOAD\x10\x92\x0e\x12\x11\n\x0c\x43ONFIGURECSM\x10\xec\x0e\x12\x16\n\x11UPDATEVEHICLEINFO\x10\xed\x0e\x12\x16\n\x11RELAYMESSAGETOCSM\x10\xee\x0e\x12\x1c\n\x17RELAYRENTALREQUESTTOCSB\x10\xef\x0e\x12\x16\n\x11RTMDOWNLOADCONFIG\x10\xe0\x12\x12\x12\n\rRTMREADCONFIG\x10\xea\x12\x12\x10\n\x0b\x41VPACTIVATE\x10\x8c\x15\x12\x1b\n\x16\x43HARGECONTROLCONFIGURE\x10\xf0\x15\x12\x16\n\x12unknownCommandType\x10\x00\x12\r\n\tdoorsLock\x10\x64\x12\x0f\n\x0b\x64oorsUnlock\x10n\x12\x0f\n\x0btrunkUnlock\x10s\x12\x12\n\x0e\x66uelflapUnlock\x10t\x12\x14\n\x10\x63hargeflapUnlock\x10u\x12\x17\n\x13\x63hargecouplerUnlock\x10v\x12\x16\n\x12\x64oorsPrepareRental\x10x\x12\x17\n\x12\x64oorsSecureVehicle\x10\x82\x01\x12\x11\n\x0c\x61uxheatStart\x10\xac\x02\x12\x10\n\x0b\x61uxheatStop\x10\xb6\x02\x12\x15\n\x10\x61uxheatConfigure\x10\xc0\x02\x12\x19\n\x14temperatureConfigure\x10\xde\x02\x12\x19\n\x14weekprofileConfigure\x10\xe8\x02\x12\x1b\n\x16weekprofileV2Configure\x10\xf2\x02\x12\x11\n\x0cprecondStart\x10\x90\x03\x12\x10\n\x0bprecondStop\x10\x9a\x03\x12\x15\n\x10precondConfigure\x10\xa4\x03\x12\x1a\n\x15precondConfigureSeats\x10\xa9\x03\x12\x17\n\x12\x63hargeoptConfigure\x10\xae\x03\x12\x13\n\x0e\x63hargeoptStart\x10\xb8\x03\x12\x12\n\rchargeoptStop\x10\xc2\x03\x12\x0c\n\x07\x66\x65\x65\x64Poi\x10\xf4\x03\x12\x11\n\x0c\x66\x65\x65\x64\x46reetext\x10\xfe\x03\x12\x10\n\x0b\x65ngineStart\x10\xa6\x04\x12\x0f\n\nengineStop\x10\xb0\x04\x12\x13\n\x0e\x65ngineAvpstart\x10\xba\x04\x12\x0e\n\ttcuWakeup\x10\xd8\x04\x12\x10\n\x0btcuSwUpdate\x10\xe2\x04\x12\x10\n\x0btcuRcsReset\x10\xec\x04\x12\x15\n\x10tcuInterrogation\x10\xf6\x04\x12\x14\n\x0fspeedalertStart\x10\xc6\x05\x12\x13\n\x0espeedalertStop\x10\xd0\x05\x12\x0e\n\tflshStart\x10\xee\x05\x12\r\n\x08\x66lshStop\x10\xf8\x05\x12\x10\n\x0bsigposStart\x10\x82\x06\x12\x16\n\x11\x63ontractConfigure\x10\xa0\x06\x12\x13\n\x0e\x63ontractRemove\x10\xaa\x06\x12\x12\n\rrootConfigure\x10\xb4\x06\x12\x0f\n\nrootRemove\x10\xbe\x06\x12\r\n\x08tripcomp\x10\xd2\x06\x12\x19\n\x14maintenanceConfigure\x10\xa2\x07\x12\x1e\n\x19maintenanceComputerOffset\x10\xa3\x07\x12\x15\n\x10shorttestExecute\x10\xa7\x07\x12\x1f\n\x1aserviceactivationConfigure\x10\xac\x07\x12\"\n\x1d\x64\x63\x32ServiceactivationConfigure\x10\xb1\x07\x12\x13\n\x0e\x64\x63\x32RawDownload\x10\xb6\x07\x12\x1d\n\x18\x61pplicationConfiguration\x10\xbb\x07\x12\x15\n\x10\x64\x63\x32StartTracking\x10\xc0\x07\x12\x10\n\x0b\x61tpSequence\x10\xde\x07\x12\x1d\n\x18theftalarmToggleInterior\x10\xe8\x07\x12\x18\n\x13theftalarmToggleTow\x10\xf2\x07\x12 \n\x1btheftalarmSelectInteriorTow\x10\xfc\x07\x12\"\n\x1dtheftalarmDeselectInteriorTow\x10\x86\x08\x12\x13\n\x0etheftalarmStop\x10\x90\x08\x12\x0f\n\nwindowOpen\x10\xcc\x08\x12\x10\n\x0bwindowClose\x10\xd6\x08\x12\x14\n\x0fwindowVentilate\x10\xe0\x08\x12\x0f\n\nwindowMove\x10\xe1\x08\x12\r\n\x08roofOpen\x10\xea\x08\x12\x0e\n\troofClose\x10\xf4\x08\x12\r\n\x08roofLift\x10\xfe\x08\x12\r\n\x08roofMove\x10\xff\x08\x12\x12\n\rbatteryMaxsoc\x10\xd0\x0f\x12\x19\n\x14\x62\x61tteryChargeprogram\x10\xda\x0f\x12\x1b\n\x16\x63hargeprogramconfigure\x10\xe4\x0f\x12\x18\n\x13onboardfencesCreate\x10\xb4\x10\x12\x18\n\x13onboardfencesUpdate\x10\xbe\x10\x12\x18\n\x13onboardfencesDelete\x10\xc8\x10\x12\x16\n\x11speedfencesCreate\x10\x98\x11\x12\x16\n\x11speedfencesUpdate\x10\xa2\x11\x12\x16\n\x11speedfencesDelete\x10\xac\x11\x12\x1a\n\x15\x63hargingtariffsCreate\x10\xfc\x11\x12\x1a\n\x15\x63hargingtariffsUpdate\x10\x86\x12\x12\x1a\n\x15\x63hargingtariffsDelete\x10\x90\x12\x12\x14\n\x0ftheftalarmstart\x10\xc4\x13\x12\x1d\n\x18theftalarmselectinterior\x10\xce\x13\x12\x1f\n\x1atheftalarmdeselectinterior\x10\xd8\x13\x12\x18\n\x13theftalarmselecttow\x10\xe2\x13\x12\x1a\n\x15theftalarmdeselecttow\x10\xec\x13\x12$\n\x1ftheftalarmselectdamagedetection\x10\xf6\x13\x12&\n!theftalarmdeselectdamagedetection\x10\x80\x14\x12%\n theftalarmconfirmdamagedetection\x10\x8a\x14\x12\x11\n\x0cmecall2start\x10\xa8\x14\x12\x1e\n\x19udxTriggerSynchronization\x10\xb0\t\x12\x19\n\x14udxActiveUserProfile\x10\xba\t\x12\x15\n\x10udxResetUserData\x10\xc4\t\x12\x12\n\ruserProfSynch\x10\xce\t\x12\x12\n\ruserDataReset\x10\xd8\t\x12\x17\n\x12profActivationSnap\x10\xe2\t\x12\x19\n\x14profActivationDirect\x10\xe7\t\x12\x13\n\x0esoftwareUpdate\x10\xec\t\x12\x15\n\x10pushNotification\x10\xf6\t\x12\x12\n\rmecallcommand\x10\x9e\n\x12\x14\n\x0fprecondStartRcs\x10\xf8\n\x12\x13\n\x0eprecondStopRcs\x10\x82\x0b\x12\x18\n\x13precondConfigureRcs\x10\x8c\x0b\x12\x11\n\x0ctcuConfigure\x10\x96\x0b\x12\x1c\n\x17\x65\x64isonServiceActivation\x10\x97\x0b\x12\x11\n\x0ctestSequence\x10\x98\x0b\x12\x19\n\x14precondConfigureRacp\x10\x99\x0b\x12\x1b\n\x16\x63hargeoptConfigureRacp\x10\x9a\x0b\x12\x18\n\x13tariffTableDownload\x10\x9b\x0b\x12\x15\n\x10precondStartRacp\x10\x9c\x0b\x12\x14\n\x0fprecondStopRacp\x10\x9d\x0b\x12\x1a\n\x15rootCertificateRemove\x10\x9e\x0b\x12\x19\n\x14onRequestProbeUpload\x10\x9f\x0b\x12\x1c\n\x17rootCertificateDownload\x10\xa0\x0b\x12\x1e\n\x19\x63ontractCertificateRemove\x10\xa1\x0b\x12 \n\x1b\x63ontractCertificateDownload\x10\xa2\x0b\x12\x1d\n\x18probeConfigurationUpdate\x10\xa3\x0b\x12\x13\n\x0erdiagDeleteEcu\x10\xdc\x0b\x12\x16\n\x11rdiagStatusReport\x10\xdd\x0b\x12\x13\n\x0erdiagExecution\x10\xde\x0b\x12\x19\n\x14immobilizerChallenge\x10\xc0\x0c\x12\x1d\n\x18immobilizerSearchKeyline\x10\xca\x0c\x12\x1e\n\x19immobilizerReleaseKeyline\x10\xd4\x0c\x12\x1b\n\x16immobilizerLockKeyline\x10\xde\x0c\x12\x1b\n\x16immobilizerLockVehicle\x10\xdf\x0c\x12\x1e\n\x19immobilizerReleaseVehicle\x10\xd5\x0c\x12\x14\n\x0fsetRentalSignal\x10\xa4\r\x12\x19\n\x14\x62lackchannelDownload\x10\x88\x0e\x12\x17\n\x12\x62lackchannelUpload\x10\x92\x0e\x12\x11\n\x0c\x63onfigurecsm\x10\xec\x0e\x12\x16\n\x11updatevehicleinfo\x10\xed\x0e\x12\x16\n\x11relaymessagetocsm\x10\xee\x0e\x12\x1c\n\x17relayrentalrequesttocsb\x10\xef\x0e\x12\x16\n\x11rtmDownloadConfig\x10\xe0\x12\x12\x12\n\rrtmReadConfig\x10\xea\x12\x12\x10\n\x0b\x61vpActivate\x10\x8c\x15\x12\x1b\n\x16\x63hargecontrolconfigure\x10\xf0\x15\x1a\x02\x10\x01\x42#\n\x1a\x63om.daimler.mbcarkit.proto\x90\x01\x01\xd0\xe1\x1e\x01\x62\x06proto3'
,
dependencies=[gogo__pb2.DESCRIPTOR,])
_VVA_COMMANDSTATE = _descriptor.EnumDescriptor(
name='CommandState',
full_name='proto.VVA.CommandState',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_COMMAND_STATE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CREATED', index=1, number=1010,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENQUEUED', index=2, number=1016,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PROCESSING', index=3, number=1012,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SUSPENDED', index=4, number=1017,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FINISHED', index=5, number=1018,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=40,
serialized_end=158,
)
_sym_db.RegisterEnumDescriptor(_VVA_COMMANDSTATE)
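# proto.VVA.CommandState tracks a remote command through the vehicle queue
# (CREATED -> ENQUEUED -> PROCESSING -> SUSPENDED -> FINISHED); the sparse
# 1010-1018 numbers mirror backend status codes rather than a dense 0..n
# range. A minimal lookup sketch via the wrapper classes generated further
# down this module (the import name 'mbcarkit_pb2' is hypothetical):
#
#     import mbcarkit_pb2 as pb
#     pb.VVA.CommandState.Name(1018)        # -> 'FINISHED'
#     pb.VVA.CommandState.Value('CREATED')  # -> 1010, also pb.VVA.CREATED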
_VVA_COMMANDCONDITION = _descriptor.EnumDescriptor(
name='CommandCondition',
full_name='proto.VVA.CommandCondition',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNWON_COMMAND_CONDITION', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NONE', index=1, number=1000,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ACCEPTED', index=2, number=1001,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='REJECTED', index=3, number=1002,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TERMINATE', index=4, number=1003,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SUCCESS', index=5, number=1011,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FAILED', index=6, number=1013,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='OVERWRITTEN', index=7, number=1014,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TIMEOUT', index=8, number=1015,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=161,
serialized_end=326,
)
_sym_db.RegisterEnumDescriptor(_VVA_COMMANDCONDITION)
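# proto.VVA.CommandCondition is the command's outcome axis (ACCEPTED/REJECTED
# on submission; SUCCESS, FAILED, OVERWRITTEN or TIMEOUT once processed).
# Note that 'UNKNWON_COMMAND_CONDITION' is misspelled in the upstream .proto;
# it must stay misspelled here, since descriptor names have to match the
# serialized_pb blob above and any caller already referencing
# VVA.UNKNWON_COMMAND_CONDITION.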
_VEHICLEAPI_COMMANDSTATE = _descriptor.EnumDescriptor(
name='CommandState',
full_name='proto.VehicleAPI.CommandState',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_COMMAND_STATE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INITIATION', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENQUEUED', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PROCESSING', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WAITING', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FINISHED', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FAILED', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=343,
serialized_end=469,
)
_sym_db.RegisterEnumDescriptor(_VEHICLEAPI_COMMANDSTATE)
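# proto.VehicleAPI.CommandState is a second, independent lifecycle enum with
# dense 0..6 numbering (INITIATION..FAILED). Despite the shared short name it
# is unrelated to proto.VVA.CommandState above, so decode each field against
# the enum type its message actually declares.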
_VEHICLEAPI_ATTRIBUTESTATUS = _descriptor.EnumDescriptor(
name='AttributeStatus',
full_name='proto.VehicleAPI.AttributeStatus',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='VALUE_SET', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='VALUE_NOT_SET', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INVALID', index=2, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NOT_AVAILABLE', index=3, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=471,
serialized_end=554,
)
_sym_db.RegisterEnumDescriptor(_VEHICLEAPI_ATTRIBUTESTATUS)
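# AttributeStatus qualifies a reported vehicle attribute (value set, not set,
# invalid, not available). The numbering skips 2 (values are 0, 1, 3, 4),
# which proto3 permits; the gap comes from the upstream .proto and is
# preserved here.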
_VEHICLEAPI_QUEUETYPE = _descriptor.EnumDescriptor(
name='QueueType',
full_name='proto.VehicleAPI.QueueType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWNCOMMANDQUEUETYPE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DOORS', index=1, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AUXHEAT', index=2, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECOND', index=3, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGEOPT', index=4, number=13,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MAINTENANCE', index=5, number=14,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TCU', index=6, number=15,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FEED', index=7, number=16,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SERVICEACTIVATION', index=8, number=17,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ATP', index=9, number=18,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ASSISTANCE', index=10, number=19,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RACP', index=11, number=20,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WEEKPROFILE', index=12, number=21,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='REMOTEDIAGNOSIS', index=13, number=22,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FLSH', index=14, number=23,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TEMPERATURE', index=15, number=24,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TRIPCOMP', index=16, number=25,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENGINE', index=17, number=26,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARM', index=18, number=27,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WINDOW', index=19, number=28,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='HEADUNIT', index=20, number=29,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MECALL', index=21, number=31,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='IMMOBILIZER', index=22, number=32,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RENTALSIGNAL', index=23, number=33,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BCF', index=24, number=34,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PLUGANDCHARGE', index=25, number=35,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CARSHARINGMODULE', index=26, number=36,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BATTERY', index=27, number=37,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ONBOARDFENCES', index=28, number=38,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPEEDFENCES', index=29, number=39,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGINGTARIFFS', index=30, number=40,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RTMCONFIG', index=31, number=41,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MAINTENANCECOMPUTER', index=32, number=42,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MECALL2', index=33, number=43,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AUTOMATEDVALETPARKING', index=34, number=44,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGECONTROL', index=35, number=45,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPEEDALERT', index=36, number=46,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='unknowncommandqueuetype', index=37, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='doors', index=38, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='auxheat', index=39, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precond', index=40, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargeopt', index=41, number=13,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='maintenance', index=42, number=14,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='tcu', index=43, number=15,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='feed', index=44, number=16,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='serviceactivation', index=45, number=17,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='atp', index=46, number=18,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='assistance', index=47, number=19,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='racp', index=48, number=20,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='weekprofile', index=49, number=21,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='remotediagnosis', index=50, number=22,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='flsh', index=51, number=23,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='temperature', index=52, number=24,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='tripcomp', index=53, number=25,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='engine', index=54, number=26,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarm', index=55, number=27,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='window', index=56, number=28,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='headunit', index=57, number=29,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='mecall', index=58, number=31,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='immobilizer', index=59, number=32,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rentalsignal', index=60, number=33,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='bcf', index=61, number=34,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='plugandcharge', index=62, number=35,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='carsharingmodule', index=63, number=36,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='battery', index=64, number=37,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='onboardfences', index=65, number=38,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='speedfences', index=66, number=39,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargingtariffs', index=67, number=40,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rtmconfig', index=68, number=41,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='maintenancecomputer', index=69, number=42,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='mecall2', index=70, number=43,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='automatedvaletparking', index=71, number=44,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargecontrol', index=72, number=45,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='speedalert', index=73, number=46,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=b'\020\001',
serialized_start=557,
serialized_end=1752,
)
_sym_db.RegisterEnumDescriptor(_VEHICLEAPI_QUEUETYPE)
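# QueueType deliberately defines every entry twice -- SCREAMING_CASE and
# lowercase -- with the same number (DOORS and doors are both 10). That is
# only legal because serialized_options=b'\020\001' encodes the enum option
# allow_alias = true (field 2, varint 1). Aliases are interchangeable as
# integers; number-to-name lookup returns a single canonical alias, and which
# alias wins can differ between the C++ and pure-Python protobuf runtimes.
# Sketch (hypothetical import name):
#
#     import mbcarkit_pb2 as pb
#     assert pb.VehicleAPI.DOORS == pb.VehicleAPI.doors == 10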
_ACP_COMMANDTYPE = _descriptor.EnumDescriptor(
name='CommandType',
full_name='proto.ACP.CommandType',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWNCOMMANDTYPE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DOORSLOCK', index=1, number=100,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DOORSUNLOCK', index=2, number=110,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TRUNKUNLOCK', index=3, number=115,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FUELFLAPUNLOCK', index=4, number=116,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGEFLAPUNLOCK', index=5, number=117,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGECOUPLERUNLOCK', index=6, number=118,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DOORSPREPARERENTAL', index=7, number=120,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DOORSSECUREVEHICLE', index=8, number=130,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AUXHEATSTART', index=9, number=300,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AUXHEATSTOP', index=10, number=310,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AUXHEATCONFIGURE', index=11, number=320,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TEMPERATURECONFIGURE', index=12, number=350,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WEEKPROFILECONFIGURE', index=13, number=360,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WEEKPROFILEV2CONFIGURE', index=14, number=370,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDSTART', index=15, number=400,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDSTOP', index=16, number=410,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDCONFIGURE', index=17, number=420,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDCONFIGURESEATS', index=18, number=425,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGEOPTCONFIGURE', index=19, number=430,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGEOPTSTART', index=20, number=440,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGEOPTSTOP', index=21, number=450,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FEEDPOI', index=22, number=500,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FEEDFREETEXT', index=23, number=510,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENGINESTART', index=24, number=550,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENGINESTOP', index=25, number=560,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ENGINEAVPSTART', index=26, number=570,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TCUWAKEUP', index=27, number=600,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TCUSWUPDATE', index=28, number=610,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TCURCSRESET', index=29, number=620,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TCUINTERROGATION', index=30, number=630,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPEEDALERTSTART', index=31, number=710,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPEEDALERTSTOP', index=32, number=720,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FLSHSTART', index=33, number=750,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FLSHSTOP', index=34, number=760,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SIGPOSSTART', index=35, number=770,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONTRACTCONFIGURE', index=36, number=800,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONTRACTREMOVE', index=37, number=810,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ROOTCONFIGURE', index=38, number=820,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ROOTREMOVE', index=39, number=830,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TRIPCOMP', index=40, number=850,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MAINTENANCECONFIGURE', index=41, number=930,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MAINTENANCECOMPUTEROFFSET', index=42, number=931,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SHORTTESTEXECUTE', index=43, number=935,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SERVICEACTIVATIONCONFIGURE', index=44, number=940,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DC2SERVICEACTIVATIONCONFIGURE', index=45, number=945,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DC2RAWDOWNLOAD', index=46, number=950,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='APPLICATIONCONFIGURATION', index=47, number=955,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DC2STARTTRACKING', index=48, number=960,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ATPSEQUENCE', index=49, number=990,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMTOGGLEINTERIOR', index=50, number=1000,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMTOGGLETOW', index=51, number=1010,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMSELECTINTERIORTOW', index=52, number=1020,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMDESELECTINTERIORTOW', index=53, number=1030,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMSTOP', index=54, number=1040,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WINDOWOPEN', index=55, number=1100,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WINDOWCLOSE', index=56, number=1110,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WINDOWVENTILATE', index=57, number=1120,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='WINDOWMOVE', index=58, number=1121,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ROOFOPEN', index=59, number=1130,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ROOFCLOSE', index=60, number=1140,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ROOFLIFT', index=61, number=1150,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ROOFMOVE', index=62, number=1151,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BATTERYMAXSOC', index=63, number=2000,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BATTERYCHARGEPROGRAM', index=64, number=2010,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGEPROGRAMCONFIGURE', index=65, number=2020,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ONBOARDFENCESCREATE', index=66, number=2100,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ONBOARDFENCESUPDATE', index=67, number=2110,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ONBOARDFENCESDELETE', index=68, number=2120,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPEEDFENCESCREATE', index=69, number=2200,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPEEDFENCESUPDATE', index=70, number=2210,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPEEDFENCESDELETE', index=71, number=2220,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGINGTARIFFSCREATE', index=72, number=2300,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGINGTARIFFSUPDATE', index=73, number=2310,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGINGTARIFFSDELETE', index=74, number=2320,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMSTART', index=75, number=2500,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMSELECTINTERIOR', index=76, number=2510,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMDESELECTINTERIOR', index=77, number=2520,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMSELECTTOW', index=78, number=2530,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMDESELECTTOW', index=79, number=2540,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMSELECTDAMAGEDETECTION', index=80, number=2550,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMDESELECTDAMAGEDETECTION', index=81, number=2560,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THEFTALARMCONFIRMDAMAGEDETECTION', index=82, number=2570,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MECALL2START', index=83, number=2600,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='UDXTRIGGERSYNCHRONIZATION', index=84, number=1200,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='UDXACTIVEUSERPROFILE', index=85, number=1210,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='UDXRESETUSERDATA', index=86, number=1220,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='USERPROFSYNCH', index=87, number=1230,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='USERDATARESET', index=88, number=1240,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PROFACTIVATIONSNAP', index=89, number=1250,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PROFACTIVATIONDIRECT', index=90, number=1255,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SOFTWAREUPDATE', index=91, number=1260,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PUSHNOTIFICATION', index=92, number=1270,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MECALLCOMMAND', index=93, number=1310,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDSTARTRCS', index=94, number=1400,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDSTOPRCS', index=95, number=1410,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDCONFIGURERCS', index=96, number=1420,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TCUCONFIGURE', index=97, number=1430,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EDISONSERVICEACTIVATION', index=98, number=1431,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TESTSEQUENCE', index=99, number=1432,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDCONFIGURERACP', index=100, number=1433,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGEOPTCONFIGURERACP', index=101, number=1434,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='TARIFFTABLEDOWNLOAD', index=102, number=1435,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDSTARTRACP', index=103, number=1436,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PRECONDSTOPRACP', index=104, number=1437,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ROOTCERTIFICATEREMOVE', index=105, number=1438,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ONREQUESTPROBEUPLOAD', index=106, number=1439,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ROOTCERTIFICATEDOWNLOAD', index=107, number=1440,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONTRACTCERTIFICATEREMOVE', index=108, number=1441,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONTRACTCERTIFICATEDOWNLOAD', index=109, number=1442,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PROBECONFIGURATIONUPDATE', index=110, number=1443,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RDIAGDELETEECU', index=111, number=1500,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RDIAGSTATUSREPORT', index=112, number=1501,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RDIAGEXECUTION', index=113, number=1502,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='IMMOBILIZERCHALLENGE', index=114, number=1600,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='IMMOBILIZERSEARCHKEYLINE', index=115, number=1610,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='IMMOBILIZERRELEASEKEYLINE', index=116, number=1620,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='IMMOBILIZERLOCKKEYLINE', index=117, number=1630,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='IMMOBILIZERLOCKVEHICLE', index=118, number=1631,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='IMMOBILIZERRELEASEVEHICLE', index=119, number=1621,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SETRENTALSIGNAL', index=120, number=1700,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BLACKCHANNELDOWNLOAD', index=121, number=1800,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BLACKCHANNELUPLOAD', index=122, number=1810,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CONFIGURECSM', index=123, number=1900,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='UPDATEVEHICLEINFO', index=124, number=1901,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RELAYMESSAGETOCSM', index=125, number=1902,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RELAYRENTALREQUESTTOCSB', index=126, number=1903,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RTMDOWNLOADCONFIG', index=127, number=2400,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='RTMREADCONFIG', index=128, number=2410,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AVPACTIVATE', index=129, number=2700,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='CHARGECONTROLCONFIGURE', index=130, number=2800,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='unknownCommandType', index=131, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='doorsLock', index=132, number=100,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='doorsUnlock', index=133, number=110,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='trunkUnlock', index=134, number=115,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='fuelflapUnlock', index=135, number=116,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargeflapUnlock', index=136, number=117,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargecouplerUnlock', index=137, number=118,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='doorsPrepareRental', index=138, number=120,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='doorsSecureVehicle', index=139, number=130,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='auxheatStart', index=140, number=300,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='auxheatStop', index=141, number=310,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='auxheatConfigure', index=142, number=320,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='temperatureConfigure', index=143, number=350,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='weekprofileConfigure', index=144, number=360,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='weekprofileV2Configure', index=145, number=370,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondStart', index=146, number=400,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondStop', index=147, number=410,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondConfigure', index=148, number=420,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondConfigureSeats', index=149, number=425,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargeoptConfigure', index=150, number=430,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargeoptStart', index=151, number=440,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargeoptStop', index=152, number=450,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='feedPoi', index=153, number=500,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='feedFreetext', index=154, number=510,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='engineStart', index=155, number=550,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='engineStop', index=156, number=560,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='engineAvpstart', index=157, number=570,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='tcuWakeup', index=158, number=600,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='tcuSwUpdate', index=159, number=610,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='tcuRcsReset', index=160, number=620,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='tcuInterrogation', index=161, number=630,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='speedalertStart', index=162, number=710,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='speedalertStop', index=163, number=720,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='flshStart', index=164, number=750,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='flshStop', index=165, number=760,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='sigposStart', index=166, number=770,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='contractConfigure', index=167, number=800,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='contractRemove', index=168, number=810,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rootConfigure', index=169, number=820,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rootRemove', index=170, number=830,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='tripcomp', index=171, number=850,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='maintenanceConfigure', index=172, number=930,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='maintenanceComputerOffset', index=173, number=931,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='shorttestExecute', index=174, number=935,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='serviceactivationConfigure', index=175, number=940,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='dc2ServiceactivationConfigure', index=176, number=945,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='dc2RawDownload', index=177, number=950,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='applicationConfiguration', index=178, number=955,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='dc2StartTracking', index=179, number=960,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='atpSequence', index=180, number=990,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmToggleInterior', index=181, number=1000,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmToggleTow', index=182, number=1010,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmSelectInteriorTow', index=183, number=1020,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmDeselectInteriorTow', index=184, number=1030,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmStop', index=185, number=1040,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='windowOpen', index=186, number=1100,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='windowClose', index=187, number=1110,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='windowVentilate', index=188, number=1120,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='windowMove', index=189, number=1121,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='roofOpen', index=190, number=1130,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='roofClose', index=191, number=1140,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='roofLift', index=192, number=1150,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='roofMove', index=193, number=1151,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='batteryMaxsoc', index=194, number=2000,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='batteryChargeprogram', index=195, number=2010,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargeprogramconfigure', index=196, number=2020,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='onboardfencesCreate', index=197, number=2100,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='onboardfencesUpdate', index=198, number=2110,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='onboardfencesDelete', index=199, number=2120,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='speedfencesCreate', index=200, number=2200,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='speedfencesUpdate', index=201, number=2210,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='speedfencesDelete', index=202, number=2220,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargingtariffsCreate', index=203, number=2300,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargingtariffsUpdate', index=204, number=2310,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargingtariffsDelete', index=205, number=2320,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmstart', index=206, number=2500,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmselectinterior', index=207, number=2510,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmdeselectinterior', index=208, number=2520,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmselecttow', index=209, number=2530,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmdeselecttow', index=210, number=2540,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmselectdamagedetection', index=211, number=2550,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmdeselectdamagedetection', index=212, number=2560,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='theftalarmconfirmdamagedetection', index=213, number=2570,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='mecall2start', index=214, number=2600,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='udxTriggerSynchronization', index=215, number=1200,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='udxActiveUserProfile', index=216, number=1210,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='udxResetUserData', index=217, number=1220,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='userProfSynch', index=218, number=1230,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='userDataReset', index=219, number=1240,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='profActivationSnap', index=220, number=1250,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='profActivationDirect', index=221, number=1255,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='softwareUpdate', index=222, number=1260,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='pushNotification', index=223, number=1270,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='mecallcommand', index=224, number=1310,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondStartRcs', index=225, number=1400,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondStopRcs', index=226, number=1410,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondConfigureRcs', index=227, number=1420,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='tcuConfigure', index=228, number=1430,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='edisonServiceActivation', index=229, number=1431,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='testSequence', index=230, number=1432,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondConfigureRacp', index=231, number=1433,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargeoptConfigureRacp', index=232, number=1434,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='tariffTableDownload', index=233, number=1435,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondStartRacp', index=234, number=1436,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='precondStopRacp', index=235, number=1437,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rootCertificateRemove', index=236, number=1438,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='onRequestProbeUpload', index=237, number=1439,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rootCertificateDownload', index=238, number=1440,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='contractCertificateRemove', index=239, number=1441,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='contractCertificateDownload', index=240, number=1442,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='probeConfigurationUpdate', index=241, number=1443,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rdiagDeleteEcu', index=242, number=1500,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rdiagStatusReport', index=243, number=1501,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rdiagExecution', index=244, number=1502,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='immobilizerChallenge', index=245, number=1600,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='immobilizerSearchKeyline', index=246, number=1610,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='immobilizerReleaseKeyline', index=247, number=1620,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='immobilizerLockKeyline', index=248, number=1630,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='immobilizerLockVehicle', index=249, number=1631,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='immobilizerReleaseVehicle', index=250, number=1621,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='setRentalSignal', index=251, number=1700,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='blackchannelDownload', index=252, number=1800,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='blackchannelUpload', index=253, number=1810,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='configurecsm', index=254, number=1900,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='updatevehicleinfo', index=255, number=1901,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='relaymessagetocsm', index=256, number=1902,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='relayrentalrequesttocsb', index=257, number=1903,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rtmDownloadConfig', index=258, number=2400,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='rtmReadConfig', index=259, number=2410,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='avpActivate', index=260, number=2700,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='chargecontrolconfigure', index=261, number=2800,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=b'\020\001',
serialized_start=1763,
serialized_end=8068,
)
_sym_db.RegisterEnumDescriptor(_ACP_COMMANDTYPE)
_VVA = _descriptor.Descriptor(
name='VVA',
full_name='proto.VVA',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_VVA_COMMANDSTATE,
_VVA_COMMANDCONDITION,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=33,
serialized_end=326,
)
_VEHICLEAPI = _descriptor.Descriptor(
name='VehicleAPI',
full_name='proto.VehicleAPI',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_VEHICLEAPI_COMMANDSTATE,
_VEHICLEAPI_ATTRIBUTESTATUS,
_VEHICLEAPI_QUEUETYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=329,
serialized_end=1752,
)
_ACP = _descriptor.Descriptor(
name='ACP',
full_name='proto.ACP',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_ACP_COMMANDTYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1755,
serialized_end=8068,
)
_VVA_COMMANDSTATE.containing_type = _VVA
_VVA_COMMANDCONDITION.containing_type = _VVA
_VEHICLEAPI_COMMANDSTATE.containing_type = _VEHICLEAPI
_VEHICLEAPI_ATTRIBUTESTATUS.containing_type = _VEHICLEAPI
_VEHICLEAPI_QUEUETYPE.containing_type = _VEHICLEAPI
_ACP_COMMANDTYPE.containing_type = _ACP
DESCRIPTOR.message_types_by_name['VVA'] = _VVA
DESCRIPTOR.message_types_by_name['VehicleAPI'] = _VEHICLEAPI
DESCRIPTOR.message_types_by_name['ACP'] = _ACP
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
VVA = _reflection.GeneratedProtocolMessageType('VVA', (_message.Message,), {
'DESCRIPTOR' : _VVA,
'__module__' : 'acp_pb2'
# @@protoc_insertion_point(class_scope:proto.VVA)
})
_sym_db.RegisterMessage(VVA)
VehicleAPI = _reflection.GeneratedProtocolMessageType('VehicleAPI', (_message.Message,), {
'DESCRIPTOR' : _VEHICLEAPI,
'__module__' : 'acp_pb2'
# @@protoc_insertion_point(class_scope:proto.VehicleAPI)
})
_sym_db.RegisterMessage(VehicleAPI)
ACP = _reflection.GeneratedProtocolMessageType('ACP', (_message.Message,), {
'DESCRIPTOR' : _ACP,
'__module__' : 'acp_pb2'
# @@protoc_insertion_point(class_scope:proto.ACP)
})
_sym_db.RegisterMessage(ACP)
DESCRIPTOR._options = None
_VEHICLEAPI_QUEUETYPE._options = None
_ACP_COMMANDTYPE._options = None
# @@protoc_insertion_point(module_scope)
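# --- Editor's usage sketch (hedged, not part of the generated file). Assuming
# this module is importable as `acp_pb2` (per the `__module__` entries above)
# and the nested enum wrapped by _ACP_COMMANDTYPE is named `CommandType`, the
# registered message classes are used like:
#
#     import acp_pb2
#     msg = acp_pb2.ACP()
#     code = acp_pb2.ACP.CommandType.Value('precondStartRcs')   # 1400 per the descriptor above
#     name = acp_pb2.ACP.CommandType.Name(code)                 # 'precondStartRcs'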
| 43.631168 | 14,931 | 0.740687 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 22,066 | 0.247064 |
82f79bc5bf128b18eacd7d7a60bb82df70b8c973 | 5,632 | py | Python | src/promnesia/sources/reddit.py | seanbreckenridge/promnesia-fork | 92ac664176c8101672cbab90ea6964c360ff287e | ["MIT"] | 2 | 2018-06-04T05:59:02.000Z | 2019-08-25T21:45:07.000Z | src/promnesia/sources/reddit.py | seanbreckenridge/promnesia-fork | 92ac664176c8101672cbab90ea6964c360ff287e | ["MIT"] | 1 | 2019-07-14T13:23:45.000Z | 2019-07-14T13:23:45.000Z | src/promnesia/sources/reddit.py | seanbreckenridge/promnesia-fork | 92ac664176c8101672cbab90ea6964c360ff287e | ["MIT"] | null | null | null | '''
Uses HPI [[https://github.com/karlicoss/HPI/blob/master/doc/MODULES.org#myreddit][reddit]] module
'''
from itertools import chain
from typing import Set, Optional
from ..common import Visit, Loc, extract_urls, Results, logger
def index(*, render_markdown: bool = False, renderer: Optional['RedditRenderer'] = None) -> Results:
from . import hpi
try:
from my.reddit.all import submissions, comments, saved, upvoted
except ModuleNotFoundError as e:
if "No module named 'my.reddit.all'" in str(e):
import warnings
warnings.warn("DEPRECATED/reddit: Using an old version of HPI, please update")
from my.reddit import submissions, comments, saved, upvoted # type: ignore[no-redef]
else:
raise e
if renderer is not None:
assert callable(renderer), f"{renderer} is not a callable (should be a subclass of RedditRenderer)"
r = renderer(render_markdown=render_markdown)
else:
r = RedditRenderer(render_markdown=render_markdown)
logger.info('processing saves')
for s in saved():
try:
yield from r._from_save(s)
except Exception as e:
yield e
logger.info('processing comments')
for c in comments():
try:
yield from r._from_comment(c)
except Exception as e:
yield e
logger.info('processing submissions')
for sub in submissions():
try:
yield from r._from_submission(sub)
except Exception as e:
yield e
logger.info('processing upvotes')
for u in upvoted():
try:
yield from r._from_upvote(u)
except Exception as e:
yield e
# mostly here so we can keep track of how the user
# wants to render markdown
class RedditRenderer:
def __init__(self, render_markdown: bool = False):
self._link_extractor = None
self._parser_cls = None
try:
from .markdown import TextParser, extract_from_text
self._link_extractor = extract_from_text
self._parser_cls = TextParser
except ImportError as import_err:
            # TODO: add dummy _link_extractor and _parser_cls classes in case
# these are called by a subclass?
# only send error if the user is trying to enable this feature
if render_markdown:
logger.exception(import_err)
logger.critical("Could not import markdown module to render reddit markdown. Try 'python3 -m pip install mistletoe'")
render_markdown = False # force to be false, couldn't import
self.render_markdown = render_markdown
def _from_comment(self, i: 'Comment') -> Results:
locator = Loc.make(
title='Reddit comment',
href=i.url,
)
yield from self._from_common(i, locator=locator)
def _from_submission(self, i: 'Submission') -> Results:
locator = Loc.make(
title=f'Reddit submission: {i.title}',
href=i.url,
)
yield from self._from_common(i, locator=locator)
def _from_upvote(self, i: 'Upvote') -> Results:
locator = Loc.make(
            title='Reddit upvote',
href=i.url,
)
yield from self._from_common(i, locator=locator)
def _from_save(self, i: 'Save') -> Results:
locator = Loc.make(
title='Reddit save',
href=i.url,
)
yield from self._from_common(i, locator=locator)
# to allow for possible subclassing by the user?
def _render_body(self, text: str) -> str:
if self.render_markdown and self._parser_cls is not None:
return self._parser_cls(text)._doc_ashtml()
else:
return text
def _from_common(self, i: 'RedditBase', locator: Loc) -> Results:
urls = [i.url]
# TODO this should belong to HPI.. fix permalink handling I guess
# ok, it's not present for all of them..
lurl = i.raw.get('link_url')
if lurl is not None:
urls.append(lurl)
lurl = i.raw.get('url')
if lurl is not None:
urls.append(lurl)
context = self._render_body(i.text)
emitted: Set[str] = set()
for url in chain(urls, extract_urls(i.text)):
if url in emitted:
continue
yield Visit(
url=url,
dt=i.created,
context=context,
locator=locator,
)
emitted.add(url)
# extract from markdown links like [link text](https://...)
        # in case URLExtract missed any
        #
        # this should run even if the user didn't enable
        # the render_markdown flag, as it may catch extra links that URLExtract didn't
# would still require mistletoe to be installed, but
# the user may already have it installed for the auto/markdown modules
if self._link_extractor is not None:
for res in self._link_extractor(i.text):
if isinstance(res, Exception):
yield res
continue
if res.url in emitted:
continue
yield Visit(
url=res.url,
dt=i.created,
context=context,
locator=locator,
)
emitted.add(res.url)
import typing
if typing.TYPE_CHECKING:
from my.reddit.common import Submission, Comment, Save, Upvote, RedditBase
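# --- Editor's usage sketch (hedged, not from the original file): a promnesia
# config would typically register this source roughly as below. `Source` is
# assumed to come from promnesia's config API; check the project docs.
#
#     from promnesia.common import Source
#     from promnesia.sources import reddit
#     SOURCES = [Source(reddit.index, render_markdown=True)]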
| 32.182857 | 133 | 0.588956 | 3,701 | 0.657138 | 4,008 | 0.711648 | 0 | 0 | 0 | 0 | 1,407 | 0.249822 |
82f8c42e4b7f145f7c76d62522e5c2d2fdd59996 | 8,992 | py | Python | puwifi.py | SaicharanKandukuri/logmein | 9946488fb61093bf2394254da056d2ebd290e83a | [
"MIT"
]
| 4 | 2021-12-01T12:07:49.000Z | 2022-03-16T15:11:57.000Z | puwifi.py | SaicharanKandukuri/puwifi | 9946488fb61093bf2394254da056d2ebd290e83a | [
"MIT"
]
| 10 | 2021-12-01T11:41:04.000Z | 2022-03-16T16:12:36.000Z | puwifi.py | SaicharanKandukuri/logmein | 9946488fb61093bf2394254da056d2ebd290e83a | [
"MIT"
]
| 2 | 2021-11-29T16:16:22.000Z | 2021-11-30T05:06:05.000Z | import optparse
import sys
from sys import getsizeof
import logging
from signal import signal, SIGINT
import time
import requests
# MIT License
#
# Copyright (c) 2022 SaicharanKandukuri
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from rich.logging import RichHandler
FORMAT = "%(message)s"
logging.basicConfig(
level="NOTSET",
format=FORMAT,
datefmt="[%X]",
handlers=[RichHandler()]
)
logging.disable('DEBUG')
log = logging.getLogger("rich")
class WifiUtils:
"""class for wifi utils"""
def __init__(self, username, password, host, port):
self.username = username
self.password = password
self.host = host
self.port = port
@classmethod
def request(cls,
method,
username,
password,
host, port,
timeout) -> list:
"""request method: sends request to wifi host
Args:
method (str): interaction method "login.xml" or "logout.xml". Defaults to "login.xml".
username (str): username assigned by parul university to access wifi
password (str): password assigned by parul university to access wifi
            host (str): hostname of the parul university wifi hotspot/routers. Defaults to "10.0.0.11".
port (str): port to send login request. Defaults to "8090".
timeout (int): request timeout. Defaults to 10.
Returns:
list
server_request status[true|false]
response(xml data returned form server)
status_code(web request status code)
"""
url = ("http://"+host+":"+port+"/"+method)
body = ("mode=191&username=" + username + "&password=" + password +
"&a=1630404423764&producttype=0"
)
headers = {
"Host": "http://" + host + ":" + port + "",
"Content-Length": str(getsizeof(body)),
"User-Agent": "Chrome/92.0.4515.159 Safari/537.36",
"Content-Type": "application/x-www-form-urlencoded",
"Accept": "*/*",
"Origin": "http://" + host + ":" + port,
"Referer": "http://" + host + ":" + port + "/",
"Accept-Encoding": "gzip defalte",
"Accept-Language": "en-US,en;q=0.9",
"Connection": "close",
}
body_array = bytearray(body, 'utf-8')
req = requests.post(url,
data=body_array,
headers=headers,
timeout=timeout,
verify=False
)
return [(req.status_code == 200), req.text, req.status_code]
def login(self,
username,
password,
host,
port="8090",
method="login.xml",
timeout=10) -> list:
"""login: uses request method to send login web request with credentials to wifi host
Args:
username (str): username assigned by parul university to access wifi
password (str): password assigned by parul university to access wifi
host (str): hostname of the parul university wifi hotspot/routers
Defaults to "10.0.0.11"
port (str, optional): port to send login request. Defaults to "8090".
method (str, optional): interaction method
"login.xml" or "logout.xml". Defaults to "login.xml".
timeout (int, optional): request timeout. Defaults to 10.
"""
return self.request(method, username, password, host, port, timeout)
def logout(self,
username,
password,
host,
port="8090",
method="logout.xml",
timeout=10) -> list:
"""logout: uses request method to send logout web request with credentials to wifi host
Args:
username (str): username assigned by parul university to access wifi
password (str): password assigned by parul university to access wifi
host (str): hostname of the parul university wifi hotspot/routers
Defaults to "10.0.0.11"
port (str, optional): port to send login request. Defaults to "8090".
method (str, optional): interaction method
"login.xml" or "logout.xml". Defaults to "logout.xml".
timeout (int, optional): request timeout. Defaults to 10.
"""
return self.request(method, username, password, host, port, timeout)
# def get_xml_msg(xml): # for later (●'◡'●)
# return Et.parse(xml).getroot()[1]
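# --- Editor's usage sketch (hedged, not from the original file): exercising
# the request() classmethod directly, mirroring what login()/logout() do; the
# credentials and host below are placeholders.
#
#     ok, xml, status = WifiUtils.request("login.xml", "user", "pass",
#                                         "10.0.0.11", "8090", 10)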
def grey_print(_string):
"""prints outs grey text
Args:
_string (str)
"""
print(f"\033[90m{_string}\033[0m")
def connection_to(url, timeout=10):
"""checks if connection to url is available"""
try:
requests.get(url, timeout=timeout)
return True
except (requests.ConnectionError,
requests.Timeout):
return False
def keep_alive(username, password, host, port):
"""keeps connection alive to wifi host"""
while True:
        if connection_to("http://" + host + ":" + port + "/"):
log.info("connection to router \"available\"")
else:
log.critical("connection to router \"unavailable\"")
if connection_to("https://google.com"):
log.info("Connected to the internet")
else:
log.warning("Not connected to the internet")
log.info("Tying to login back")
try:
log.info(WifiUtils.login(username, password, host, port))
except (requests.ConnectionError,
requests.Timeout):
log.critical(
"Connection error: \"UNSTABLE CONNECTION TO HOST\"")
time.sleep(5)
def exit_handler(_signal, frame):
"""captures keyboard interrupts and kill signals & exits with messesage"""
log.warning('SIGINT or CTRL-C detected. Exiting gracefully')
grey_print("signal:"+str(_signal))
grey_print("frame:"+str(frame))
sys.exit(0)
if __name__ == '__main__':
signal(SIGINT, exit_handler)
parser = optparse.OptionParser()
parser.add_option('-u', '--username', dest='username',
help='username to login/logout with parul university wifi service')
parser.add_option('-p', '--password', dest='password',
help='password to login/logout with parul university wifi service')
parser.add_option('-H', '--host', dest='host',
default='10.0.0.11', type=str)
parser.add_option('-P', '--port', dest='port',
default='8090', type=str)
parser.add_option('-k', '--keep-alive', action='store_true',
help='keep connecting to wifi when it gets signed out', default=False)
parser.add_option('-o', '--logout', action='store_true',
help='logout from wifi', default=False)
parser.add_option('-l', '--login', action='store_true',
help='login to wifi', default=False)
options, args = parser.parse_args()
WifiUtils = WifiUtils(
options.username, options.password, options.host, options.port)
if options.login:
log.info("=> login <=")
log.info(WifiUtils.login(options.username,
options.password,
options.host, options.port,
))
sys.exit(0)
if options.logout:
log.info("=> logout <=")
log.info(WifiUtils.logout(options.username,
options.password,
options.host, options.port,
))
sys.exit(0)
if options.keep_alive:
log.info("=> keep alive <=")
keep_alive(options.username,
options.password,
options.host, options.port,
)
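# --- Editor's usage sketch (hedged, not from the original file): typical
# invocations given the optparse flags defined above.
#
#     python puwifi.py -u USERNAME -p PASSWORD --login
#     python puwifi.py -u USERNAME -p PASSWORD --logout
#     python puwifi.py -u USERNAME -p PASSWORD -k   # keep the session alive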
| 36.702041 | 98 | 0.585187 | 4,098 | 0.455435 | 0 | 0 | 1,998 | 0.222049 | 0 | 0 | 4,714 | 0.523894 |
82fa552a0171833c0c07fa00ed134b17a1330763 | 2,974 | py | Python | sphinxsimulink/diagram/application.py | dekalinowski/sphinx-simulink | f86daf2efdd7b0c84307bfb17c23efff0c72a8a9 | [
"MIT"
]
| 2 | 2017-12-06T00:58:05.000Z | 2020-05-27T21:00:59.000Z | sphinxsimulink/diagram/application.py | dekalinowski/sphinx-simulink | f86daf2efdd7b0c84307bfb17c23efff0c72a8a9 | [
"MIT"
]
| null | null | null | sphinxsimulink/diagram/application.py | dekalinowski/sphinx-simulink | f86daf2efdd7b0c84307bfb17c23efff0c72a8a9 | [
"MIT"
]
| 2 | 2020-01-24T09:17:11.000Z | 2020-04-02T10:15:02.000Z | """
sphinx-simulink.application
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Embed Simulink diagrams in your documentation.
:copyright:
Copyright 2016 by Dennis Edward Kalinowski <[email protected]>.
:license:
MIT, see LICENSE for details.
"""
import matlab.engine
import os
from sphinx.errors import SphinxError
from sphinx.util.osutil import ensuredir
from sphinxsimulink.diagram import directives,nodes
from sphinxsimulink.metadata import __version__
engine = None
class SimulinkDiagramError(SphinxError):
pass
def render_diagram(app, node, docname):
global engine
uri = node['uri']
# do not regenerate
    if os.path.exists( uri ):
        return
ensuredir( os.path.dirname( uri ) )
try:
# reuse last engine to save loading time
if engine == None:
engine = matlab.engine.start_matlab()
else:
# clean up used engines
engine.restoredefaultpath(nargout=0)
engine.close('all', nargout=0)
engine.bdclose('all', nargout=0)
engine.clear('classes', nargout=0)
# start engine from document directory
engine.cd( os.path.dirname( app.env.doc2path( docname ) ) )
# then, support changing directory (relative to document)
        directory = node.get('dir')
        if directory:
            engine.cd( directory )
# finally, add the MATLAB paths relative to the changed directory
pathlist = node.get('addpath')
if pathlist:
for path in pathlist:
engine.addpath( path )
# preload script
preload = node.get('preload')
if preload:
engine.eval( preload + ';', nargout=0)
# load system
system = node.get('system')
if system:
engine.load_system( system );
# if subsystem specified, print from this layer
subsystem = node.get('subsystem')
if subsystem:
system = "/".join( [ system, subsystem ] )
# print from Simulink handle to .png
engine.eval(
"print( get_param( '{}', 'Handle' ), '-dpng', '{}' )".
format( system, uri ),
nargout=0
)
except matlab.engine.MatlabExecutionError as err:
        raise SimulinkDiagramError('Unable to render Simulink diagram due ' +
                                   'to MATLAB execution error'
        ) from err
def process_diagram_nodes(app, doctree, docname):
for node in doctree.traverse(nodes.diagram):
render_diagram(app, node, docname)
node.replace_self(node.children)
def terminate_matlab_engine(app, exception):
global engine
if engine is not None:
engine.quit()
engine = None
def setup(app):
app.add_directive('simulink-diagram', directives.SimulinkDiagramDirective)
app.connect('doctree-resolved', process_diagram_nodes)
app.connect('build-finished', terminate_matlab_engine)
return {'version': __version__}
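# --- Editor's usage sketch (hedged, not from the original file): enabling the
# extension and using the 'simulink-diagram' directive registered in setup().
# The exact argument/option spelling is defined by
# directives.SimulinkDiagramDirective (not shown here); the option names below
# are the keys read by render_diagram() above.
#
#     # conf.py
#     extensions = ['sphinxsimulink.diagram']
#
#     # document.rst
#     .. simulink-diagram:: MyModel
#        :dir: ../models
#        :subsystem: Controller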
| 23.983871 | 78 | 0.613652 | 49 | 0.016476 | 0 | 0 | 0 | 0 | 0 | 0 | 866 | 0.29119 |
82fb854e5de1301c151e67839d828f2c3db87864 | 221 | py | Python | bentoml/pytorch_lightning.py | francoisserra/BentoML | 213e9e9b39e055286f2649c733907df88e6d2503 | [
"Apache-2.0"
]
| 1 | 2021-06-12T17:04:07.000Z | 2021-06-12T17:04:07.000Z | bentoml/pytorch_lightning.py | francoisserra/BentoML | 213e9e9b39e055286f2649c733907df88e6d2503 | [
"Apache-2.0"
]
| 4 | 2021-05-16T08:06:25.000Z | 2021-11-13T08:46:36.000Z | bentoml/pytorch_lightning.py | francoisserra/BentoML | 213e9e9b39e055286f2649c733907df88e6d2503 | [
"Apache-2.0"
]
| null | null | null | from ._internal.frameworks.pytorch_lightning import load
from ._internal.frameworks.pytorch_lightning import save
from ._internal.frameworks.pytorch_lightning import load_runner
__all__ = ["load", "load_runner", "save"]
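# --- Editor's usage sketch (hedged, not from the original file): the
# re-exports above are typically used as below; exact signatures live in
# bentoml._internal.frameworks.pytorch_lightning.
#
#     import bentoml.pytorch_lightning
#     tag = bentoml.pytorch_lightning.save("my_model", model)
#     model = bentoml.pytorch_lightning.load("my_model:latest")
#     runner = bentoml.pytorch_lightning.load_runner("my_model:latest")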
| 36.833333 | 63 | 0.828054 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 25 | 0.113122 |
82fd258b0956b6fcb923493f7bbd91bb6546c5c0 | 335 | py | Python | bugex_online/bugex_webapp/templatetags/custom_tags.py | fkleon/bugex-online | bf0687ff6167d66980eb44adcdb14e8fc65d9504 | [
"Apache-2.0"
]
| null | null | null | bugex_online/bugex_webapp/templatetags/custom_tags.py | fkleon/bugex-online | bf0687ff6167d66980eb44adcdb14e8fc65d9504 | [
"Apache-2.0"
]
| 7 | 2020-06-30T23:15:12.000Z | 2022-02-01T00:57:38.000Z | bugex_online/bugex_webapp/templatetags/custom_tags.py | fkleon/bugex-online | bf0687ff6167d66980eb44adcdb14e8fc65d9504 | [
"Apache-2.0"
]
| null | null | null | from django import template
from django.conf import settings
register = template.Library()
@register.simple_tag
def settings_value(name):
"""
    This tag allows templates to access values from the Django settings file.
"""
try:
        return getattr(settings, name)
except AttributeError:
return ""
| 22.333333 | 82 | 0.707463 | 0 | 0 | 0 | 0 | 241 | 0.719403 | 0 | 0 | 96 | 0.286567 |
82fd4ff37849618d568dd247a4ca0ff25544b1fd | 13,195 | py | Python | CreateRobot.py | KonstantinosAng/KinectPython | cb2c7822dd9ef959230d9488aaa3de8ec1816e08 | [
"MIT"
]
| 5 | 2020-08-06T04:28:27.000Z | 2022-03-23T09:10:29.000Z | CreateRobot.py | KonstantinosAng/KinectPython | cb2c7822dd9ef959230d9488aaa3de8ec1816e08 | [
"MIT"
]
| 4 | 2020-11-28T07:23:40.000Z | 2022-03-28T08:57:07.000Z | CreateRobot.py | KonstantinosAng/KinectPython | cb2c7822dd9ef959230d9488aaa3de8ec1816e08 | [
"MIT"
]
| 1 | 2020-10-18T02:39:55.000Z | 2020-10-18T02:39:55.000Z | """
Author: Konstantinos Angelopoulos
Date: 04/02/2020
All rights reserved.
Feel free to use and modify and if you like it give it a star.
Import the Robot's Step Files and Color/Scale/Assemble them using the instructions in /RoboDK/KUKA/KUKA LWR IV+ Description
(for Original=kuka_lwr_model_description.json, for custom=custom_lwr_model_description.json, for custom2=custom_lwr_model_description_2.json)
before running the code to complete the robot model.
#########################################################################
######### To quickly color and scale use the next lines of code #########
#########################################################################
from robolink import * # RoboDK API
from robodk import * # Robot toolbox
RDK = Robolink()
for station in RDK.ItemList():
for item in station.Childs():
item.Scale(1000)
item.setColor(255/255, 85/255, 0/255, 255/255)
########################################################################
#### For custom2 run these commands before assembling the stl files ####
########################################################################
from robolink import * # RoboDK API
from robodk import * # Robot toolbox
import numpy as np
RDK = Robolink()
for station in RDK.ItemList():
for item in station.Childs():
item.setGeometryPose(item.Pose()*rotz(np.pi))
item.Scale(1000)
item.setColor(255/255, 85/255, 0/255, 255/255)
and after building the mechanism and importing it, in order to rotate the robot run:
from robolink import * # RoboDK API
from robodk import * # Robot toolbox
RDK = Robolink()
ref = RDK.Item('reference2')
ref.setPoseAbs(ref.Pose()*rotz(pi))
##############################################################################################
##### The original option is just the robot model without any inverted sense and joints ######
##### home are [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] ###########################################
##############################################################################################
##### The custom robot is the real model that has the same limitations, home joints and ######
##### senses as the REAL KUKA LWR but the X and Y axis system are inverted ###################
##############################################################################################
##### The custom2 robot is the same as the custom option but with the X and Y axis being #####
##### the same as the REAL KUKA ROBOT ########################################################
##############################################################################################
"""
# Start the RoboDK API
from robolink.robolink import *
from robodk.robodk import *
import json
import os
# ORIGINAL ROBOT DATA
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'RoboDK/KUKA/KUKA LWR IV+ Description/kuka_lwr_model_description.json')) as config_file:
data = json.load(config_file)
original_robot_name = data['Robot name']
original_robot_dof = data['DOF']
original_robot_joint1 = data['Joint 1']
original_robot_joint2 = data['Joint 2']
original_robot_joint3 = data['Joint 3']
original_robot_joint4 = data['Joint 4']
original_robot_joint5 = data['Joint 5']
original_robot_joint6 = data['Joint 6']
original_robot_joint7 = data['Joint 7']
original_robot_joints_build = [original_robot_joint1["Build joints"], original_robot_joint2["Build joints"], original_robot_joint3["Build joints"],
original_robot_joint4["Build joints"], original_robot_joint5["Build joints"], original_robot_joint6["Build joints"],
original_robot_joint7["Build joints"]]
original_robot_joints_home = [original_robot_joint1["Home"], original_robot_joint2["Home"], original_robot_joint3["Home"],
original_robot_joint4["Home"], original_robot_joint5["Home"], original_robot_joint6["Home"], original_robot_joint7["Home"]]
original_robot_parameters = [data["d1"], data["d3"], data["d5"], data["d7"], data["dtheta1"], data["dtheta2"], data["dtheta3"], data["dtheta4"],
data["dtheta5"], data["dtheta6"], data["dtheta7"]]
original_robot_joint_senses = [original_robot_joint1["Invert Sense"], original_robot_joint2["Invert Sense"], original_robot_joint3["Invert Sense"],
original_robot_joint4["Invert Sense"], original_robot_joint5["Invert Sense"], original_robot_joint6["Invert Sense"],
original_robot_joint7["Invert Sense"]]
original_robot_joint_lower_limit = [original_robot_joint1["Minimum limit"], original_robot_joint2["Minimum limit"], original_robot_joint3["Minimum limit"],
original_robot_joint4["Minimum limit"], original_robot_joint5["Minimum limit"], original_robot_joint6["Minimum limit"],
original_robot_joint7["Minimum limit"]]
original_robot_joint_upper_limit = [original_robot_joint1["Maximum limit"], original_robot_joint2["Maximum limit"], original_robot_joint3["Maximum limit"],
original_robot_joint4["Maximum limit"], original_robot_joint5["Maximum limit"], original_robot_joint6["Maximum limit"],
original_robot_joint7["Maximum limit"]]
original_robot_base_pose = data["Base shift"]
original_robot_tool_pose = data["End-effector shift"]
# CUSTOM ROBOT DATA
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'RoboDK/KUKA/KUKA LWR IV+ Description/custom_lwr_model_description.json')) as config_file:
data = json.load(config_file)
custom_robot_name = data['Robot name']
custom_robot_dof = data['DOF']
custom_robot_joint1 = data['Joint 1']
custom_robot_joint2 = data['Joint 2']
custom_robot_joint3 = data['Joint 3']
custom_robot_joint4 = data['Joint 4']
custom_robot_joint5 = data['Joint 5']
custom_robot_joint6 = data['Joint 6']
custom_robot_joint7 = data['Joint 7']
custom_robot_joints_build = [custom_robot_joint1["Build joints"], custom_robot_joint2["Build joints"], custom_robot_joint3["Build joints"],
custom_robot_joint4["Build joints"], custom_robot_joint5["Build joints"], custom_robot_joint6["Build joints"],
custom_robot_joint7["Build joints"]]
custom_robot_joints_home = [custom_robot_joint1["Home"], custom_robot_joint2["Home"], custom_robot_joint3["Home"],
custom_robot_joint4["Home"], custom_robot_joint5["Home"], custom_robot_joint6["Home"], custom_robot_joint7["Home"]]
custom_robot_parameters = [data["d1"], data["d3"], data["d5"], data["d7"], data["dtheta1"], data["dtheta2"], data["dtheta3"], data["dtheta4"],
data["dtheta5"], data["dtheta6"], data["dtheta7"]]
custom_robot_joint_senses = [custom_robot_joint1["Invert Sense"], custom_robot_joint2["Invert Sense"], custom_robot_joint3["Invert Sense"],
custom_robot_joint4["Invert Sense"], custom_robot_joint5["Invert Sense"], custom_robot_joint6["Invert Sense"],
custom_robot_joint7["Invert Sense"]]
custom_robot_joint_lower_limit = [custom_robot_joint1["Minimum limit"], custom_robot_joint2["Minimum limit"], custom_robot_joint3["Minimum limit"],
custom_robot_joint4["Minimum limit"], custom_robot_joint5["Minimum limit"], custom_robot_joint6["Minimum limit"],
custom_robot_joint7["Minimum limit"]]
custom_robot_joint_upper_limit = [custom_robot_joint1["Maximum limit"], custom_robot_joint2["Maximum limit"], custom_robot_joint3["Maximum limit"],
custom_robot_joint4["Maximum limit"], custom_robot_joint5["Maximum limit"], custom_robot_joint6["Maximum limit"],
custom_robot_joint7["Maximum limit"]]
custom_robot_base_pose = data["Base shift"]
custom_robot_tool_pose = data["End-effector shift"]
# CUSTOM 2 ROBOT DATA
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'RoboDK/KUKA/KUKA LWR IV+ Description/custom_lwr_model_description_2.json')) as config_file:
data = json.load(config_file)
custom_2_robot_name = data['Robot name']
custom_2_robot_dof = data['DOF']
custom_2_robot_joint1 = data['Joint 1']
custom_2_robot_joint2 = data['Joint 2']
custom_2_robot_joint3 = data['Joint 3']
custom_2_robot_joint4 = data['Joint 4']
custom_2_robot_joint5 = data['Joint 5']
custom_2_robot_joint6 = data['Joint 6']
custom_2_robot_joint7 = data['Joint 7']
custom_2_robot_joints_build = [custom_2_robot_joint1["Build joints"], custom_2_robot_joint2["Build joints"], custom_2_robot_joint3["Build joints"],
custom_2_robot_joint4["Build joints"], custom_2_robot_joint5["Build joints"], custom_2_robot_joint6["Build joints"],
custom_2_robot_joint7["Build joints"]]
custom_2_robot_joints_home = [custom_2_robot_joint1["Home"], custom_2_robot_joint2["Home"], custom_2_robot_joint3["Home"],
custom_2_robot_joint4["Home"], custom_2_robot_joint5["Home"], custom_2_robot_joint6["Home"], custom_2_robot_joint7["Home"]]
custom_2_robot_parameters = [data["d1"], data["d3"], data["d5"], data["d7"], data["dtheta1"], data["dtheta2"], data["dtheta3"], data["dtheta4"],
data["dtheta5"], data["dtheta6"], data["dtheta7"]]
custom_2_robot_joint_senses = [custom_2_robot_joint1["Invert Sense"], custom_2_robot_joint2["Invert Sense"], custom_2_robot_joint3["Invert Sense"],
custom_2_robot_joint4["Invert Sense"], custom_2_robot_joint5["Invert Sense"], custom_2_robot_joint6["Invert Sense"],
custom_2_robot_joint7["Invert Sense"]]
custom_2_robot_joint_lower_limit = [custom_2_robot_joint1["Minimum limit"], custom_2_robot_joint2["Minimum limit"], custom_2_robot_joint3["Minimum limit"],
custom_2_robot_joint4["Minimum limit"], custom_2_robot_joint5["Minimum limit"], custom_2_robot_joint6["Minimum limit"],
custom_2_robot_joint7["Minimum limit"]]
custom_2_robot_joint_upper_limit = [custom_2_robot_joint1["Maximum limit"], custom_2_robot_joint2["Maximum limit"], custom_2_robot_joint3["Maximum limit"],
custom_2_robot_joint4["Maximum limit"], custom_2_robot_joint5["Maximum limit"], custom_2_robot_joint6["Maximum limit"],
custom_2_robot_joint7["Maximum limit"]]
custom_2_robot_base_pose = data["Base shift"]
custom_2_robot_tool_pose = data["End-effector shift"]
RDK = Robolink()
custom = False
custom2 = True
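# Editor's note (added): the two flags above pick the model variant; set
# custom=True for the "custom" model, custom2=True for "custom2", or leave
# both False to build the original model described in the docstring.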
if custom:
robot_name = custom_robot_name
DOFs = custom_robot_dof
joints_build = custom_robot_joints_build
joints_home = custom_robot_joints_home
parameters = custom_robot_parameters
joints_senses = custom_robot_joint_senses # -1 = Inverted, +1 = Not Inverted
lower_limits = custom_robot_joint_lower_limit
upper_limits = custom_robot_joint_upper_limit
base_pose = xyzrpw_2_pose(custom_robot_base_pose)
tool_pose = xyzrpw_2_pose(custom_robot_tool_pose)
list_objects = []
elif custom2:
robot_name = custom_2_robot_name
DOFs = custom_2_robot_dof
joints_build = custom_2_robot_joints_build
joints_home = custom_2_robot_joints_home
parameters = custom_2_robot_parameters
joints_senses = custom_2_robot_joint_senses # -1 = Inverted, +1 = Not Inverted
lower_limits = custom_2_robot_joint_lower_limit
upper_limits = custom_2_robot_joint_upper_limit
base_pose = xyzrpw_2_pose(custom_2_robot_base_pose)
tool_pose = xyzrpw_2_pose(custom_2_robot_tool_pose)
list_objects = []
else:
robot_name = original_robot_name
DOFs = original_robot_dof
joints_build = original_robot_joints_build
joints_home = original_robot_joints_home
parameters = original_robot_parameters
joints_senses = original_robot_joint_senses # -1 = Inverted, +1 = Not Inverted
lower_limits = original_robot_joint_lower_limit
upper_limits = original_robot_joint_upper_limit
base_pose = xyzrpw_2_pose(original_robot_base_pose)
tool_pose = xyzrpw_2_pose(original_robot_tool_pose)
list_objects = []
for i in range(DOFs + 1):
if i == 0:
itm = RDK.Item('base', ITEM_TYPE_OBJECT)
else:
itm = RDK.Item('link_'+str(i), ITEM_TYPE_OBJECT)
list_objects.append(itm)
new_robot = RDK.BuildMechanism(MAKE_ROBOT_7DOF, list_objects, parameters, joints_build, joints_home, joints_senses, lower_limits, upper_limits, base_pose, tool_pose, robot_name)
if not new_robot.Valid():
print("Failed to create the robot. Check input values.")
else:
print("Robot/mechanism created: " + new_robot.Name())
| 55.209205 | 177 | 0.649564 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5,079 | 0.384919 |
82fd84aa8cc3e3cc7d56b2efc824a18ff2e55d25 | 368 | py | Python | tests/Parser/701isNotSubtitleFile_test.py | Bas-Man/TVShowFile | 2f341c97dcbe52eee0c0e71752173c9e9442450c | [
"MIT"
]
| null | null | null | tests/Parser/701isNotSubtitleFile_test.py | Bas-Man/TVShowFile | 2f341c97dcbe52eee0c0e71752173c9e9442450c | [
"MIT"
]
| null | null | null | tests/Parser/701isNotSubtitleFile_test.py | Bas-Man/TVShowFile | 2f341c97dcbe52eee0c0e71752173c9e9442450c | [
"MIT"
]
| null | null | null | import unittest
from context import parser
class TVShowFileParserTests(unittest.TestCase):
def setUp(self):
self.filename = parser.Parser("test.2018.S01E01E02.mkv")
def tearDown(self):
self.filename = None
def testisSubtitleFileSRT(self):
self.assertFalse(self.filename.isSubs)
if __name__ == '__main__':
unittest.main()
| 19.368421 | 64 | 0.69837 | 273 | 0.741848 | 0 | 0 | 0 | 0 | 0 | 0 | 35 | 0.095109 |
82fdd7e670db27270125e392ffe7dfc17727a77e | 10,668 | py | Python | archdiffer/tests/tests_rest_routes.py | Kratochvilova/archdiffer | 06f2ef0bb232b1ffe46e9d50575c4b79b1cff191 | [
"MIT"
]
| null | null | null | archdiffer/tests/tests_rest_routes.py | Kratochvilova/archdiffer | 06f2ef0bb232b1ffe46e9d50575c4b79b1cff191 | [
"MIT"
]
| null | null | null | archdiffer/tests/tests_rest_routes.py | Kratochvilova/archdiffer | 06f2ef0bb232b1ffe46e9d50575c4b79b1cff191 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
# This file is part of Archdiffer and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Created on Sat May 12 20:39:46 2018
@author: Pavla Kratochvilova <[email protected]>
"""
from random import choice
from datetime import datetime
from . import RESTTest
from ..constants import STATE_STRINGS
from .. import database
DATETIMES = [
'1000-01-01 00:00:00',
'2018-01-01 00:00:00',
'9999-01-01 00:00:00',
]
IDS = LIMITS = OFFSETS = ['0', '1', '2', '10', '999999']
STATES = list(STATE_STRINGS.values())
class RESTTestRoutes(RESTTest):
"""Tests for route 'rest'."""
expected_response = {
"/rest": {
"methods": [
"GET"
],
"routes": {
"/rest/comparison_types": {
"methods": [
"GET"
],
"routes": {
"/rest/comparison_types/<int:id>": {
"methods": [
"GET"
],
"routes": {}
}
}
},
"/rest/comparisons": {
"methods": [
"GET"
],
"routes": {
"/rest/comparisons/<int:id>": {
"methods": [
"GET"
],
"routes": {}
}
}
}
}
}
}
def assert_response(self):
"""Asert that response is as expected."""
self.assertEqual(self.response, self.expected_response)
def test_get_routes(self):
"""Test GET method on 'rest'."""
self.get('rest')
self.assert_code_ok()
self.assert_response()
class RESTTestLists(RESTTest):
"""For testing getting lists from the database."""
route = None
# Accepted query parameters (values are lists of tested values).
param_choices = {}
def setUp(self):
super().setUp()
self.params = {}
def set_params(self, parameters):
"""Randomly set parameters based on the given list.
:param list parameters: list of parameters to be set.
"""
for param in self.param_choices:
if param in parameters:
self.params[param] = choice(self.param_choices[param])
def form_request(self):
"""Form GET request based on route and parameters."""
self.get(self.route, params=self.params)
def form_request_one(self):
"""Form GET request for random item based on route and parameters."""
self.get(self.route, choice(IDS), params=self.params)
def deep_sort(self, response):
"""Order all lists in the response.
:param list response: response
:return list: ordered response
"""
if type(response) == list:
response = sorted(response, key=lambda k: k['id'])
new_response = []
for item in response:
new_response.append(self.deep_sort(item))
elif type(response) == dict:
for key, value in response.items():
response[key] = self.deep_sort(value)
return response
def assert_response(self, expected):
"""Assert that response is as expected, aside from lists ordering.
:param list expected: expected response
"""
self.assertEqual(
self.deep_sort(self.response), self.deep_sort(expected)
)
class RESTTestListsEmpty(RESTTestLists):
"""Tests for getting lists from empty database. Abstract."""
def run(self, result=None):
"""Overriden run so that it doesn't run these tests on this class."""
if type(self) == RESTTestListsEmpty:
return result
return super().run(result)
def test_basic(self):
"""Test getting list - with no params set."""
self.form_request()
self.assert_code_ok()
self.assert_response([])
def test_basic_one(self):
"""Test getting instance - with no params set."""
self.form_request_one()
self.assert_code_ok()
self.assert_response([])
def test_individual_params(self):
"""Test getting list - for each param set individually."""
for param in self.param_choices:
with self.subTest(param=param):
self.params = {}
self.set_params([param])
self.test_basic()
def test_all_params(self):
"""Test getting list - for all params set."""
self.params = {}
self.set_params(self.param_choices)
self.test_basic()
class RESTTestComparisonsEmpty(RESTTestListsEmpty):
"""Tests for getting comparisons from empty database."""
route = 'rest/comparisons'
param_choices = {
'id': IDS,
'state': STATES,
'before': DATETIMES,
'after': DATETIMES,
'comparison_type_id': IDS,
'comparison_type_name': [''],
'limit': LIMITS,
'offset': OFFSETS,
}
class RESTTestComparisonTypesEmpty(RESTTestListsEmpty):
"""Tests for getting comparison types from empty database."""
route = 'rest/comparison_types'
param_choices = {
'id': IDS,
'name': [''],
'limit': LIMITS,
'offset': OFFSETS,
}
class RESTTestListsFilled(RESTTestLists):
"""Tests for getting lists from filled database. Abstract."""
def run(self, result=None):
"""Overriden run so that it doesn't run these tests on this class."""
if type(self) == RESTTestListsFilled:
return result
return super().run(result)
def test_params(self):
"""Run test for each of the tuples_params_results. Check that with
given parameters the response is as expected."""
for params, expected in self.tuples_params_results:
with self.subTest(**params):
self.params = params
self.form_request()
self.assert_code_ok()
self.assert_response(expected)
class RESTTestComparisonsFilled(RESTTestListsFilled):
"""Tests for getting comparisons from filled database."""
route = RESTTestComparisonsEmpty.route
param_choices = RESTTestComparisonsEmpty.param_choices
def fill_db(self):
"""Fill database. Called in setUp."""
db_session = database.session()
comparison_types = [
database.ComparisonType(id=1, name='1'),
database.ComparisonType(id=2, name='2'),
]
comparisons = [
database.Comparison(
id=1, state=1, time=datetime(9999, 1, 1), comparison_type_id=1
),
database.Comparison(
id=2, state=0, time=datetime(1000, 1, 1), comparison_type_id=2
),
database.Comparison(
id=3, state=0, time=datetime(2018, 1, 1), comparison_type_id=1
),
database.Comparison(
id=4, state=1, time=datetime(2018, 3, 1), comparison_type_id=1
),
]
db_session.add_all(comparison_types)
db_session.add_all(comparisons)
db_session.commit()
db_session.close()
# Expected result for request without any parameters.
expected = [
{
'id': 1,
'state': STATE_STRINGS[1],
'time': '9999-01-01 00:00:00',
'comparison_type': {
'id': 1,
'name': '1',
},
},
{
'id': 2,
'state': STATE_STRINGS[0],
'time': '1000-01-01 00:00:00',
'comparison_type': {
'id': 2,
'name': '2',
},
},
{
'id': 3,
'state': STATE_STRINGS[0],
'time': '2018-01-01 00:00:00',
'comparison_type': {
'id': 1,
'name': '1',
},
},
{
'id': 4,
'state': STATE_STRINGS[1],
'time': '2018-03-01 00:00:00',
'comparison_type': {
'id': 1,
'name': '1',
},
},
]
# Tuples of query parameters and corresponding expected result.
tuples_params_results = [
({}, expected),
({'id': '2'}, [expected[1]]),
({'state': STATE_STRINGS[0]}, [expected[1], expected[2]]),
({'before': '2018-02-01 00:00:00'}, [expected[1], expected[2]]),
({'after': '2018-02-01 00:00:00'}, [expected[0], expected[3]]),
({'comparison_type_id': '1'}, [expected[0], expected[2], expected[3]]),
({'comparison_type_name': '2'}, [expected[1]]),
({'limit': '2'}, [expected[0], expected[1]]),
({'offset': '3'}, [expected[3]]),
(
{
'comparison_type_id': '1',
'state': STATE_STRINGS[1],
'after': '2017-01-01 00:00:00',
},
[expected[0], expected[3]]
),
]
class RESTTestComparisonTypesFilled(RESTTestListsFilled):
"""Tests for getting comparison types from filled database."""
route = RESTTestComparisonTypesEmpty.route
param_choices = RESTTestComparisonTypesEmpty.param_choices
def fill_db(self):
"""Fill database. Called in setUp."""
db_session = database.session()
comparison_types = [
database.ComparisonType(id=1, name='1'),
database.ComparisonType(id=2, name='2'),
database.ComparisonType(id=3, name='3'),
database.ComparisonType(id=4, name='4'),
database.ComparisonType(id=5, name='5'),
]
db_session.add_all(comparison_types)
db_session.commit()
db_session.close()
# Expected result for request without any parameters.
expected = [
{'id': 1, 'name': '1'},
{'id': 2, 'name': '2'},
{'id': 3, 'name': '3'},
{'id': 4, 'name': '4'},
{'id': 5, 'name': '5'},
]
# Tuples of query parameters and corresponding expected result.
tuples_params_results = [
({}, expected),
({'id': '2'}, [expected[1]]),
({'name': '4'}, [expected[3]]),
({'limit': '3'}, [expected[0], expected[1], expected[2]]),
({'offset': '3'}, [expected[3], expected[4]]),
({'id': '4', 'name': '4'}, [expected[3]]),
]
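# --- Editor's usage sketch (hedged, not from the original file): these are
# unittest-style cases and would typically be collected with something like
#
#     python -m unittest archdiffer.tests.tests_rest_routes
#
# assuming the RESTTest base class imported above wires up the test app and
# database.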
| 32.036036 | 79 | 0.522778 | 10,057 | 0.942726 | 0 | 0 | 0 | 0 | 0 | 0 | 3,167 | 0.296869 |
82ff0515b3da6ec57b02cf613a0fd6672311351d | 150 | py | Python | src/posts/templatetags/urlify.py | thunderoy/blogger | 8102d11c04fbc98a31298ebfdb75023e9207109f | ["MIT"] | null | null | null | src/posts/templatetags/urlify.py | thunderoy/blogger | 8102d11c04fbc98a31298ebfdb75023e9207109f | ["MIT"] | null | null | null | src/posts/templatetags/urlify.py | thunderoy/blogger | 8102d11c04fbc98a31298ebfdb75023e9207109f | ["MIT"] | null | null | null | from urllib.parse import quote
from django import template
register = template.Library()
@register.filter
def urlify(value):
return quote(value) | 18.75 | 30 | 0.78 | 0 | 0 | 0 | 0 | 59 | 0.393333 | 0 | 0 | 0 | 0 |
82ff896e3d6c189f07e2be0a44d052ed10938137 | 330 | py | Python | producto/migrations/0004_auto_20180611_2350.py | JohanVasquez/crud-venta-libre | 557f82b5d88c42480020a65cc6034348ff20efce | ["MIT"] | null | null | null | producto/migrations/0004_auto_20180611_2350.py | JohanVasquez/crud-venta-libre | 557f82b5d88c42480020a65cc6034348ff20efce | ["MIT"] | null | null | null | producto/migrations/0004_auto_20180611_2350.py | JohanVasquez/crud-venta-libre | 557f82b5d88c42480020a65cc6034348ff20efce | ["MIT"] | null | null | null | # Generated by Django 2.0.6 on 2018-06-11 23:50
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('producto', '0003_auto_20180611_2248'),
]
operations = [
migrations.RenameModel(
old_name='Venta',
new_name='Ventas',
),
]
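# Editor's note (hedged, added): RenameModel renames the 'Venta' model to
# 'Ventas', including its auto-derived database table; migrating backwards
# reverses the rename.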
| 18.333333 | 48 | 0.593939 | 245 | 0.742424 | 0 | 0 | 0 | 0 | 0 | 0 | 97 | 0.293939 |
d2013debcc235b195c7f8a356464f5f2511b9b80 | 4,173 | py | Python | service_matcher_app/service_matcher/models.py | City-of-Turku/PaohServiceMatchEngine | 39f580003f9c0d10708acd93644f796f764ec2f0 | ["MIT"] | null | null | null | service_matcher_app/service_matcher/models.py | City-of-Turku/PaohServiceMatchEngine | 39f580003f9c0d10708acd93644f796f764ec2f0 | ["MIT"] | null | null | null | service_matcher_app/service_matcher/models.py | City-of-Turku/PaohServiceMatchEngine | 39f580003f9c0d10708acd93644f796f764ec2f0 | ["MIT"] | null | null | null | from datetime import datetime
from pydantic import BaseModel
from typing import Optional, List
from fastapi import Query
class Service(BaseModel):
"""
A class for single service
"""
id: str
ptvId: Optional[str] = None
type: Optional[str] = None
subtype: Optional[str] = None
organizations: list
name: dict
descriptions: dict
requirement: dict
targetGroups: dict
serviceClasses: dict
lifeEvents: dict
areas: dict
lastUpdated: datetime
nameAutoTranslated: dict
class ServiceClass(BaseModel):
"""
A class for single service class
"""
name: str
code: str
class ServiceChannel(BaseModel):
"""
A class for single service channel
"""
id: str
ptvId: Optional[str] = None
type: Optional[str] = None
areaType: Optional[str] = None
organizationId: Optional[str] = None
serviceIds: list
name: dict
descriptions: dict
webPages: dict
emails: dict
phoneNumbers: dict
areas: dict
addresses: dict
channelUrls: dict
lastUpdated: datetime
class ServiceQuery(BaseModel):
"""
    A class for request payload for service filtering
"""
include_channels: Optional[bool] = False
priorization: Optional[str] = Query("local", regex="^(local|national)$")
municipalities: Optional[List[str]] = []
life_events: Optional[List[str]] = []
service_classes: Optional[List[str]] = []
limit_k: Optional[int] = 20
translate_missing_texts: Optional[bool] = False
class ServiceRecommendQuery(BaseModel):
"""
A class for payload for free text service recommendation
"""
need_text: str
municipalities: Optional[List[str]] = []
life_events: Optional[List[str]] = []
service_classes: Optional[List[str]] = []
top_k: Optional[int] = 20
score_threshold: Optional[float] = 0.0
text_recommender: Optional[str] = Query("all", regex="^(nlp|lexical|all)$")
language: Optional[str] = Query("fi", regex="^(fi|en|sv)$")
translate_missing_texts: Optional[bool] = False
class ServiceRecommendConversationQuery(BaseModel):
"""
A class for payload for service recommendation based on conversation
"""
mode: Optional[str] = Query("infer", regex="^(search|intent|conversation|infer)$")
municipalities: Optional[List[str]] = []
life_events: Optional[List[str]] = []
service_classes: Optional[List[str]] = []
top_k: Optional[int] = 20
score_threshold: Optional[float] = 0.0
text_recommender: Optional[str] = Query("all", regex="^(nlp|lexical|all)$")
language: Optional[str] = Query("fi", regex="^(fi|en|sv)$")
translate_missing_texts: Optional[bool] = False
class ServiceRecommendIntentQuery(BaseModel):
"""
A class for payload for intent based service recommendation
"""
intent: str
municipalities: Optional[List[str]] = []
life_events: Optional[List[str]] = []
service_classes: Optional[List[str]] = []
translate_missing_texts: Optional[bool] = False
class ServiceRecommendIntentAndOptionsQuery(BaseModel):
"""
A class for payload for intent and options based service recommendation, options can be got from database or can be overwritten in the interface
"""
intent: str
municipalities: Optional[List[str]] = []
life_events: Optional[List[str]] = []
service_classes: Optional[List[str]] = []
score_threshold: Optional[float] = 0.0
need_text: Optional[str] = None
text_recommender: Optional[str] = Query("all", regex="^(nlp|lexical|all)$")
language: Optional[str] = Query("fi", regex="^(fi|en|sv)$")
priorization: Optional[str] = Query("local", regex="^(local|national)$")
limit_k: Optional[int] = 20
translate_missing_texts: Optional[bool] = False
class ServiceClassRecommendQuery(BaseModel):
"""
A class for payload for free text service class recommendation
"""
need_text: str
top_k: Optional[int] = 20
class ServiceClassRecommendConversationQuery(BaseModel):
"""
A class for payload for service class recommendation based on conversation
"""
top_k: Optional[int] = 20
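# --- Editor's usage sketch (hedged, not from the original file): these
# pydantic models validate incoming request payloads, e.g.
#
#     q = ServiceRecommendQuery(need_text="daycare", municipalities=["turku"])
#     q.dict()   # defaults filled in: top_k=20, language="fi", ...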
| 29.595745 | 148 | 0.671699 | 4,020 | 0.963336 | 0 | 0 | 0 | 0 | 0 | 0 | 1,004 | 0.240594 |
d20192a90dd1bce99c4dd2075229189ba55979b5 | 6,481 | py | Python | src/test/testcases/testPSUReadSbeMem.py | open-power/sbe | 0208243c5bbd68fa36464397fa46a2940c827edf | ["Apache-2.0"] | 9 | 2017-03-21T08:34:24.000Z | 2022-01-25T06:00:51.000Z | src/test/testcases/testPSUReadSbeMem.py | sumant8098/sbe | 0208243c5bbd68fa36464397fa46a2940c827edf | ["Apache-2.0"] | 17 | 2016-11-04T00:46:43.000Z | 2021-04-13T16:31:11.000Z | src/test/testcases/testPSUReadSbeMem.py | sumant8098/sbe | 0208243c5bbd68fa36464397fa46a2940c827edf | ["Apache-2.0"] | 17 | 2017-03-24T11:52:56.000Z | 2022-01-25T06:00:49.000Z | # IBM_PROLOG_BEGIN_TAG
# This is an automatically generated prolog.
#
# $Source: src/test/testcases/testPSUReadSbeMem.py $
#
# OpenPOWER sbe Project
#
# Contributors Listed Below - COPYRIGHT 2017,2019
# [+] International Business Machines Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#
# IBM_PROLOG_END_TAG
from __future__ import print_function
import sys
sys.path.append("targets/p9_nimbus/sbeTest" )
sys.path.append("targets/p9_axone/sbeTest" )
import testPSUUtil
import testRegistry as reg
import testUtil
import testMemUtil
#-------------------------------
# This is a Test Expected Data
#-------------------------------
def getdoubleword(dataInInt):
hex_string = '0'*(16-len(str(hex(dataInInt))[:18][2:])) + str(hex(dataInInt))[:18][2:]
return hex_string
def readSeeprom(offset, size, destAddr, primStatus, secStatus):
'''
#------------------------------------------------------------------------------------------------------------------------------
# SBE side test data -
#------------------------------------------------------------------------------------------------------------------------------
'''
sbe_test_data = (
#-----------------------------------------------------------------------------------------------------
# OP Reg ValueToWrite size Test Expected Data Description
#-----------------------------------------------------------------------------------------------------
# FFDC Size, Pass CMD Size
["write", reg.REG_MBOX0, "0000010000F0D703", 8, "None", "Writing to MBOX0 address"],
# seeprom offset, Size
["write", reg.REG_MBOX1, getdoubleword((offset<<32)+size), 8, "None", "Writing to MBOX1 address"],
# response Addr
["write", reg.REG_MBOX2, getdoubleword(destAddr), 8, "None", "Writing to MBOX2 address"],
["write", reg.PSU_SBE_DOORBELL_REG_WO_OR, "8000000000000000", 8, "None", "Update SBE Doorbell register to interrupt SBE"],
)
'''
#---------------------
# Host side test data - SUCCESS
#---------------------
'''
host_test_data_success = (
#----------------------------------------------------------------------------------------------------------------
# OP Reg ValueToWrite size Test Expected Data Description
#----------------------------------------------------------------------------------------------------------------
["read", reg.REG_MBOX4, "0", 8, getdoubleword((primStatus<<48)+(secStatus<<32)+0xF0D703), "Reading Host MBOX4 data to Validate"],
)
'''
#-----------------------------------------------------------------------
# Do not modify - Used to simulate interrupt on Ringing Doorbell on Host
#-----------------------------------------------------------------------
'''
host_polling_data = (
#----------------------------------------------------------------------------------------------------------------
# OP Reg ValueToWrite size Test Expected Data Description
#----------------------------------------------------------------------------------------------------------------
["read", reg.PSU_HOST_DOORBELL_REG_WO_OR, "0", 8, "8000000000000000", "Reading Host Doorbell for Interrupt Bit0"],
)
# Run Simics initially
testUtil.runCycles( 10000000 );
# Intialize the class obj instances
regObj = testPSUUtil.registry() # Registry obj def for operation
# HOST->SBE data set execution
regObj.ExecuteTestOp( testPSUUtil.simSbeObj, sbe_test_data )
print("\n Poll on Host side for INTR ...\n")
#Poll on HOST DoorBell Register for interrupt
regObj.pollingOn( testPSUUtil.simSbeObj, host_polling_data, 5 )
#SBE->HOST data set execution
regObj.ExecuteTestOp( testPSUUtil.simSbeObj, host_test_data_success )
#-------------------------
# Main Function
#-------------------------
def main():
# Run Simics initially
testUtil.runCycles( 10000000 );
print("\n Execute SBE Test - Read SBE Mem\n")
'''
Test Case 1
'''
readSeeprom(0, 128, 0x08000000, 0, 0)
print("SUCCESS: read seeprom valid")
# Read data from cache and verify its contents
# seeprom header
seepprmHdr = 'XIP SEPM'
#read from cache
readData = testMemUtil.getmem(0x08000000, 0x80, 0x02)
for byte in range(len(seepprmHdr)):
if( ord(seepprmHdr[byte]) != readData[byte ]):
print("Data mismtach at: ", byte)
print(" expected: ", ord(seepprmHdr[byte]))
print(" Actual: ", readData[byte])
            raise Exception('data mismatch');
'''
Test Case 2
'''
readSeeprom(0x38CA0, 0x180, 0x8973780, 0, 0)
print("SUCCESS: read seeprom HB testcase")
'''
Test Case 3
'''
readSeeprom(0x0, 0x40, 0x08000000, 0x03, 0x19)
print("SUCCESS: read seeprom size not aligned")
'''
Test Case 4
'''
readSeeprom(0x3fe80, 0x180, 0x08000000, 0x03, 0x19)
print("SUCCESS: read seeprom size exceeded")
'''
Test Case 5
'''
readSeeprom(0x7, 0x40, 0x08000000, 0x03, 0x19)
print("SUCCESS: read seeprom offset not aligned")
if __name__ == "__main__":
if testUtil.getMachineName() == "axone":
try:
main()
except:
print ( "\nTest Suite completed with error(s)" )
testUtil.collectFFDC()
            raise
print ( "\nTest Suite completed with no errors" )
else:
        try:
            main()
        except:
            print ( "\nTest Suite completed with error(s)" )
            #sys.exit(1)
            raise
        else:
            print ( "\nTest Suite completed with no errors" )
            #sys.exit(0);
| 38.123529 | 144 | 0.494214 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,067 | 0.627527 |
d202532cd5f882629e4b1ca88d649d9dd76cb423 | 9,780 | py | Python | biobb_ml/clustering/clustering_predict.py | bioexcel/biobb_ml | f99346ef7885d3a62de47dab738a01db4b27467a | ["Apache-2.0"] | null | null | null | biobb_ml/clustering/clustering_predict.py | bioexcel/biobb_ml | f99346ef7885d3a62de47dab738a01db4b27467a | ["Apache-2.0"] | 5 | 2021-06-30T11:24:14.000Z | 2021-08-04T12:53:00.000Z | biobb_ml/clustering/clustering_predict.py | bioexcel/biobb_ml | f99346ef7885d3a62de47dab738a01db4b27467a | ["Apache-2.0"] | null | null | null | #!/usr/bin/env python3
"""Module containing the ClusteringPredict class and the command line interface."""
import argparse
import pandas as pd
import joblib
from biobb_common.generic.biobb_object import BiobbObject
from sklearn.preprocessing import StandardScaler
from biobb_common.configuration import settings
from biobb_common.tools import file_utils as fu
from biobb_common.tools.file_utils import launchlogger
from biobb_ml.clustering.common import *
class ClusteringPredict(BiobbObject):
"""
| biobb_ml ClusteringPredict
| Makes predictions from an input dataset and a given clustering model.
| Makes predictions from an input dataset (provided either as a file or as a dictionary property) and a given clustering model fitted with `KMeans <https://scikit-learn.org/stable/modules/generated/sklearn.cluster.KMeans.html>`_ method.
Args:
input_model_path (str): Path to the input model. File type: input. `Sample file <https://github.com/bioexcel/biobb_ml/raw/master/biobb_ml/test/data/clustering/model_clustering_predict.pkl>`_. Accepted formats: pkl (edam:format_3653).
input_dataset_path (str) (Optional): Path to the dataset to predict. File type: input. `Sample file <https://github.com/bioexcel/biobb_ml/raw/master/biobb_ml/test/data/clustering/input_clustering_predict.csv>`_. Accepted formats: csv (edam:format_3752).
output_results_path (str): Path to the output results file. File type: output. `Sample file <https://github.com/bioexcel/biobb_ml/raw/master/biobb_ml/test/reference/clustering/ref_output_results_clustering_predict.csv>`_. Accepted formats: csv (edam:format_3752).
properties (dic - Python dictionary object containing the tool parameters, not input/output files):
* **predictions** (*list*) - (None) List of dictionaries with all values you want to predict targets. It will be taken into account only in case **input_dataset_path** is not provided. Format: [{ 'var1': 1.0, 'var2': 2.0 }, { 'var1': 4.0, 'var2': 2.7 }] for datasets with headers and [[ 1.0, 2.0 ], [ 4.0, 2.7 ]] for datasets without headers.
* **remove_tmp** (*bool*) - (True) [WF property] Remove temporal files.
* **restart** (*bool*) - (False) [WF property] Do not execute if output files exist.
Examples:
This is a use example of how to use the building block from Python::
from biobb_ml.clustering.clustering_predict import clustering_predict
prop = {
'predictions': [
{
'var1': 1.0,
'var2': 2.0
},
{
'var1': 4.0,
'var2': 2.7
}
]
}
clustering_predict(input_model_path='/path/to/myModel.pkl',
output_results_path='/path/to/newPredictedResults.csv',
input_dataset_path='/path/to/myDataset.csv',
properties=prop)
Info:
* wrapped_software:
* name: scikit-learn
* version: >=0.24.2
* license: BSD 3-Clause
* ontology:
* name: EDAM
* schema: http://edamontology.org/EDAM.owl
"""
def __init__(self, input_model_path, output_results_path,
input_dataset_path=None, properties=None, **kwargs) -> None:
properties = properties or {}
# Call parent class constructor
super().__init__(properties)
# Input/Output files
self.io_dict = {
"in": { "input_model_path": input_model_path, "input_dataset_path": input_dataset_path },
"out": { "output_results_path": output_results_path }
}
# Properties specific for BB
self.predictions = properties.get('predictions', [])
self.properties = properties
# Check the properties
self.check_properties(properties)
def check_data_params(self, out_log, err_log):
""" Checks all the input/output paths and parameters """
self.io_dict["in"]["input_model_path"] = check_input_path(self.io_dict["in"]["input_model_path"], "input_model_path", out_log, self.__class__.__name__)
self.io_dict["out"]["output_results_path"] = check_output_path(self.io_dict["out"]["output_results_path"],"output_results_path", False, out_log, self.__class__.__name__)
if self.io_dict["in"]["input_dataset_path"]:
self.io_dict["in"]["input_dataset_path"] = check_input_path(self.io_dict["in"]["input_dataset_path"], "input_dataset_path", out_log, self.__class__.__name__)
@launchlogger
def launch(self) -> int:
"""Execute the :class:`ClusteringPredict <clustering.clustering_predict.ClusteringPredict>` clustering.clustering_predict.ClusteringPredict object."""
# check input/output paths and parameters
self.check_data_params(self.out_log, self.err_log)
# Setup Biobb
if self.check_restart(): return 0
self.stage_files()
fu.log('Getting model from %s' % self.io_dict["in"]["input_model_path"], self.out_log, self.global_log)
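        # The model file stores several objects serialized back to back (the
        # fitted KMeans model, optionally a StandardScaler, and a metadata
        # dict), so joblib.load is called repeatedly on the same handle until
        # EOFError signals the end of the file.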
with open(self.io_dict["in"]["input_model_path"], "rb") as f:
while True:
try:
m = joblib.load(f)
                    if isinstance(m, KMeans):
new_model = m
if isinstance(m, StandardScaler):
scaler = m
if isinstance(m, dict):
variables = m
except EOFError:
break
if self.io_dict["in"]["input_dataset_path"]:
# load dataset from input_dataset_path file
fu.log('Getting dataset from %s' % self.io_dict["in"]["input_dataset_path"], self.out_log, self.global_log)
if 'columns' in variables['predictors']:
labels = getHeader(self.io_dict["in"]["input_dataset_path"])
skiprows = 1
else:
labels = None
skiprows = None
            new_data_table = pd.read_csv(self.io_dict["in"]["input_dataset_path"], header=None, sep=r"\s+|;|:|,|\t", engine="python", skiprows=skiprows, names=labels)
else:
# load dataset from properties
if 'columns' in variables['predictors']:
# sorting self.properties in the correct order given by variables['predictors']['columns']
index_map = { v: i for i, v in enumerate(variables['predictors']['columns']) }
predictions = []
for i, pred in enumerate(self.predictions):
sorted_pred = sorted(pred.items(), key=lambda pair: index_map[pair[0]])
predictions.append(dict(sorted_pred))
                new_data_table = pd.DataFrame(data=get_list_of_predictors(predictions), columns=get_keys_of_predictors(predictions))
else:
predictions = self.predictions
new_data_table = pd.DataFrame(data=predictions)
        if variables['scale']:
            fu.log('Scaling dataset', self.out_log, self.global_log)
            new_data = scaler.transform(new_data_table)
        else:
            new_data = new_data_table
p = new_model.predict(new_data)
new_data_table['cluster'] = p
fu.log('Predicting results\n\nPREDICTION RESULTS\n\n%s\n' % new_data_table, self.out_log, self.global_log)
fu.log('Saving results to %s' % self.io_dict["out"]["output_results_path"], self.out_log, self.global_log)
        new_data_table.to_csv(self.io_dict["out"]["output_results_path"], index=False, header=True, float_format='%.3f')
return 0
def clustering_predict(input_model_path: str, output_results_path: str, input_dataset_path: str = None, properties: dict = None, **kwargs) -> int:
"""Execute the :class:`ClusteringPredict <clustering.clustering_predict.ClusteringPredict>` class and
execute the :meth:`launch() <clustering.clustering_predict.ClusteringPredict.launch>` method."""
return ClusteringPredict(input_model_path=input_model_path,
output_results_path=output_results_path,
input_dataset_path=input_dataset_path,
properties=properties, **kwargs).launch()
def main():
"""Command line execution of this building block. Please check the command line documentation."""
parser = argparse.ArgumentParser(description="Makes predictions from an input dataset and a given clustering model.", formatter_class=lambda prog: argparse.RawTextHelpFormatter(prog, width=99999))
parser.add_argument('--config', required=False, help='Configuration file')
# Specific args of each building block
required_args = parser.add_argument_group('required arguments')
required_args.add_argument('--input_model_path', required=True, help='Path to the input model. Accepted formats: pkl.')
required_args.add_argument('--output_results_path', required=True, help='Path to the output results file. Accepted formats: csv.')
parser.add_argument('--input_dataset_path', required=False, help='Path to the dataset to predict. Accepted formats: csv.')
args = parser.parse_args()
args.config = args.config or "{}"
properties = settings.ConfReader(config=args.config).get_prop_dic()
# Specific call of each building block
clustering_predict(input_model_path=args.input_model_path,
output_results_path=args.output_results_path,
input_dataset_path=args.input_dataset_path,
properties=properties)
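# Example invocation (hypothetical file names, for illustration only):
#   python clustering_predict.py --config config.yml \
#       --input_model_path model.pkl \
#       --input_dataset_path dataset.csv \
#       --output_results_path results.csv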
if __name__ == '__main__':
main()
| 52.864865 | 354 | 0.645194 | 7,357 | 0.752249 | 0 | 0 | 3,101 | 0.317076 | 0 | 0 | 4,941 | 0.505215 |
d2025b2a20a97cda599aa94a7ddf6c498a1acbae | 121 | py | Python | treeviz_test.py | larsga/sprake | 32598651b2fb514b18aab4f82ffba89d606a7b74 | [
"Apache-2.0"
]
| 1 | 2022-01-26T08:50:33.000Z | 2022-01-26T08:50:33.000Z | treeviz_test.py | larsga/sprake | 32598651b2fb514b18aab4f82ffba89d606a7b74 | [
"Apache-2.0"
]
| null | null | null | treeviz_test.py | larsga/sprake | 32598651b2fb514b18aab4f82ffba89d606a7b74 | [
"Apache-2.0"
]
| null | null | null |
from sprake import treeviz
# we don't actually have any meaningful tests that we can do, but at least
# we can do this
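# A minimal smoke check, assuming nothing about treeviz's API beyond the
# import above succeeding (every module exposes __name__):
if __name__ == '__main__':
    print('treeviz imported OK:', treeviz.__name__)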
| 20.166667 | 74 | 0.752066 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 90 | 0.743802 |
d2037084e3cebaba8f3eced7b0c24bf337957571 | 1,926 | py | Python | build/scripts/gen_mx_table.py | r1nadeg/04_catboost | 6755bbbd1496540b92ded57eea1974f64bef87c5 | [
"Apache-2.0"
]
| null | null | null | build/scripts/gen_mx_table.py | r1nadeg/04_catboost | 6755bbbd1496540b92ded57eea1974f64bef87c5 | [
"Apache-2.0"
]
| null | null | null | build/scripts/gen_mx_table.py | r1nadeg/04_catboost | 6755bbbd1496540b92ded57eea1974f64bef87c5 | [
"Apache-2.0"
]
| 1 | 2018-08-06T14:13:12.000Z | 2018-08-06T14:13:12.000Z | import sys
tmpl = """
#include "yabs_mx_calc_table.h"
#include <kernel/matrixnet/mn_sse.h>
#include <library/archive/yarchive.h>
#include <util/memory/blob.h>
#include <util/generic/hash.h>
#include <util/generic/ptr.h>
#include <util/generic/singleton.h>
using namespace NMatrixnet;
extern "C" {
extern const unsigned char MxFormulas[];
extern const ui32 MxFormulasSize;
}
namespace {
struct TFml: public TBlob, public TMnSseInfo {
inline TFml(const TBlob& b)
: TBlob(b)
, TMnSseInfo(Data(), Size())
{
}
};
struct TFormulas: public THashMap<size_t, TAutoPtr<TFml>> {
inline TFormulas() {
TBlob b = TBlob::NoCopy(MxFormulas, MxFormulasSize);
TArchiveReader ar(b);
%s
}
inline const TMnSseInfo& at(size_t n) const throw () {
return *find(n)->second;
}
};
%s
static func_descr_t yabs_funcs[] = {
%s
};
}
yabs_mx_calc_table_t yabs_mx_calc_table = {YABS_MX_CALC_VERSION, 10000, 0, yabs_funcs};
"""
if __name__ == '__main__':
init = []
body = []
defs = {}
for i in sys.argv[1:]:
name = i.replace('.', '_')
num = long(name.split('_')[1])
init.append('(*this)[%s] = new TFml(ar.ObjectBlobByKey("%s"));' % (num, '/' + i))
f1 = 'static void yabs_%s(size_t count, const float** args, double* res) {Singleton<TFormulas>()->at(%s).DoCalcRelevs(args, res, count);}' % (name, num)
f2 = 'static size_t yabs_%s_factor_count() {return Singleton<TFormulas>()->at(%s).MaxFactorIndex() + 1;}' % (name, num)
body.append(f1)
body.append(f2)
d1 = 'yabs_%s' % name
d2 = 'yabs_%s_factor_count' % name
defs[num] = '{%s, %s}' % (d1, d2)
print tmpl % ('\n'.join(init), '\n\n'.join(body), ',\n'.join((defs.get(i, '{nullptr, nullptr}') for i in range(0, 10000))))
| 25.342105 | 160 | 0.576324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,440 | 0.747664 |