Dataset schema (one row per source file; ⌀ marks nullable columns):
  blob_id: string (length 40)
  directory_id: string (length 40)
  path: string (length 3 to 616)
  content_id: string (length 40)
  detected_licenses: list (length 0 to 112)
  license_type: string (2 classes)
  repo_name: string (length 5 to 115)
  snapshot_id: string (length 40)
  revision_id: string (length 40)
  branch_name: string (777 classes)
  visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
  revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
  committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
  github_id: int64 (4.92k to 681M, ⌀)
  star_events_count: int64 (0 to 209k)
  fork_events_count: int64 (0 to 110k)
  gha_license_id: string (22 classes)
  gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, ⌀)
  gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, ⌀)
  gha_language: string (149 classes)
  src_encoding: string (26 classes)
  language: string (1 class)
  is_vendor: bool (2 classes)
  is_generated: bool (2 classes)
  length_bytes: int64 (3 to 10.2M)
  extension: string (188 classes)
  content: string (length 3 to 10.2M)
  authors: list (length 1)
  author_id: string (length 1 to 132)
8920f1e323513222adafbd77853f23a6f87ca1e2 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/GPMM/YW_GPMM_SZSJ_287.py | dd5bdfe7fc378edf0289633f7e8debff4b950338 | []
| no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,063 | py |
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from ServiceConfig import *
from mainService import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_GPMM_SZSJ_287(xtp_test_case):
# YW_GPMM_SZSJ_287
def test_YW_GPMM_SZSJ_287(self):
title = '深A本方最优卖(卖出数量=可用股份数+100)'
        # Define the expected values for the current test case
        # Expected status: initial, unfilled, partially filled, fully filled, partial-cancel reported, partially cancelled, reported awaiting cancel, cancelled, rejected, cancel rejected, internally cancelled
        # xtp_ID and cancel_xtpID default to 0 and do not need to be changed
case_goal = {
'期望状态': '废单',
'errorID': 11010121,
'errorMSG': queryOrderErrorMsg(11010121),
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
        # Define the order parameter information ------------------------------------------
        # Parameters: ticker, market, security type, security status, trading status, side (B=buy, S=sell), expected status, Api
stkparm = QueryStkPriceQty('003123', '2', '0', '2', '0', 'S', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the test case fails
if stkparm['返回结果'] is False:
rs = {
'用例测试结果': stkparm['返回结果'],
'测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
}
self.assertEqual(rs['用例测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':trade_type + 1,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_FORWARD_BEST'],
'price': stkparm['涨停价'],
'quantity': 100100,
'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = serviceTest(Api, case_goal, wt_reqs)
logger.warning('执行结果为' + str(rs['用例测试结果']) + ','
+ str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
self.assertEqual(rs['用例测试结果'], True) # 0
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
987ab0b6c6d56227783ce5d319a505c1c5526fbf | aa64c62a3d246b87f3f1e5810a8f75b1d166aaf6 | /paradrop/daemon/paradrop/core/config/haproxy.py | fc29f54f582c5117987eb8e786324404287cdf69 | [
"Apache-2.0"
]
| permissive | ParadropLabs/Paradrop | ca40b3373c0732c781f9c10d38da9b6e9fbd3453 | c910fd5ac1d1b5e234f40f9f5592cc981e9bb5db | refs/heads/master | 2023-02-26T17:51:53.058300 | 2022-03-01T17:46:10 | 2022-03-01T17:46:10 | 37,789,450 | 88 | 31 | Apache-2.0 | 2023-02-16T05:24:46 | 2015-06-20T23:18:38 | Python | UTF-8 | Python | false | false | 5,061 | py |
"""
This module is responsible for configuring haproxy.
"""
import os
import subprocess
from paradrop.base import settings
from paradrop.core.chute.chute_storage import ChuteStorage
from paradrop.core.container.chutecontainer import ChuteContainer
def generateConfigSections():
sections = []
sections.append({
"header": "global",
"lines": [
"daemon",
"maxconn 256",
]
})
sections.append({
"header": "defaults",
"lines": [
"mode http",
"timeout connect 5000ms",
"timeout client 50000ms",
"timeout server 50000ms"
]
})
sections.append({
"header": "backend portal",
"lines": [
"server pd_portal 127.0.0.1:8080 maxconn 256"
]
})
# Custom variables:
# - req.querymarker: will be set to the literal "?" if the original request
# contains a query string. We will use this to construct a redirect with a
# query string only if needed.
# - req.subpath: will be set to the remainder of the path, if anything,
# after removing /chutes/<chutename>, e.g. "/chutes/hello-world/index.html"
# becomes "/index.html". This does not include the query string.
frontend = {
"header": "frontend http-in",
"lines": [
"bind *:80",
"default_backend portal",
"http-request set-var(req.querymarker) str(?) if { query -m found }",
"http-request set-var(req.subpath) path,regsub(^/chutes/[^/]+,)"
]
}
sections.append(frontend)
chuteStore = ChuteStorage()
chutes = chuteStore.getChuteList()
for chute in chutes:
port, service = chute.get_web_port_and_service()
if port is None or service is None:
continue
container = ChuteContainer(service.get_container_name())
if not container.isRunning():
continue
# Generate a rule that matches HTTP host header to chute name.
frontend['lines'].append("acl host_{} hdr(host) -i {}.chute.paradrop.org".format(
chute.name, chute.name))
frontend['lines'].append("use_backend {} if host_{}".format(
chute.name, chute.name))
# Generate rules that matches the beginning of the URL.
# We need to be careful and either have an exact match
# or make sure there is a slash or question mark after the chute name
# to avoid mix-ups, e.g. "sticky-board" and "sticky-board-new".
frontend['lines'].append("acl path_{} url /chutes/{}".format(
chute.name, chute.name))
frontend['lines'].append("acl path_{} url_beg /chutes/{}/".format(
chute.name, chute.name))
frontend['lines'].append("acl path_{} url_beg /chutes/{}?".format(
chute.name, chute.name))
# Try to find a host binding for the web port to redirect:
# http://<host addr>/chutes/<chute>/<path> ->
# http://<host addr>:<chute port>/<path>
#
# We need to do a lookup because the host port might be dynamically
# assigned by Docker.
#
# Use HTTP code 302 for the redirect, which will not be cached by the
# web browser. The port portion of the URL can change whenever the
# chute restarts, so we don't want web browsers to cache it. Browsers
# will cache a 301 (Moved Permanently) response.
portconf = container.getPortConfiguration(port, "tcp")
if len(portconf) > 0:
# TODO: Are there other elements in the list?
binding = portconf[0]
frontend['lines'].append("http-request replace-value Host (.*):(.*) \\1")
frontend['lines'].append("http-request redirect location http://%[req.hdr(host)]:{}%[var(req.subpath)]%[var(req.querymarker)]%[query] code 302 if path_{}".format(
binding['HostPort'], chute.name))
# Add a server at the chute's IP address.
sections.append({
"header": "backend {}".format(chute.name),
"lines": [
"server {} {}:{} maxconn 256".format(chute.name,
container.getIP(), port)
]
})
return sections
def writeConfigFile(output):
sections = generateConfigSections()
for section in sections:
output.write(section['header'] + "\n")
for line in section['lines']:
output.write(" " + line + "\n")
output.write("\n")
def reconfigureProxy(update):
"""
Reconfigure haproxy with forwarding and redirect rules.
"""
confFile = os.path.join(settings.RUNTIME_HOME_DIR, "haproxy.conf")
pidFile = os.path.join(settings.TMP_DIR, "haproxy.pid")
with open(confFile, "w") as output:
writeConfigFile(output)
cmd = ["haproxy", "-f", confFile, "-p", pidFile]
if os.path.exists(pidFile):
with open(pidFile, "r") as source:
pid = source.read().strip()
cmd.extend(["-sf", pid])
subprocess.call(cmd)
| [
"[email protected]"
]
| |
09db562a43a4de24b3c2c642181d463e0a4b80ae | 6d9795fa1aafc0fa5316020aaa0eaa4f68b76229 | /sellproperty/models.py | 63aa8f893cb9dcbfbd6429438758eeac799571cd | []
| no_license | icerahi/immolists | 02d379a22c193e793b26e35828b5eebff33bf888 | 813333c3923385861f111bb7aa715aeb04108c3a | refs/heads/master | 2022-12-15T15:00:39.844142 | 2022-01-06T10:06:44 | 2022-01-06T10:06:44 | 196,600,572 | 0 | 0 | null | 2022-11-22T04:14:43 | 2019-07-12T15:12:33 | JavaScript | UTF-8 | Python | false | false | 6,052 | py |
import os
import random
from django.contrib.auth.models import User
from django.db import models
# Create your models here.
from ckeditor.fields import RichTextField
from ckeditor_uploader.fields import RichTextUploadingField
from django.conf import settings
from django.db import models
from django.db.models.signals import pre_save
from django.dispatch import receiver
from django.urls import reverse
from django.utils.text import slugify
from djmoney.models.fields import MoneyField
from phonenumber_field.modelfields import PhoneNumberField
from embed_video.fields import EmbedVideoField
from places.fields import PlacesField
class Category(models.Model):
name=models.CharField(max_length=200,unique=True,)
def __str__(self):
return f"{self.name}"
class Type(models.Model):
name=models.CharField(max_length=200,unique=True)
category=models.ForeignKey(Category,related_name='category',on_delete=models.CASCADE)
def __str__(self):
return f"{self.name}"
class PublishedManager(models.Manager):
def get_queryset(self):
return super(PublishedManager, self).get_queryset().filter(status='published')
class AllObjectManager(models.Manager):
def get_queryset(self):
return super(AllObjectManager, self).get_queryset()
def get_filename_extention(filepath):
base_name=os.path.basename(filepath)
name,ext=os.path.splitext(base_name)
return name,ext
def upload_image_path(instance,filename):
new_filename=random.randint(1,1234567876543211)
name,ext=get_filename_extention(filename)
final_filename='{new_filename}{ext}'.format(new_filename=filename,ext=ext)
return 'sellproperty/{new_filename}/{final_filename}'.format(
new_filename=new_filename,
final_filename=final_filename
)
class SellProperty(models.Model):
STATUS_CHOICES=(
('draf','Draft'),
('published','Published')
)
ACTION_FOR=(('sale','Sale',),
('rent','Rent')
)
RENT_PER=(("nothing","One Time Price (For sale)"),
('month','PER MONTH'),
('year','PER YEAR'))
realator = models.ForeignKey(settings.AUTH_USER_MODEL,on_delete=models.CASCADE)
category =models.ForeignKey(Category,on_delete=models.CASCADE)
type =models.ForeignKey(Type,on_delete=models.CASCADE)
title =models.CharField(max_length=200)
full_description =RichTextUploadingField()
key_features =RichTextField()
min_price = MoneyField(max_digits=14, decimal_places=2, default_currency='USD')
max_price = MoneyField(max_digits=14, decimal_places=2, default_currency='USD')
created =models.DateTimeField(auto_now_add=True)
updated =models.DateTimeField(auto_now=True)
slug = models.SlugField()
status =models.CharField(max_length=12,choices=STATUS_CHOICES,default='published')
published =PublishedManager() #Costom model manager
objects =AllObjectManager() # Costom model manager
main_image =models.ImageField(upload_to=upload_image_path,default='default.jpg')
image_2 =models.ImageField(upload_to=upload_image_path,null=True,blank=True)
image_3 =models.ImageField(upload_to=upload_image_path,null=True,blank=True)
views = models.PositiveIntegerField(default=0, blank=True)
favourite =models.ManyToManyField(settings.AUTH_USER_MODEL,blank=True,related_name='favourite')
video = EmbedVideoField(null=True,blank=True)
action =models.CharField(max_length=6,choices=ACTION_FOR)
rent_per =models.CharField(max_length=30,choices=RENT_PER,null=True,blank=True)
location = PlacesField(blank=True)
def __unicode__(self):
return self.location.place
def __str__(self):
return f"{self.title}"
class Meta:
ordering=['-created']
def get_update_url(self,*args,**kwargs):
return reverse('dashboard:sell_update',kwargs={'pk':self.pk,'slug':self.slug})
def get_delete_url(self,*args,**kwargs):
return reverse('dashboard:sell_delete',kwargs={'pk':self.pk,'slug':self.slug})
def get_absolute_url(self,*args,**kwargs):
return reverse('site:detail',kwargs={'pk':self.pk,'slug':self.slug})
@receiver(pre_save,sender=SellProperty)
def pre_save_slug(sender,**kwargs):
slug=slugify(kwargs['instance'].title)
kwargs['instance'].slug=slug
class EnquiryManager(models.Manager):
def get_come(self,user):
return super(EnquiryManager, self).get_queryset().filter(property__realator=user)
def get_send(self,user):
return super(EnquiryManager, self).get_queryset().filter(email=user.email)
class Enquiry(models.Model):
property=models.ForeignKey(SellProperty,on_delete=models.CASCADE,related_name='enquiry')
name =models.CharField(max_length=100,blank=False,null=False)
email=models.EmailField(blank=False,null=False)
phone=PhoneNumberField(blank=True,null=True)
message=models.TextField(blank=False,null=False)
time =models.DateTimeField(auto_now_add=True)
objects=EnquiryManager()
def __str__(self):
return f'{self.name}'
class Meta:
ordering=['-time']
def get_come_delete_url(self,*args,**kwargs):
return reverse('dashboard:enquirycome_delete',kwargs={'pk':self.pk})
def get_send_delete_url(self,*args,**kwargs):
return reverse('dashboard:enquirysend_delete',kwargs={'pk':self.pk})
class MakeOffer(models.Model):
property=models.ForeignKey(SellProperty,on_delete=models.CASCADE,related_name='make_offer')
discount=models.DecimalField(max_digits=3,decimal_places=0)
time=models.DateTimeField(auto_now_add=True)
objects=AllObjectManager()
def get_delete_url(self,*args,**kwargs):
return reverse('dashboard:offer_remove',kwargs={
'pk':self.pk,
})
def __str__(self):
return f'{self.discount}'
class Meta:
ordering=['-time']
| [
"[email protected]"
]
| |
f6cbe10ec60a3e67c3d01909eb6787f81d784725 | 52b5fa23f79d76883728d8de0bfd202c741e9c43 | /kubernetes/test/test_v1_horizontal_pod_autoscaler.py | 315840a3d5385881407d0c8128db6c771a02de99 | []
| no_license | kippandrew/client-python-tornado | 5d00810f57035825a84e37ff8fc89a7e79aed8da | d479dfeb348c5dd2e929327d800fe033b5b3b010 | refs/heads/master | 2021-09-04T13:01:28.275677 | 2018-01-18T23:27:34 | 2018-01-18T23:27:34 | 114,912,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,037 | py |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1.8.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import kubernetes.client
from kubernetes.client.models.v1_horizontal_pod_autoscaler import V1HorizontalPodAutoscaler # noqa: E501
from kubernetes.client.rest import ApiException
class TestV1HorizontalPodAutoscaler(unittest.TestCase):
"""V1HorizontalPodAutoscaler unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testV1HorizontalPodAutoscaler(self):
"""Test V1HorizontalPodAutoscaler"""
# FIXME: construct object with mandatory attributes with example values
# model = kubernetes.client.models.v1_horizontal_pod_autoscaler.V1HorizontalPodAutoscaler() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
89e06687f93fce54b05689e3792cc5692934b929 | da497ddf926b8791f3812c79543120215822216b | /icsbep/pu-sol-therm-012/openmc/case-14/generate_materials.py | 8eab1d8d6513479d03829680991c284d7261e01d | []
| no_license | mit-crpg/benchmarks | 55f38e569699554d07df254103e2f828dc5b4ff8 | 58e15679ec684b9e2f552df58099e3648b5708cc | refs/heads/master | 2022-05-17T12:27:45.590757 | 2022-05-09T15:07:00 | 2022-05-09T15:07:00 | 2,704,358 | 23 | 30 | null | 2019-11-11T16:35:27 | 2011-11-03T19:04:29 | Python | UTF-8 | Python | false | false | 1,822 | py |
import openmc
mats = openmc.Materials()
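# Note: set_density('sum') has OpenMC derive each material's density from the
# sum of its per-nuclide atom densities (the values given below, in atom/b-cm).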
mat = openmc.Material(1)
mat.name = "Plutonium nitrate solution (52.7 g/L)"
mat.set_density('sum')
mat.add_nuclide('Pu239', 9.86655e-05)
mat.add_nuclide('Pu240', 2.50004e-05)
mat.add_nuclide('Pu241', 7.41089e-06)
mat.add_nuclide('Pu242', 1.49702e-06)
mat.add_nuclide('Am241', 8.03099e-07)
mat.add_element('N', 1.78497e-03)
mat.add_element('O', 3.59564e-02)
mat.add_nuclide('H1', 6.24015e-02)
mat.add_element('Fe', 1.21850e-05)
mat.add_element('Cr', 3.91841e-06)
mat.add_element('Ni', 2.77719e-06)
mat.add_s_alpha_beta('c_H_in_H2O')
mats.append(mat)
mat = openmc.Material(2)
mat.name = "Air"
mat.set_density('sum')
mat.add_nuclide('O16', 1.0784e-05)
mat.add_nuclide('N14', 4.3090e-05)
mats.append(mat)
mat = openmc.Material(3)
mat.name = "Stainless steel"
mat.set_density('sum')
mat.add_element('Fe', 6.1344e-02)
mat.add_element('Cr', 1.6472e-02)
mat.add_element('Ni', 8.1050e-03)
mats.append(mat)
mat = openmc.Material(4)
mat.name = "Lucoflex"
mat.set_density('sum')
mat.add_element('C', 2.7365e-02)
mat.add_nuclide('H1', 4.1047e-02)
mat.add_element('Cl', 1.3682e-02)
mats.append(mat)
mat = openmc.Material(5)
mat.name = "Water"
mat.set_density('sum')
mat.add_nuclide('H1', 6.6688e-02)
mat.add_element('O', 3.3344e-02)
mat.add_s_alpha_beta('c_H_in_H2O')
mats.append(mat)
mat = openmc.Material(6)
mat.name = "Steel (pool wall)"
mat.set_density('sum')
mat.add_element('Fe', 8.5068e-02)
mat.add_element('C', 5.5545e-04)
mats.append(mat)
mat = openmc.Material(7)
mat.name = "Concrete"
mat.set_density('sum')
mat.add_nuclide('H1', 1.035e-02)
mat.add_nuclide('B10', 1.602e-06)
mat.add_element('O', 4.347e-02)
mat.add_element('Al', 1.563e-03)
mat.add_element('Si', 1.417e-02)
mat.add_element('Ca', 6.424e-03)
mat.add_element('Fe', 7.621e-04)
mats.append(mat)
mats.export_to_xml()
| [
"[email protected]"
]
| |
2fc35c78749760c361cd5b6ea2884fc7fd16bb07 | f8a66f137d53306d1f05db6a2a6a0f4d0bd5acf1 | /Cyber-Main/JSL_Threat_Intel_Framework_whodat/a.py | f08854459bb01f5f6acedb36866ec7d61afe6614 | []
| no_license | sec-js/JSL-Cyber-ThreatIntelCore | 5d9e63a5fca0b0d2e250d682332ad86286277205 | a66c350b42c7ed95a4e3703e82983626fdab8ab7 | refs/heads/master | 2020-12-03T12:46:53.319750 | 2017-02-03T19:32:30 | 2017-02-03T19:32:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,957 | py |
import csv
def write_final_output(dico):
f = open("outputfile", 'w')
f.write(
'pipelineid' + ',' + 'datauploadid' + ',' + 'uuid' + ',' + 'referential' + ',' + 'datasourcename' + ',' + 'date' + ',' + 'cog' + ',' + 'model' + ',' + 'concept' \
+ ',' + 'segment' + ',' + 'pedigree' + ',' + 'confidence_score' + ',' + 'ipaddress' + ',' + 'ipaddress_int' + ',' + 'offenderclass' + ',' + 'first_observed_date' + ',' + 'first_observed_time' + ',' + \
'most_recent_observation_date' + ',' + 'most_recent_observation_time' + ',' + 'total_observations' + ',' + 'blranking' + ',' + 'threat_score' + ',' + 'total_capabilities' + ',' + \
'commvett' + ',' + 'commdatevett' + ',' + 'govvett' + ',' + 'govdatevett' + ',' + 'countryabbrv' + ',' + 'country' + ',' + 'city' + ',' + 'coordinates' + ',' + 'geo_longitude' + ',' + 'geo_latitude' \
+ ',' + 'isp' + ',' + 'domain' + ',' + 'netspeed' + ',' + 'network_asn' + ',' + 'network_class' + ',' + 'network_type' + ',' + 'active boolean' + ',' + 'insrtdttm' + ',' + 'updtdttm' + '\n')
for entry in dico:
f.write(str(entry['pipelineid']) + ',' + str(entry['datauploadid']) + ',' + str(entry['uuid']) + ',' + str(
entry['referential']) + ',' + str(entry['datasourcename']) + ',' + str(entry['date']) + ',' + str(
entry['cog']) + ',' + str(entry['model']) + ',' + str(entry['concept']) \
+ ',' + str(entry['segment']) + ',' + str(entry['pedigree']) + ',' + str(
entry['confidence_score']) + ',' + str(entry['ipaddress']) + ',' + str(entry['ipaddress_int']) + ',' + str(
entry['offenderclass']) + ',' + str(entry['first_observed_date']) + ',' + str(
entry['first_observed_time']) + ',' + \
str(entry['most_recent_observation_date']) + ',' + str(
entry['most_recent_observation_time']) + ',' + str(entry['total_observations']) + ',' + str(
entry['blranking']) + ',' + str(entry['threat_score']) + ',' + str(entry['total_capabilities']) + ',' + \
str(entry['commvett']) + ',' + str(entry['commdatevett']) + ',' + str(entry['govvett']) + ',' + str(
entry['govdatevett']) + ',' + str(entry['countryabbrv']) + ',' + str(entry['country']) + ',' + str(
entry['city']) + ',' + str(entry['coordinates']) + ',' + str(entry['geo_longitude']) + ',' + str(
entry['geo_latitude']) \
+ ',' + str(entry['isp']) + ',' + str(entry['domain']) + ',' + str(entry['netspeed']) + ',' + str(
entry['network_asn']) + ',' + str(entry['network_class']) + ',' + str(entry['network_type']) + ',' + str(
entry['active boolean']) + ',' + str(entry['insrtdttm']) + ',' + str(entry['updtdttm']) + '\n')
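# A more robust sketch of the same output using csv.DictWriter (assumption:
# `fields` is passed in the same order as the header written above):
def write_final_output_dictwriter(dico, fields):
    with open('outputfile', 'w', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=fields, extrasaction='ignore')
        writer.writeheader()
        writer.writerows(dico)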
with open('test.csv') as f:
reader = csv.reader(f, skipinitialspace=True)
header = next(reader)
a = [dict(zip(header, map(str, row))) for row in reader]
write_final_output(a)
| [
"[email protected]"
]
| |
2496fe20939060e4e360a7862a99df132f61170b | d8cbe9ce0469f72b8929af01538b6ceddff10a38 | /homeassistant/components/sensibo/sensor.py | 8048eece3389bcd056ef788ba4ca8de6ce7edddc | [
"Apache-2.0"
]
| permissive | piitaya/home-assistant | 9c1ba162dac9604e4d43e035e74bad7bba327f0b | 48893738192431f96966998c4ff7a3723a2f8f4a | refs/heads/dev | 2023-03-07T16:13:32.117970 | 2023-01-10T17:47:48 | 2023-01-10T17:47:48 | 172,578,293 | 3 | 1 | Apache-2.0 | 2023-02-22T06:15:56 | 2019-02-25T20:19:40 | Python | UTF-8 | Python | false | false | 12,334 | py |
"""Sensor platform for Sensibo integration."""
from __future__ import annotations
from collections.abc import Callable, Mapping
from dataclasses import dataclass
from datetime import datetime
from typing import TYPE_CHECKING, Any
from pysensibo.model import MotionSensor, SensiboDevice
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
PERCENTAGE,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
UnitOfElectricPotential,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity import EntityCategory
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import StateType
from .const import DOMAIN
from .coordinator import SensiboDataUpdateCoordinator
from .entity import SensiboDeviceBaseEntity, SensiboMotionBaseEntity
PARALLEL_UPDATES = 0
@dataclass
class MotionBaseEntityDescriptionMixin:
"""Mixin for required Sensibo base description keys."""
value_fn: Callable[[MotionSensor], StateType]
@dataclass
class DeviceBaseEntityDescriptionMixin:
"""Mixin for required Sensibo base description keys."""
value_fn: Callable[[SensiboDevice], StateType | datetime]
extra_fn: Callable[[SensiboDevice], dict[str, str | bool | None] | None] | None
@dataclass
class SensiboMotionSensorEntityDescription(
SensorEntityDescription, MotionBaseEntityDescriptionMixin
):
"""Describes Sensibo Motion sensor entity."""
@dataclass
class SensiboDeviceSensorEntityDescription(
SensorEntityDescription, DeviceBaseEntityDescriptionMixin
):
"""Describes Sensibo Device sensor entity."""
FILTER_LAST_RESET_DESCRIPTION = SensiboDeviceSensorEntityDescription(
key="filter_last_reset",
device_class=SensorDeviceClass.TIMESTAMP,
name="Filter last reset",
icon="mdi:timer",
value_fn=lambda data: data.filter_last_reset,
extra_fn=None,
)
MOTION_SENSOR_TYPES: tuple[SensiboMotionSensorEntityDescription, ...] = (
SensiboMotionSensorEntityDescription(
key="rssi",
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
state_class=SensorStateClass.MEASUREMENT,
name="rssi",
icon="mdi:wifi",
value_fn=lambda data: data.rssi,
entity_registry_enabled_default=False,
),
SensiboMotionSensorEntityDescription(
key="battery_voltage",
device_class=SensorDeviceClass.VOLTAGE,
entity_category=EntityCategory.DIAGNOSTIC,
native_unit_of_measurement=UnitOfElectricPotential.VOLT,
state_class=SensorStateClass.MEASUREMENT,
name="Battery voltage",
icon="mdi:battery",
value_fn=lambda data: data.battery_voltage,
),
SensiboMotionSensorEntityDescription(
key="humidity",
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
name="Humidity",
icon="mdi:water",
value_fn=lambda data: data.humidity,
),
SensiboMotionSensorEntityDescription(
key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
name="Temperature",
icon="mdi:thermometer",
value_fn=lambda data: data.temperature,
),
)
PURE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
SensiboDeviceSensorEntityDescription(
key="pm25",
device_class=SensorDeviceClass.PM25,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
name="PM2.5",
icon="mdi:air-filter",
value_fn=lambda data: data.pm25,
extra_fn=None,
),
SensiboDeviceSensorEntityDescription(
key="pure_sensitivity",
name="Pure sensitivity",
icon="mdi:air-filter",
value_fn=lambda data: data.pure_sensitivity,
extra_fn=None,
translation_key="sensitivity",
),
FILTER_LAST_RESET_DESCRIPTION,
)
DEVICE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
SensiboDeviceSensorEntityDescription(
key="timer_time",
device_class=SensorDeviceClass.TIMESTAMP,
name="Timer end time",
icon="mdi:timer",
value_fn=lambda data: data.timer_time,
extra_fn=lambda data: {"id": data.timer_id, "turn_on": data.timer_state_on},
),
SensiboDeviceSensorEntityDescription(
key="feels_like",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
name="Temperature feels like",
value_fn=lambda data: data.feelslike,
extra_fn=None,
entity_registry_enabled_default=False,
),
SensiboDeviceSensorEntityDescription(
key="climate_react_low",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
name="Climate React low temperature threshold",
value_fn=lambda data: data.smart_low_temp_threshold,
extra_fn=lambda data: data.smart_low_state,
entity_registry_enabled_default=False,
),
SensiboDeviceSensorEntityDescription(
key="climate_react_high",
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
name="Climate React high temperature threshold",
value_fn=lambda data: data.smart_high_temp_threshold,
extra_fn=lambda data: data.smart_high_state,
entity_registry_enabled_default=False,
),
SensiboDeviceSensorEntityDescription(
key="climate_react_type",
translation_key="smart_type",
name="Climate React type",
value_fn=lambda data: data.smart_type,
extra_fn=None,
entity_registry_enabled_default=False,
),
FILTER_LAST_RESET_DESCRIPTION,
)
AIRQ_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
SensiboDeviceSensorEntityDescription(
key="airq_tvoc",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
state_class=SensorStateClass.MEASUREMENT,
icon="mdi:air-filter",
name="AirQ TVOC",
value_fn=lambda data: data.tvoc,
extra_fn=None,
),
SensiboDeviceSensorEntityDescription(
key="airq_co2",
device_class=SensorDeviceClass.CO2,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
name="AirQ CO2",
value_fn=lambda data: data.co2,
extra_fn=None,
),
)
ELEMENT_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = (
SensiboDeviceSensorEntityDescription(
key="pm25",
device_class=SensorDeviceClass.PM25,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
name="PM 2.5",
value_fn=lambda data: data.pm25,
extra_fn=None,
),
SensiboDeviceSensorEntityDescription(
key="tvoc",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
state_class=SensorStateClass.MEASUREMENT,
name="TVOC",
value_fn=lambda data: data.tvoc,
extra_fn=None,
),
SensiboDeviceSensorEntityDescription(
key="co2",
device_class=SensorDeviceClass.CO2,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
name="CO2",
value_fn=lambda data: data.co2,
extra_fn=None,
),
SensiboDeviceSensorEntityDescription(
key="ethanol",
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
name="Ethanol",
value_fn=lambda data: data.etoh,
extra_fn=None,
),
SensiboDeviceSensorEntityDescription(
key="iaq",
device_class=SensorDeviceClass.AQI,
state_class=SensorStateClass.MEASUREMENT,
name="Air quality",
value_fn=lambda data: data.iaq,
extra_fn=None,
),
)
DESCRIPTION_BY_MODELS = {
"pure": PURE_SENSOR_TYPES,
"airq": AIRQ_SENSOR_TYPES,
"elements": ELEMENT_SENSOR_TYPES,
}
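# Models without a dedicated description tuple above fall back to
# DEVICE_SENSOR_TYPES (see the lookup in async_setup_entry below).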
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up Sensibo sensor platform."""
coordinator: SensiboDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
entities: list[SensiboMotionSensor | SensiboDeviceSensor] = []
for device_id, device_data in coordinator.data.parsed.items():
if device_data.motion_sensors:
entities.extend(
SensiboMotionSensor(
coordinator, device_id, sensor_id, sensor_data, description
)
for sensor_id, sensor_data in device_data.motion_sensors.items()
for description in MOTION_SENSOR_TYPES
)
entities.extend(
SensiboDeviceSensor(coordinator, device_id, description)
for device_id, device_data in coordinator.data.parsed.items()
for description in DESCRIPTION_BY_MODELS.get(
device_data.model, DEVICE_SENSOR_TYPES
)
)
async_add_entities(entities)
class SensiboMotionSensor(SensiboMotionBaseEntity, SensorEntity):
"""Representation of a Sensibo Motion Sensor."""
entity_description: SensiboMotionSensorEntityDescription
def __init__(
self,
coordinator: SensiboDataUpdateCoordinator,
device_id: str,
sensor_id: str,
sensor_data: MotionSensor,
entity_description: SensiboMotionSensorEntityDescription,
) -> None:
"""Initiate Sensibo Motion Sensor."""
super().__init__(
coordinator,
device_id,
sensor_id,
sensor_data,
)
self.entity_description = entity_description
self._attr_unique_id = f"{sensor_id}-{entity_description.key}"
@property
def native_unit_of_measurement(self) -> str | None:
"""Add native unit of measurement."""
if self.entity_description.device_class == SensorDeviceClass.TEMPERATURE:
return UnitOfTemperature.CELSIUS
return self.entity_description.native_unit_of_measurement
@property
def native_value(self) -> StateType:
"""Return value of sensor."""
if TYPE_CHECKING:
assert self.sensor_data
return self.entity_description.value_fn(self.sensor_data)
class SensiboDeviceSensor(SensiboDeviceBaseEntity, SensorEntity):
"""Representation of a Sensibo Device Sensor."""
entity_description: SensiboDeviceSensorEntityDescription
def __init__(
self,
coordinator: SensiboDataUpdateCoordinator,
device_id: str,
entity_description: SensiboDeviceSensorEntityDescription,
) -> None:
"""Initiate Sensibo Device Sensor."""
super().__init__(
coordinator,
device_id,
)
self.entity_description = entity_description
self._attr_unique_id = f"{device_id}-{entity_description.key}"
@property
def native_unit_of_measurement(self) -> str | None:
"""Add native unit of measurement."""
if self.entity_description.device_class == SensorDeviceClass.TEMPERATURE:
return UnitOfTemperature.CELSIUS
return self.entity_description.native_unit_of_measurement
@property
def native_value(self) -> StateType | datetime:
"""Return value of sensor."""
state = self.entity_description.value_fn(self.device_data)
return state
@property
def extra_state_attributes(self) -> Mapping[str, Any] | None:
"""Return additional attributes."""
if self.entity_description.extra_fn is not None:
return self.entity_description.extra_fn(self.device_data)
return None
| [
"[email protected]"
]
| |
66dc0f2daff11b6cce93fd0485b61c72d2d44f92 | 1adc05008f0caa9a81cc4fc3a737fcbcebb68995 | /hardhat/recipes/pango.py | 534919580c9ee2d79f2ed539a86e1db554ea72c2 | [
"MIT",
"BSD-3-Clause"
]
| permissive | stangelandcl/hardhat | 4aa995518697d19b179c64751108963fa656cfca | 1ad0c5dec16728c0243023acb9594f435ef18f9c | refs/heads/master | 2021-01-11T17:19:41.988477 | 2019-03-22T22:18:44 | 2019-03-22T22:18:52 | 79,742,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 725 | py |
from .base import GnuRecipe
class PangoRecipe(GnuRecipe):
def __init__(self, *args, **kwargs):
super(PangoRecipe, self).__init__(*args, **kwargs)
self.sha256 = '1d2b74cd63e8bd41961f2f8d952355aa' \
'0f9be6002b52c8aa7699d9f5da597c9d'
self.name = 'pango'
self.depends = ['cairo', 'fontconfig', 'glib', 'harfbuzz']
self.version = '1.42.4'
self.version_regex = '(?P<version>\d+\.\d+)'
self.version_url = 'http://ftp.gnome.org/pub/GNOME/sources/pango/'
short_version = '.'.join(self.version.split('.')[:2])
self.url = 'http://ftp.gnome.org/pub/gnome/sources/$name/' \
'%s/$name-$version.tar.xz' % short_version
| [
"[email protected]"
]
| |
d4df6a68cf8b4a3fcd98eebda53d3764910e0f8a | 47288e23d0248d7bfbda0cfae5bc138d389cbbb6 | /sympy/integrals/rde.py | c0a1c22af21f5d97140e22176bc174fe8dde385f | []
| no_license | cosmosZhou/sympy | d94ce6ad9028d143bb877f46fad4eda1d5dc2173 | 6160fff63636fd81755bd94e777ba450a09df517 | refs/heads/master | 2022-07-30T20:51:35.359580 | 2022-07-11T12:40:50 | 2022-07-11T12:40:50 | 199,948,191 | 3 | 1 | null | 2020-08-26T14:13:49 | 2019-08-01T00:34:25 | Python | UTF-8 | Python | false | false | 26,576 | py |
"""
Algorithms for solving the Risch differential equation.
Given a differential field K of characteristic 0 that is a simple
monomial extension of a base field k and f, g in K, the Risch
Differential Equation problem is to decide if there exist y in K such
that Dy + f*y == g and to find one if there are some. If t is a
monomial over k and the coefficients of f and g are in k(t), then y is
in k(t), and the outline of the algorithm here is given as:
1. Compute the normal part n of the denominator of y. The problem is
then reduced to finding y' in k<t>, where y == y'/n.
2. Compute the special part s of the denominator of y. The problem is
then reduced to finding y'' in k[t], where y == y''/(n*s)
3. Bound the degree of y''.
4. Reduce the equation Dy + f*y == g to a similar equation with f, g in
k[t].
5. Find the solutions in k[t] of bounded degree of the reduced equation.
See Chapter 6 of "Symbolic Integration I: Transcendental Functions" by
Manuel Bronstein. See also the docstring of risch.py.
"""
from operator import mul
from sympy.core import oo
from sympy.core.compatibility import reduce
from sympy.core.symbol import Dummy
from sympy.polys import Poly, gcd, ZZ, cancel
from sympy.integrals.risch import (gcdex_diophantine, frac_in, derivation,
splitfactor, NonElementaryIntegralException, DecrementLevel)
# TODO: Add messages to NonElementaryIntegralException errors
def order_at(a, p, t):
"""
Computes the order of a at p, with respect to t.
For a, p in k[t], the order of a at p is defined as nu_p(a) = max({n
in Z+ such that p**n|a}), where a != 0. If a == 0, nu_p(a) = +oo.
To compute the order at a rational function, a/b, use the fact that
nu_p(a/b) == nu_p(a) - nu_p(b).
"""
if a.is_zero:
return oo
if p == Poly(t, t):
return a.as_poly(t).ET()[0][0]
# Uses binary search for calculating the power. power_list collects the tuples
# (p^k,k) where each k is some power of 2. After deciding the largest k
# such that k is power of 2 and p^k|a the loop iteratively calculates
# the actual power.
power_list = []
p1 = p
r = a.rem(p1)
tracks_power = 1
while r.is_zero:
power_list.append((p1,tracks_power))
p1 = p1*p1
tracks_power *= 2
r = a.rem(p1)
n = 0
product = Poly(1, t)
while len(power_list) != 0:
final = power_list.pop()
productf = product*final[0]
r = a.rem(productf)
if r.is_zero:
n += final[1]
product = productf
return n
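# For example, with a = x**3 + x**2 == x**2*(x + 1) and p = x (so t == x),
# order_at(Poly(x**3 + x**2, x), Poly(x, x), x) returns 2, since x**2 | a
# but x**3 does not divide a.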
def order_at_oo(a, d, t):
"""
Computes the order of a/d at oo (infinity), with respect to t.
For f in k(t), the order or f at oo is defined as deg(d) - deg(a), where
f == a/d.
"""
if a.is_zero:
return oo
return d.degree(t) - a.degree(t)
def weak_normalizer(a, d, DE, z=None):
"""
Weak normalization.
Given a derivation D on k[t] and f == a/d in k(t), return q in k[t]
such that f - Dq/q is weakly normalized with respect to t.
f in k(t) is said to be "weakly normalized" with respect to t if
residue_p(f) is not a positive integer for any normal irreducible p
in k[t] such that f is in R_p (Definition 6.1.1). If f has an
elementary integral, this is equivalent to no logarithm of
integral(f) whose argument depends on t has a positive integer
coefficient, where the arguments of the logarithms not in k(t) are
in k[t].
Returns (q, f - Dq/q)
"""
z = z or Dummy('z')
dn, ds = splitfactor(d, DE)
# Compute d1, where dn == d1*d2**2*...*dn**n is a square-free
# factorization of d.
g = gcd(dn, dn.diff(DE.t))
d_sqf_part = dn.quo(g)
d1 = d_sqf_part.quo(gcd(d_sqf_part, g))
a1, b = gcdex_diophantine(d.quo(d1).as_poly(DE.t), d1.as_poly(DE.t),
a.as_poly(DE.t))
r = (a - Poly(z, DE.t)*derivation(d1, DE)).as_poly(DE.t).resultant(
d1.as_poly(DE.t))
r = Poly(r, z)
if not r.has(z):
return (Poly(1, DE.t), (a, d))
N = [i for i in r.real_roots() if i in ZZ and i > 0]
q = reduce(mul, [gcd(a - Poly(n, DE.t)*derivation(d1, DE), d1) for n in N],
Poly(1, DE.t))
dq = derivation(q, DE)
sn = q*a - d*dq
sd = q*d
sn, sd = sn.cancel(sd, include=True)
return (q, (sn, sd))
def normal_denom(fa, fd, ga, gd, DE):
"""
Normal part of the denominator.
Given a derivation D on k[t] and f, g in k(t) with f weakly
normalized with respect to t, either raise NonElementaryIntegralException,
in which case the equation Dy + f*y == g has no solution in k(t), or the
quadruplet (a, b, c, h) such that a, h in k[t], b, c in k<t>, and for any
solution y in k(t) of Dy + f*y == g, q = y*h in k<t> satisfies
a*Dq + b*q == c.
This constitutes step 1 in the outline given in the rde.py docstring.
"""
dn, ds = splitfactor(fd, DE)
en, es = splitfactor(gd, DE)
p = dn.gcd(en)
h = en.gcd(en.diff(DE.t)).quo(p.gcd(p.diff(DE.t)))
a = dn*h
c = a*h
if c.div(en)[1]:
# en does not divide dn*h**2
raise NonElementaryIntegralException
ca = c*ga
ca, cd = ca.cancel(gd, include=True)
ba = a*fa - dn*derivation(h, DE)*fd
ba, bd = ba.cancel(fd, include=True)
# (dn*h, dn*h*f - dn*Dh, dn*h**2*g, h)
return (a, (ba, bd), (ca, cd), h)
def special_denom(a, ba, bd, ca, cd, DE, case='auto'):
"""
Special part of the denominator.
case is one of {'exp', 'tan', 'primitive'} for the hyperexponential,
hypertangent, and primitive cases, respectively. For the
hyperexponential (resp. hypertangent) case, given a derivation D on
k[t] and a in k[t], b, c, in k<t> with Dt/t in k (resp. Dt/(t**2 + 1) in
k, sqrt(-1) not in k), a != 0, and gcd(a, t) == 1 (resp.
gcd(a, t**2 + 1) == 1), return the quadruplet (A, B, C, 1/h) such that
A, B, C, h in k[t] and for any solution q in k<t> of a*Dq + b*q == c,
r = qh in k[t] satisfies A*Dr + B*r == C.
For case == 'primitive', k<t> == k[t], so it returns (a, b, c, 1) in
this case.
This constitutes step 2 of the outline given in the rde.py docstring.
"""
from sympy.integrals.prde import parametric_log_deriv
# TODO: finish writing this and write tests
if case == 'auto':
case = DE.case
if case == 'exp':
p = Poly(DE.t, DE.t)
elif case == 'tan':
p = Poly(DE.t**2 + 1, DE.t)
elif case in ['primitive', 'base']:
B = ba.to_field().quo(bd)
C = ca.to_field().quo(cd)
return (a, B, C, Poly(1, DE.t))
else:
raise ValueError("case must be one of {'exp', 'tan', 'primitive', "
"'base'}, not %s." % case)
nb = order_at(ba, p, DE.t) - order_at(bd, p, DE.t)
nc = order_at(ca, p, DE.t) - order_at(cd, p, DE.t)
n = min(0, nc - min(0, nb))
if not nb:
# Possible cancellation.
if case == 'exp':
dcoeff = DE.d.quo(Poly(DE.t, DE.t))
with DecrementLevel(DE): # We are guaranteed to not have problems,
# because case != 'base'.
alphaa, alphad = frac_in(-ba.eval(0)/bd.eval(0)/a.eval(0), DE.t)
etaa, etad = frac_in(dcoeff, DE.t)
A = parametric_log_deriv(alphaa, alphad, etaa, etad, DE)
if A is not None:
Q, m, z = A
if Q == 1:
n = min(n, m)
elif case == 'tan':
dcoeff = DE.d.quo(Poly(DE.t**2+1, DE.t))
with DecrementLevel(DE): # We are guaranteed to not have problems,
# because case != 'base'.
alphaa, alphad = frac_in(im(-ba.eval(sqrt(-1))/bd.eval(sqrt(-1))/a.eval(sqrt(-1))), DE.t)
betaa, betad = frac_in(re(-ba.eval(sqrt(-1))/bd.eval(sqrt(-1))/a.eval(sqrt(-1))), DE.t)
etaa, etad = frac_in(dcoeff, DE.t)
if recognize_log_derivative(2*betaa, betad, DE):
A = parametric_log_deriv(alphaa*sqrt(-1)*betad+alphad*betaa, alphad*betad, etaa, etad, DE)
if A is not None:
Q, m, z = A
if Q == 1:
n = min(n, m)
N = max(0, -nb, n - nc)
pN = p**N
pn = p**-n
A = a*pN
B = ba*pN.quo(bd) + Poly(n, DE.t)*a*derivation(p, DE).quo(p)*pN
C = (ca*pN*pn).quo(cd)
h = pn
# (a*p**N, (b + n*a*Dp/p)*p**N, c*p**(N - n), p**-n)
return (A, B, C, h)
def bound_degree(a, b, cQ, DE, case='auto', parametric=False):
"""
Bound on polynomial solutions.
Given a derivation D on k[t] and a, b, c in k[t] with a != 0, return
n in ZZ such that deg(q) <= n for any solution q in k[t] of
a*Dq + b*q == c, when parametric=False, or deg(q) <= n for any solution
c1, ..., cm in Const(k) and q in k[t] of a*Dq + b*q == Sum(ci*gi, (i, 1, m))
when parametric=True.
For parametric=False, cQ is c, a Poly; for parametric=True, cQ is Q ==
[q1, ..., qm], a list of Polys.
This constitutes step 3 of the outline given in the rde.py docstring.
"""
from sympy.integrals.prde import (parametric_log_deriv, limited_integrate,
is_log_deriv_k_t_radical_in_field)
# TODO: finish writing this and write tests
if case == 'auto':
case = DE.case
da = a.degree(DE.t)
db = b.degree(DE.t)
# The parametric and regular cases are identical, except for this part
if parametric:
dc = max([i.degree(DE.t) for i in cQ])
else:
dc = cQ.degree(DE.t)
alpha = cancel(-b.as_poly(DE.t).LC().as_expr()/
a.as_poly(DE.t).LC().as_expr())
if case == 'base':
n = max(0, dc - max(db, da - 1))
if db == da - 1 and alpha.is_Integer:
n = max(0, alpha, dc - db)
elif case == 'primitive':
if db > da:
n = max(0, dc - db)
else:
n = max(0, dc - da + 1)
etaa, etad = frac_in(DE.d, DE.T[DE.level - 1])
t1 = DE.t
with DecrementLevel(DE):
alphaa, alphad = frac_in(alpha, DE.t)
if db == da - 1:
# if alpha == m*Dt + Dz for z in k and m in ZZ:
try:
(za, zd), m = limited_integrate(alphaa, alphad, [(etaa, etad)],
DE)
except NonElementaryIntegralException:
pass
else:
if len(m) != 1:
raise ValueError("Length of m should be 1")
n = max(n, m[0])
elif db == da:
# if alpha == Dz/z for z in k*:
# beta = -lc(a*Dz + b*z)/(z*lc(a))
# if beta == m*Dt + Dw for w in k and m in ZZ:
# n = max(n, m)
A = is_log_deriv_k_t_radical_in_field(alphaa, alphad, DE)
if A is not None:
aa, z = A
if aa == 1:
beta = -(a*derivation(z, DE).as_poly(t1) +
b*z.as_poly(t1)).LC()/(z.as_expr()*a.LC())
betaa, betad = frac_in(beta, DE.t)
try:
(za, zd), m = limited_integrate(betaa, betad,
[(etaa, etad)], DE)
except NonElementaryIntegralException:
pass
else:
if len(m) != 1:
raise ValueError("Length of m should be 1")
n = max(n, m[0])
elif case == 'exp':
n = max(0, dc - max(db, da))
if da == db:
etaa, etad = frac_in(DE.d.quo(Poly(DE.t, DE.t)), DE.T[DE.level - 1])
with DecrementLevel(DE):
alphaa, alphad = frac_in(alpha, DE.t)
A = parametric_log_deriv(alphaa, alphad, etaa, etad, DE)
if A is not None:
# if alpha == m*Dt/t + Dz/z for z in k* and m in ZZ:
# n = max(n, m)
a, m, z = A
if a == 1:
n = max(n, m)
elif case in ['tan', 'other_nonlinear']:
delta = DE.d.degree(DE.t)
lam = DE.d.LC()
alpha = cancel(alpha/lam)
n = max(0, dc - max(da + delta - 1, db))
if db == da + delta - 1 and alpha.is_Integer:
n = max(0, alpha, dc - db)
else:
raise ValueError("case must be one of {'exp', 'tan', 'primitive', "
"'other_nonlinear', 'base'}, not %s." % case)
return n
def spde(a, b, c, n, DE):
"""
Rothstein's Special Polynomial Differential Equation algorithm.
Given a derivation D on k[t], an integer n and a, b, c in k[t] with
a != 0, either raise NonElementaryIntegralException, in which case the
equation a*Dq + b*q == c has no solution of degree at most n in
k[t], or return the tuple (B, C, m, alpha, beta) such that B, C,
alpha, beta in k[t], m in ZZ, and any solution q in k[t] of degree
at most n of a*Dq + b*q == c must be of the form
q == alpha*h + beta, where h in k[t], deg(h) <= m, and Dh + B*h == C.
This constitutes step 4 of the outline given in the rde.py docstring.
"""
zero = Poly(0, DE.t)
alpha = Poly(1, DE.t)
beta = Poly(0, DE.t)
while True:
if c.is_zero:
return (zero, zero, 0, zero, beta) # -1 is more to the point
if (n < 0) is True:
raise NonElementaryIntegralException
g = a.gcd(b)
if not c.rem(g).is_zero: # g does not divide c
raise NonElementaryIntegralException
a, b, c = a.quo(g), b.quo(g), c.quo(g)
if a.degree(DE.t) == 0:
b = b.to_field().quo(a)
c = c.to_field().quo(a)
return (b, c, n, alpha, beta)
r, z = gcdex_diophantine(b, a, c)
b += derivation(a, DE)
c = z - derivation(r, DE)
n -= a.degree(DE.t)
beta += alpha * r
alpha *= a
def no_cancel_b_large(b, c, n, DE):
"""
Poly Risch Differential Equation - No cancellation: deg(b) large enough.
Given a derivation D on k[t], n either an integer or +oo, and b, c
in k[t] with b != 0 and either D == d/dt or
deg(b) > max(0, deg(D) - 1), either raise NonElementaryIntegralException, in
which case the equation Dq + b*q == c has no solution of degree at
most n in k[t], or a solution q in k[t] of this equation with
deg(q) < n.
"""
q = Poly(0, DE.t)
while not c.is_zero:
m = c.degree(DE.t) - b.degree(DE.t)
if not 0 <= m <= n: # n < 0 or m < 0 or m > n
raise NonElementaryIntegralException
p = Poly(c.as_poly(DE.t).LC()/b.as_poly(DE.t).LC()*DE.t**m, DE.t,
expand=False)
q = q + p
n = m - 1
c = c - derivation(p, DE) - b*p
return q
def no_cancel_b_small(b, c, n, DE):
"""
Poly Risch Differential Equation - No cancellation: deg(b) small enough.
Given a derivation D on k[t], n either an integer or +oo, and b, c
in k[t] with deg(b) < deg(D) - 1 and either D == d/dt or
deg(D) >= 2, either raise NonElementaryIntegralException, in which case the
equation Dq + b*q == c has no solution of degree at most n in k[t],
or a solution q in k[t] of this equation with deg(q) <= n, or the
tuple (h, b0, c0) such that h in k[t], b0, c0, in k, and for any
solution q in k[t] of degree at most n of Dq + bq == c, y == q - h
is a solution in k of Dy + b0*y == c0.
"""
q = Poly(0, DE.t)
while not c.is_zero:
if n == 0:
m = 0
else:
m = c.degree(DE.t) - DE.d.degree(DE.t) + 1
if not 0 <= m <= n: # n < 0 or m < 0 or m > n
raise NonElementaryIntegralException
if m > 0:
p = Poly(c.as_poly(DE.t).LC()/(m*DE.d.as_poly(DE.t).LC())*DE.t**m,
DE.t, expand=False)
else:
if b.degree(DE.t) != c.degree(DE.t):
raise NonElementaryIntegralException
if b.degree(DE.t) == 0:
return (q, b.as_poly(DE.T[DE.level - 1]),
c.as_poly(DE.T[DE.level - 1]))
p = Poly(c.as_poly(DE.t).LC()/b.as_poly(DE.t).LC(), DE.t,
expand=False)
q = q + p
n = m - 1
c = c - derivation(p, DE) - b*p
return q
# TODO: better name for this function
def no_cancel_equal(b, c, n, DE):
"""
Poly Risch Differential Equation - No cancellation: deg(b) == deg(D) - 1
Given a derivation D on k[t] with deg(D) >= 2, n either an integer
or +oo, and b, c in k[t] with deg(b) == deg(D) - 1, either raise
NonElementaryIntegralException, in which case the equation Dq + b*q == c has
no solution of degree at most n in k[t], or a solution q in k[t] of
this equation with deg(q) <= n, or the tuple (h, m, C) such that h
in k[t], m in ZZ, and C in k[t], and for any solution q in k[t] of
degree at most n of Dq + b*q == c, y == q - h is a solution in k[t]
of degree at most m of Dy + b*y == C.
"""
q = Poly(0, DE.t)
lc = cancel(-b.as_poly(DE.t).LC()/DE.d.as_poly(DE.t).LC())
if lc.is_Integer and lc.is_positive:
M = lc
else:
M = -1
while not c.is_zero:
m = max(M, c.degree(DE.t) - DE.d.degree(DE.t) + 1)
if not 0 <= m <= n: # n < 0 or m < 0 or m > n
raise NonElementaryIntegralException
u = cancel(m*DE.d.as_poly(DE.t).LC() + b.as_poly(DE.t).LC())
if u.is_zero:
return (q, m, c)
if m > 0:
p = Poly(c.as_poly(DE.t).LC()/u*DE.t**m, DE.t, expand=False)
else:
if c.degree(DE.t) != DE.d.degree(DE.t) - 1:
raise NonElementaryIntegralException
else:
p = c.as_poly(DE.t).LC()/b.as_poly(DE.t).LC()
q = q + p
n = m - 1
c = c - derivation(p, DE) - b*p
return q
def cancel_primitive(b, c, n, DE):
"""
Poly Risch Differential Equation - Cancellation: Primitive case.
Given a derivation D on k[t], n either an integer or +oo, b in k, and
c in k[t] with Dt in k and b != 0, either raise
NonElementaryIntegralException, in which case the equation Dq + b*q == c
has no solution of degree at most n in k[t], or a solution q in k[t] of
this equation with deg(q) <= n.
"""
from sympy.integrals.prde import is_log_deriv_k_t_radical_in_field
with DecrementLevel(DE):
ba, bd = frac_in(b, DE.t)
A = is_log_deriv_k_t_radical_in_field(ba, bd, DE)
if A is not None:
n, z = A
if n == 1: # b == Dz/z
raise NotImplementedError("is_deriv_in_field() is required to "
" solve this problem.")
# if z*c == Dp for p in k[t] and deg(p) <= n:
# return p/z
# else:
# raise NonElementaryIntegralException
if c.is_zero:
return c # return 0
if n < c.degree(DE.t):
raise NonElementaryIntegralException
q = Poly(0, DE.t)
while not c.is_zero:
m = c.degree(DE.t)
if n < m:
raise NonElementaryIntegralException
with DecrementLevel(DE):
a2a, a2d = frac_in(c.LC(), DE.t)
sa, sd = rischDE(ba, bd, a2a, a2d, DE)
stm = Poly(sa.as_expr()/sd.as_expr()*DE.t**m, DE.t, expand=False)
q += stm
n = m - 1
c -= b*stm + derivation(stm, DE)
return q
def cancel_exp(b, c, n, DE):
"""
Poly Risch Differential Equation - Cancellation: Hyperexponential case.
Given a derivation D on k[t], n either an integer or +oo, b in k, and
c in k[t] with Dt/t in k and b != 0, either raise
NonElementaryIntegralException, in which case the equation Dq + b*q == c
has no solution of degree at most n in k[t], or a solution q in k[t] of
this equation with deg(q) <= n.
"""
from sympy.integrals.prde import parametric_log_deriv
eta = DE.d.quo(Poly(DE.t, DE.t)).as_expr()
with DecrementLevel(DE):
etaa, etad = frac_in(eta, DE.t)
ba, bd = frac_in(b, DE.t)
A = parametric_log_deriv(ba, bd, etaa, etad, DE)
if A is not None:
a, m, z = A
if a == 1:
raise NotImplementedError("is_deriv_in_field() is required to "
"solve this problem.")
# if c*z*t**m == Dp for p in k<t> and q = p/(z*t**m) in k[t] and
# deg(q) <= n:
# return q
# else:
# raise NonElementaryIntegralException
if c.is_zero:
return c # return 0
if n < c.degree(DE.t):
raise NonElementaryIntegralException
q = Poly(0, DE.t)
while not c.is_zero:
m = c.degree(DE.t)
if n < m:
raise NonElementaryIntegralException
# a1 = b + m*Dt/t
a1 = b.as_expr()
with DecrementLevel(DE):
# TODO: Write a dummy function that does this idiom
a1a, a1d = frac_in(a1, DE.t)
a1a = a1a*etad + etaa*a1d*Poly(m, DE.t)
a1d = a1d*etad
a2a, a2d = frac_in(c.LC(), DE.t)
sa, sd = rischDE(a1a, a1d, a2a, a2d, DE)
stm = Poly(sa.as_expr()/sd.as_expr()*DE.t**m, DE.t, expand=False)
q += stm
n = m - 1
c -= b*stm + derivation(stm, DE) # deg(c) becomes smaller
return q
def solve_poly_rde(b, cQ, n, DE, parametric=False):
"""
Solve a Polynomial Risch Differential Equation with degree bound n.
This constitutes step 4 of the outline given in the rde.py docstring.
For parametric=False, cQ is c, a Poly; for parametric=True, cQ is Q ==
[q1, ..., qm], a list of Polys.
"""
from sympy.integrals.prde import (prde_no_cancel_b_large,
prde_no_cancel_b_small)
# No cancellation
if not b.is_zero and (DE.case == 'base' or
b.degree(DE.t) > max(0, DE.d.degree(DE.t) - 1)):
if parametric:
return prde_no_cancel_b_large(b, cQ, n, DE)
return no_cancel_b_large(b, cQ, n, DE)
elif (b.is_zero or b.degree(DE.t) < DE.d.degree(DE.t) - 1) and \
(DE.case == 'base' or DE.d.degree(DE.t) >= 2):
if parametric:
return prde_no_cancel_b_small(b, cQ, n, DE)
R = no_cancel_b_small(b, cQ, n, DE)
if isinstance(R, Poly):
return R
else:
# XXX: Might k be a field? (pg. 209)
h, b0, c0 = R
with DecrementLevel(DE):
b0, c0 = b0.as_poly(DE.t), c0.as_poly(DE.t)
if b0 is None: # See above comment
raise ValueError("b0 should be a non-Null value")
if c0 is None:
raise ValueError("c0 should be a non-Null value")
y = solve_poly_rde(b0, c0, n, DE).as_poly(DE.t)
return h + y
elif DE.d.degree(DE.t) >= 2 and b.degree(DE.t) == DE.d.degree(DE.t) - 1 and \
n > -b.as_poly(DE.t).LC()/DE.d.as_poly(DE.t).LC():
# TODO: Is this check necessary, and if so, what should it do if it fails?
# b comes from the first element returned from spde()
if not b.as_poly(DE.t).LC().is_number:
raise TypeError("Result should be a number")
if parametric:
raise NotImplementedError("prde_no_cancel_b_equal() is not yet "
"implemented.")
R = no_cancel_equal(b, cQ, n, DE)
if isinstance(R, Poly):
return R
else:
h, m, C = R
# XXX: Or should it be rischDE()?
y = solve_poly_rde(b, C, m, DE)
return h + y
else:
# Cancellation
if b.is_zero:
raise NotImplementedError("Remaining cases for Poly (P)RDE are "
"not yet implemented (is_deriv_in_field() required).")
else:
if DE.case == 'exp':
if parametric:
raise NotImplementedError("Parametric RDE cancellation "
"hyperexponential case is not yet implemented.")
return cancel_exp(b, cQ, n, DE)
elif DE.case == 'primitive':
if parametric:
raise NotImplementedError("Parametric RDE cancellation "
"primitive case is not yet implemented.")
return cancel_primitive(b, cQ, n, DE)
else:
raise NotImplementedError("Other Poly (P)RDE cancellation "
"cases are not yet implemented (%s)." % case)
if parametric:
raise NotImplementedError("Remaining cases for Poly PRDE not yet "
"implemented.")
raise NotImplementedError("Remaining cases for Poly RDE not yet "
"implemented.")
def rischDE(fa, fd, ga, gd, DE):
"""
Solve a Risch Differential Equation: Dy + f*y == g.
See the outline in the docstring of rde.py for more information
about the procedure used. Either raise NonElementaryIntegralException, in
which case there is no solution y in the given differential field,
or return y in k(t) satisfying Dy + f*y == g, or raise
NotImplementedError, in which case, the algorithms necessary to
solve the given Risch Differential Equation have not yet been
implemented.
"""
_, (fa, fd) = weak_normalizer(fa, fd, DE)
a, (ba, bd), (ca, cd), hn = normal_denom(fa, fd, ga, gd, DE)
A, B, C, hs = special_denom(a, ba, bd, ca, cd, DE)
try:
# Until this is fully implemented, use oo. Note that this will almost
# certainly cause non-termination in spde() (unless A == 1), and
# *might* lead to non-termination in the next step for a nonelementary
# integral (I don't know for certain yet). Fortunately, spde() is
# currently written recursively, so this will just give
# RuntimeError: maximum recursion depth exceeded.
n = bound_degree(A, B, C, DE)
except NotImplementedError:
# Useful for debugging:
# import warnings
# warnings.warn("rischDE: Proceeding with n = oo; may cause "
# "non-termination.")
n = oo
B, C, m, alpha, beta = spde(A, B, C, n, DE)
if C.is_zero:
y = C
else:
y = solve_poly_rde(B, C, m, DE)
return (alpha*y + beta, hn*hs)
| [
"[email protected]"
]
| |
c85c460a448c4a63602d3d96b271abbdb9f524f3 | afbcda99c55aeb26360d593f1abe99afbbb1d1b7 | /Python/Temppraw/temppraw.py | e6309910cf300fdc9d0c9bc4b437f7b346c77495 | []
| no_license | cstuartroe/misc | b4c4fb2f8ef7341acf99f35e9eece1cf3769a0fc | 307b00c3ab7e51204401e84bd6c4466315889dfe | refs/heads/master | 2023-08-17T19:07:59.535257 | 2023-08-06T16:07:27 | 2023-08-06T16:07:27 | 156,424,382 | 0 | 0 | null | 2022-05-25T02:00:29 | 2018-11-06T17:50:34 | Java | UTF-8 | Python | false | false | 859 | py |
import praw
import time
import datetime
current = time.time()
reddit = praw.Reddit(client_id='PTofuEjEjIPbcg',
client_secret='_R0b3zmCvjXGPseYbaPIUEnZAlU',
password='LinguisticsIsCool208',
user_agent='testscript by /u/conor_emily_ling208',
username='conor_emily_ling208')
def get_worthwhile_posts():
reddit.read_only = True
rWP = reddit.subreddit('WritingPrompts')
posts = []
for submission in rWP.new(limit=500):
timestamp = submission.created
elapsed = int(current - timestamp + 28800)
score = submission.score
if (elapsed < 86400) and (score >= 4) and (elapsed/score < 3600) and (submission.num_comments <= 1):
posts.append({'title':submission.title,'score':score,'elapsed':elapsed//3600})
return posts
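# Example usage (sketch):
# for post in get_worthwhile_posts():
#     print(post['score'], post['elapsed'], post['title'])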
| [
"[email protected]"
]
| |
fc216362a02225b2ff41c9073d1ca8277c088188 | 37c3b81ad127c9e3cc26fa9168fda82460ca9bda | /Baekjoon/boj_10711_모래성.py | 3f23b08f4cb51446ea056e929f61c0ec51f93fab | []
| no_license | potomatoo/TIL | 5d85b69fdaed68966db7cfe2a565b7c64ed3e816 | 395dc190fa13e5ed036e1e3c7d9e0bc2e1ee4d6c | refs/heads/master | 2021-07-08T16:19:40.410097 | 2021-04-19T02:33:40 | 2021-04-19T02:33:40 | 238,872,774 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 700 | py |
N, M = map(int,input().split())
castle = [list(map(str,[*input()])) for _ in range(N)]
search = []
for y in range(N):
for x in range(M):
if castle[y][x] != '.' and castle[y][x] != '9':
castle[y][x] = int(castle[y][x])
search.append((y, x, 0, 0))
dy = [-1, 1, 0, 0, -1, -1, 1, 1]
dx = [0, 0, -1, 1, -1, 1, 1, -1]
for y in range(N):
for x in range(M):
if castle[y][x] != '.' and castle[y][x] != '9':
for i in range(4):
ty = y + dy[i]
tx = x + dx[i]
if ty < 0 or tx < 0 or ty > N-1 or tx > M-1:
continue
if castle[ty][tx] == '.':
search
| [
"[email protected]"
]
| |
602534b2b5640835f91753fe88773c67f8116f05 | 7da6ecf172b3e9354d93ddfe06f87b930fad90b3 | /pickup/generator_profile/folder.py | 8b6f79f8d7e9a8b25b0989cacbb483ad3f55c10e | []
| no_license | exhuma/pickup | 05f8d271de95d76b337a6994dcd21799fe0e4b34 | 688b05d0ae1276dcc386b45c8ddb1cea71b15cb1 | refs/heads/master | 2016-09-06T01:21:08.343607 | 2011-07-15T15:09:10 | 2011-07-15T15:09:10 | 1,059,260 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,683 | py | """
The folder plugin create a bzipped tar file for a specific folder. It is also
possible to specify a parent folder and create individual tarballs for each
folder and one for files beneath that folder.
Configuration
~~~~~~~~~~~~~
The following fields are used by this plugin:
**path** (string)
The folder
**split** (boolean) *optional*
If set to "True", this module will create individual tarballs (Default =
False).
Configuration Example
~~~~~~~~~~~~~~~~~~~~~
.. code-block:: python
dict(
name = 'My home folder',
profile = 'folder',
config = dict(
path = '/home/me',
split = True,
)
),
"""
import logging
import tarfile
import re
from os.path import exists, join, abspath, isdir
import os
LOG = logging.getLogger(__name__)
API_VERSION = (2,0)
CONFIG = {}
SOURCE = {}
def init(source):
"""
If split is set, this strategy will create one folder per subfolder in the
given path.
"""
CONFIG.update(source['config'])
SOURCE.update(source)
LOG.debug("Initialised '%s' with %r" % ( __name__, CONFIG))
def run(staging_area):
if not exists(CONFIG['path']):
LOG.error("Path '%s' does not exist! Skipping!" % CONFIG['path'])
return
if CONFIG.get("split", False):
create_split_tar(staging_area)
else:
create_simple_tar(staging_area)
def create_split_tar(staging_area):
"""
Creates one tar file for each folder found in CONFIG['path']. If normal
files reside in that folder, they will be collected into a special tarfile
named "__PICKUP_FILES__.tar.bz2"
@param staging_area: The target folder
"""
if not isdir(CONFIG['path']):
LOG.error("Impossible to create a split tar! %s is not a folder!" % CONFIG['path'])
return
LOG.info("Creating tarball for each folder inside %s" % CONFIG['path'])
if not exists(staging_area):
os.makedirs( staging_area )
elif not isdir(staging_area):
LOG.error("'%s' exists and is not a folder! Skipping" % staging_area)
return
files = []
for entry in os.listdir(CONFIG['path']):
entrypath = join(CONFIG['path'], entry)
# Add directories directly, and add normal files into a special filename
if not isdir(entrypath):
files.append(entrypath)
continue
tarname = join(staging_area, "%s.tar.bz2" % entry)
LOG.info("Writing to '%s'" % abspath(tarname))
tar = tarfile.open(abspath(tarname), "w:bz2")
tar.add(entrypath)
tar.close()
if files:
tarname = join(staging_area, "__PICKUP_FILES__.tar.bz2")
LOG.info("Writing remaining files to '%s'" % abspath(tarname))
tar = tarfile.open(abspath(tarname), "w:bz2")
for file in files:
LOG.info(" Adding %s" % file)
tar.add(file)
tar.close()
def get_basename():
"""
Create a 'clean' filename
"""
# replace non-ascii characters with underscores
basename = re.sub( r'[^a-zA-Z0-9]', "_", SOURCE['name'] )
# now remove all leading/trainling underscores
basename = basename.strip("_")
# prevent accidental overwrites
counter = 0
while exists(basename):
counter += 1
LOG.debug( "File %s exists. Adding a counter." % basename )
basename = "%s-%d" % (basename, counter)
return basename
def create_simple_tar(staging_area):
LOG.info("Creating tarball for path %s" % CONFIG['path'])
tarname = "%s.tar.bz2" % get_basename()
# put it into the staging area
tarname = join(staging_area, tarname)
LOG.info("Writing to '%s'" % abspath(tarname))
tar = tarfile.open(abspath(tarname), "w:bz2")
tar.add( CONFIG['path'] )
tar.close()
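# Hedged usage sketch (mirrors the configuration example in the module
# docstring; the staging path below is illustrative only):
#   init(dict(name='My home folder', profile='folder',
#             config=dict(path='/home/me', split=True)))
#   run('/tmp/pickup-staging')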
| [
"[email protected]"
]
| |
4146e50a8525f3747cb0dca5aef9030f0519f149 | 295f34f4411d984f0ff6026be6e96fe134dc1550 | /home/pi/antes/consulta.py | 92d50b55d536b5cbe4a174bb77c1f2614a735a08 | []
| no_license | mcashjavier/disco-linux-raspy | 1e3fed914b6040fa9972e7cfc7357ecb72070e8c | 8c23103cf089059fbdadfad8cfb7059c1580da83 | refs/heads/master | 2022-12-20T17:06:39.967203 | 2019-03-12T12:09:22 | 2019-03-12T20:01:10 | 175,072,541 | 0 | 3 | null | 2022-12-18T06:59:27 | 2019-03-11T19:44:12 | null | UTF-8 | Python | false | false | 2,093 | py | from tkinter import *
import socket
import sys
def Salir():
ventana.destroy()
def MiSaldo():
if tarjeta.get():
# Create a TCP/IP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Connect the socket to the port where the server is listening
server_address = ('181.164.238.87', 123)
print('connecting to %s port %s' % server_address)
sock.connect(server_address)
try:
# Send data
message = 'C' + tarjeta.get() #'C;1921000005618'
print('sending "%s"' % message)
sock.send(message.encode())
# Look for the response
amount_received = 0
amount_expected = 1#len(message)
while amount_received < amount_expected:
data = sock.recv(100)
amount_received += len(data)
print('received "%s"' % data.decode())
# Saldo.text=
Saldo.config(text = data.decode())
finally:
print('closing socket')
sock.close()
else:
Saldo.config(text = 'Escribi algo antes de mandar!!! *-*')
ventana = Tk()
ventana.attributes("-fullscreen", True)
ventana.configure(background='black')
#ventana.geometry("600x300+0+0")
ventana.title("Consulta de Saldo")
tarjeta = Entry(ventana, text = "-",justify='center',fg="white", font=("Helvetica", 14),background='black')
tarjeta.place(x = (ventana.winfo_screenwidth())/2-(ventana.winfo_screenwidth()/2), y = 140,width=ventana.winfo_screenwidth())
Saldo = Label(ventana, text = "-",fg="green", font=("Helvetica", 16),background='black')
Saldo.place(x = (ventana.winfo_screenwidth())/2-(ventana.winfo_screenwidth()/2), y = 160,width=ventana.winfo_screenwidth())
btn_estado = Button(ventana, text = "Consultar Saldo", command = MiSaldo,)
btn_estado.place(x = (ventana.winfo_screenwidth())/2-100, y = 100,width=200)
bsalir = Button(ventana, text = "Salir", command = Salir)
bsalir.place(x = (ventana.winfo_screenwidth())/2-100, y = 200,width=200)
ventana.mainloop() | [
"[email protected]"
]
| |
6e785160cfd23b23fd62580b0dd68b6ef5ba14f8 | 212daad1c33e796944fff2ca41788b872f6e6a0e | /plane_shotting/settings.py | 522e673cca183d9cda49aedf39e87315251c4015 | []
| no_license | walter0909/python_scripts | ecae13b8931f791d241b8902a76629683e2fdccd | 19156358ced7b8cc0107d390e74203ca5b320cb5 | refs/heads/master | 2023-02-25T12:53:53.795302 | 2021-02-03T06:07:28 | 2021-02-03T06:07:28 | 297,516,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py |
class Settings():
"""settings """
def __init__(self):
#screen
self.screen_width = 800
self.screen_height = 600
self.bg_color = (230,230,230)
self.ship_speed_factor = 1.5
self.bullet_speed_factor = 1
self.bullet_width = 3
self.bullet_height = 15
self.bullet_color = (60, 60, 60)
self.bullets_allowed = 3
| [
"[email protected]"
]
| |
529f4017618780f8663e90680936300e7bd47b4b | be7949a09fa8526299b42c4c27adbe72d59d2201 | /cnns/foolbox/foolbox_2_3_0/v1/attacks/decoupled_direction_norm.py | ba84884374e569e8613766c56669d5399a107841 | [
"MIT",
"Apache-2.0"
]
| permissive | adam-dziedzic/bandlimited-cnns | 375b5cccc7ab0f23d2fbdec4dead3bf81019f0b4 | 81aaa27f1dd9ea3d7d62b661dac40cac6c1ef77a | refs/heads/master | 2022-11-25T05:40:55.044920 | 2020-06-07T16:14:34 | 2020-06-07T16:14:34 | 125,884,603 | 17 | 5 | Apache-2.0 | 2022-11-21T21:01:46 | 2018-03-19T16:02:57 | Jupyter Notebook | UTF-8 | Python | false | false | 4,263 | py | import math
import numpy as np
import logging
from .base import Attack
from .base import call_decorator
class DecoupledDirectionNormL2Attack(Attack):
"""The Decoupled Direction and Norm L2 adversarial attack from [1]_.
References
----------
.. [1] Jérôme Rony, Luiz G. Hafemann, Luiz S. Oliveira, Ismail Ben Ayed,
Robert Sabourin, Eric Granger, "Decoupling Direction and Norm for Efficient
Gradient-Based L2 Adversarial Attacks and Defenses",
https://arxiv.org/abs/1811.09600
"""
@call_decorator
def __call__(
self,
input_or_adv,
label=None,
unpack=True,
steps=100,
gamma=0.05,
initial_norm=1,
quantize=True,
levels=256,
):
"""The Decoupled Direction and Norm L2 adversarial attack.
Parameters
----------
input_or_adv : `numpy.ndarray` or :class:`Adversarial`
The original, unperturbed input as a `numpy.ndarray` or
an :class:`Adversarial` instance.
label : int
The reference label of the original input. Must be passed
if `a` is a `numpy.ndarray`, must not be passed if `a` is
an :class:`Adversarial` instance.
unpack : bool
If true, returns the adversarial input, otherwise returns
the Adversarial object.
steps : int
Number of steps for the optimization.
gamma : float, optional
Factor by which the norm will be modified.
new_norm = norm * (1 + or - gamma).
        initial_norm : float, optional
Initial value for the norm.
quantize : bool, optional
If True, the returned adversarials will have quantized values to
the specified number of levels.
levels : int, optional
Number of levels to use for quantization
(e.g. 256 for 8 bit images).
"""
a = input_or_adv
if not a.has_gradient():
logging.fatal(
"Applied gradient-based attack to model that "
"does not provide gradients."
)
return
min_, max_ = a.bounds()
s = max_ - min_
if a.target_class is not None:
multiplier = -1
attack_class = a.target_class
else:
multiplier = 1
attack_class = a.original_class
norm = initial_norm
unperturbed = a.unperturbed
perturbation = np.zeros_like(unperturbed)
for i in range(steps):
logits, grad, is_adv = a.forward_and_gradient_one(
unperturbed + perturbation, attack_class, strict=True
)
# renorm gradient and handle 0-norm gradient
grad_norm = np.linalg.norm(grad)
if grad_norm == 0: # pragma: no cover
grad = np.random.normal(size=grad.shape)
grad_norm = np.linalg.norm(grad)
grad *= s / grad_norm
            # update perturbation
lr = cosine_learning_rate(i, steps, 1.0, 0.01)
perturbation += lr * multiplier * grad
# update norm value and renorm perturbation accordingly
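            # a successful (adversarial) step shrinks the norm by gamma,
            # while a failed step grows it by gamma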
norm *= 1 - (2 * is_adv - 1) * gamma
perturbation *= s * norm / np.linalg.norm(perturbation)
if quantize:
perturbation = (perturbation - min_) / s
perturbation = np.round(perturbation * (levels - 1))
perturbation /= levels - 1
perturbation = perturbation * s + min_
perturbation = np.clip(perturbation, min_ - unperturbed, max_ - unperturbed)
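# Hedged usage sketch (foolbox v1-style attack API; `fmodel`, `image` and
# `label` are assumptions, not part of this module):
#   attack = DecoupledDirectionNormL2Attack(fmodel)
#   adversarial = attack(image, label, steps=100, gamma=0.05)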
def cosine_learning_rate(current_step, max_steps, init_lr, final_lr):
"""Cosine annealing schedule for learning rate.
Parameters
----------
current_step : int
Current step in the optimization
max_steps : int
Total number of steps of the optimization.
init_lr : float
Initial learning rate.
final_lr : float
Final learning rate.
Returns
-------
float
The current learning rate.
"""
alpha = (1 + math.cos(math.pi * current_step / max_steps)) / 2
return final_lr + alpha * (init_lr - final_lr)
| [
"[email protected]"
]
| |
732590f7535f493bc8add88cca06fc797937dc05 | 2874b52c0234d7e71031d4c22dedb3b24e539b2c | /backend/asetbeta_23682/wsgi.py | 2ec019498b29468afbec180a22811cd744c8072d | []
| no_license | crowdbotics-apps/asetbeta-23682 | d81ed8125b8c2ebb12b43d25d8068ff9ca71e344 | 128708fff3c98680a239a0f198aed5937950c5bf | refs/heads/master | 2023-02-12T09:51:04.507332 | 2021-01-14T14:38:53 | 2021-01-14T14:38:53 | 327,014,116 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
WSGI config for asetbeta_23682 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "asetbeta_23682.settings")
application = get_wsgi_application()
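# Hedged deployment sketch: a WSGI server imports this callable, e.g.
#   gunicorn asetbeta_23682.wsgi:application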
| [
"[email protected]"
]
| |
55a35f079b434c08a95524dea40c0fc2846bb651 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2586/50263/236178.py | bd5fcad9a1a5d3b23ec8f4ae01903de54d954987 | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 260 | py | a = eval(input())
b = eval(input())
c = eval(input())
s = []
d1 = abs(b-a)
d2 = abs(c-b)
min_move = 0
max_move = d1+d2-2
if max_move != 0:
if min(d1,d2) < 3:
min_move = 1
else:
min_move = 2
s.append(min_move)
s.append(max_move)
print(s) | [
"[email protected]"
]
| |
0b112bb5b099f40be9e2e6bdaa122ab85ab5752e | 4e14341a1b89dfbe67c81b7a4ea52a458e696132 | /mysitedday61/mysitedday61/urls.py | b8883c4bf9f05dfa1c8ea73a53c7da198d7aaa1f | []
| no_license | 520wsl/python-django-test | fae13cdb9fb6c3001a08b5f3f6bc3262a637922a | bbd111dd84a65315b727adef26d92aeb416daba0 | refs/heads/master | 2020-05-26T06:39:12.624462 | 2019-06-02T02:54:03 | 2019-06-02T02:54:03 | 188,138,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 815 | py | """mysitedday61 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from app01 import views
urlpatterns = [
path('login/', views.login),
path('user_list/', views.user_list),
]
| [
"[email protected]"
]
| |
0a99d4c3e732dfba797afb7b87f336b19d5fa9d6 | 802c002ecd90be6b489e0250ec4454a11c17ed1d | /src/homework/homework12/converter.py | a0a150b76663e22b8968c182983fc71228c948fd | [
"MIT"
]
| permissive | acc-cosc-1336/cosc-1336-spring-2018-EricScotty | 3a088285ae853a9ff5d747f2301f755c5e3749b3 | 80c0249a583dc178cfc7bb95b851d7f3240dc3e9 | refs/heads/master | 2021-09-14T05:54:02.132705 | 2018-05-08T23:27:04 | 2018-05-08T23:27:04 | 118,071,042 | 0 | 0 | MIT | 2018-04-23T02:51:05 | 2018-01-19T03:22:13 | Python | UTF-8 | Python | false | false | 103 | py | class Converter:
def get_miles_from_km(self, km):
return float(round(km * 0.6214, 2))
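# Hedged usage sketch (pure arithmetic on the 0.6214 factor above):
#   Converter().get_miles_from_km(10)  # -> 6.21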
| [
"[email protected]"
]
| |
55e46cda0dd6de6b2889f6a697eaed55932bdb46 | 70f1c694bea6178c98b134b9c44952ef6693be9f | /Manuscript/figure/Figure_MITE_auto_promoter/Auto_target/scripts/rename_hit-flank-files_by_peps-to-repbase_results.py | dded703f195e797fa3373153c85e0299798aa992 | []
| no_license | JinfengChen/Rice_pop | 5c19c5837805e51ddb3b2ffba4baffdc59c9bfd3 | ef272bf4825b29610c94de55eb53f231fb5febc6 | refs/heads/master | 2020-04-07T04:55:36.606594 | 2018-03-02T16:52:53 | 2018-03-02T16:52:53 | 33,501,941 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,659 | py | #!/usr/bin/env python
import sys
import os
import os.path
import fastaIO
import fnmatch
from collections import OrderedDict
args = sys.argv[1:]
arg_len = len(args)
print "Argument length:", arg_len
def usage():
print """
usage:
python rename_hit-flank-files_by_peps-to-repbase_results.py <DNA_TPase_match_file> <retro_match_file> <heli-mav_match_file> <no_match_file> <hit_seq_file> <flank_seq_file> <cluster_output_path1> <cluster_output_path2> <strand_in_match_titles?>
    The two cluster paths are optional. The hit and flank file processing can be skipped by using 'na' without quotes as the path. If the sequences to have protein hit info added contain strand information and the match files do not, it must be split off for the script to work. Putting any value will trigger this option.
"""
sys.exit(-1)
if (len(args) != 6 and len(args) != 7 and len(args) != 8 and len(args) != 9) or sys.argv[1] == '-h' or sys.argv[1] == '-help' or sys.argv[1] == '-H' or sys.argv[1] == '-Help' or sys.argv[1] == '--h' or sys.argv[1] == '--help':
usage()
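# Hedged example invocation (file names below are illustrative only):
#   python rename_hit-flank-files_by_peps-to-repbase_results.py \
#       dna_matches.txt retro_matches.txt heli-mav_matches.txt no_matches.txt \
#       hits.fa flanks.fa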
wanted_dict = {}
#import DNA match file
dna_in = open(sys.argv[1], "r")
info = dna_in.readlines()
c = 0
for line in info:
line = line.strip()
line = line.split("\t")
if "_protein" in line[0]:
name = line[0].split("_protein")[0]
elif "_genscan" in line[0]:
name = line[0].split("_genscan")[0]
if line[3] == "?":
if "SPM" in line[2] or "OSHOOTER" in line[2] or "PSL" in line[2] or "SmTRC1" in line[2]:
line[3] = "CMC"
elif "HARB" in line[2]:
line[3] = "PIF_harbinger"
else:
line[2] = line[2].split(":")
line[3] = "Uncertain:" + line[2][0]
if "/" in line[3]:
line[3] = line[3].replace("/", "_")
new_name = "_" + line[3]
if c < 4:
print ' '.join(["DNA match:", name, new_name])
if name not in wanted_dict:
wanted_dict[name] = new_name
c += 1
dna_in.close()
print "\n"
retro_in = open(sys.argv[2], "r")
info = retro_in.readlines()
c = 0
for line in info:
line = line.strip()
line = line.split("\t")
if "_protein" in line[0]:
name = line[0].split("_protein")[0]
elif "_genscan" in line[0]:
name = line[0].split("_genscan")[0]
new_name = "_retro"
if c < 4:
print ' '.join(["Retro:", name, new_name])
if name not in wanted_dict:
wanted_dict[name] = new_name
c += 1
retro_in.close()
print "\n"
heli_in = open(sys.argv[3], "r")
info = heli_in.readlines()
c = 0
for line in info:
line = line.strip()
line = line.split("\t")
if "_protein" in line[0]:
name = line[0].split("_protein")[0]
elif "_genscan" in line[0]:
name = line[0].split("_genscan")[0]
ele = line[1].rsplit(":", 1)[1]
new_name = "_" + ele
if c < 4:
print ' '.join(["Helitron:", name, new_name])
if name not in wanted_dict:
wanted_dict[name] = new_name
c += 1
heli_in.close()
print "\n"
no_match_in = open(sys.argv[4], "r")
info = no_match_in.readlines()
c = 0
for line in info:
line = line.strip()
if "_protein" in line:
name = line.split("_protein")[0]
elif "_genscan" in line:
name = line.split("_genscan")[0]
new_name = "_Unknown"
if c < 4:
print ' '.join(["Unknown:", name, new_name])
if name not in wanted_dict:
wanted_dict[name] = new_name
c += 1
no_match_in.close()
print "\n"
#for key in wanted_dict:
# print key, " ", wanted_dict[key]
if sys.argv[5] != "na":
c = 0
#import hit sequence file
hit_in = open(sys.argv[5], "r")
hit_track = OrderedDict()
for title, seq in fastaIO.FastaGeneralIterator(hit_in):
if arg_len == 9:
if "plus_" in title:
title = title.rsplit("_plus", 1)[0]
elif "minus_" in title:
title = title.rsplit("_minus", 1)[0]
else:
if "plus_" in title:
title = title.rsplit("plus_", 1)[0] + "plus"
elif "minus_" in title:
title = title.rsplit("minus_", 1)[0] + "minus"
if c < 4:
print "hit title:", title
if title in wanted_dict:
title = title + wanted_dict[title]
hit_track[title] = seq
else:
#print "1, Title:", title
title = title + "_unpredicted"
hit_track[title] = seq
c += 1
hit_in.close()
parts = os.path.splitext(sys.argv[5])
hit_out = open(parts[0] + "_match-info" + parts[1], "w", 1)
for keys in hit_track:
print>>hit_out, ">" + keys + "\n" + hit_track[keys]
hit_out.close()
print "\n"
if sys.argv[6] != "na":
#import flank sequence file
flank_in = open(sys.argv[6], "r")
flank_track = OrderedDict()
c = 0
for title, seq in fastaIO.FastaGeneralIterator(flank_in):
if arg_len == 9:
if "plus_" in title:
title = title.rsplit("_plus", 1)[0]
elif "minus_" in title:
title = title.rsplit("_minus", 1)[0]
else:
if "plus_" in title:
title = title.rsplit("plus_", 1)[0] + "plus"
elif "minus_" in title:
title = title.rsplit("minus_", 1)[0] + "minus"
if c < 4:
print "flank title:", title
if title in wanted_dict:
title = title + wanted_dict[title]
flank_track[title] = seq
else:
title = title + "_unpredicted"
flank_track[title] = seq
c += 1
flank_in.close()
parts = os.path.splitext(sys.argv[6])
flank_out = open(parts[0] + "_match-info" + parts[1], "w", 1)
for keys in flank_track:
print>>flank_out, ">" + keys + "\n" + flank_track[keys]
flank_out.close()
if arg_len >= 7:
print "Loop 7 yes!"
d = 0
base_file_list = os.listdir(sys.argv[7])
base_dict = {}
for item in base_file_list:
base_dict[item] = 1
for root, dirs, files in os.walk(sys.argv[7]):
for filename in files:
if fnmatch.fnmatch(filename, '*.msa') or fnmatch.fnmatch(filename, '*.group*split') or fnmatch.fnmatch(filename, '*.fa') or fnmatch.fnmatch(filename, '*.final'):
fpath = os.path.join(root, filename)
in_handle = open(fpath, "r")
track_dict = OrderedDict()
c = 0
for title, seq in fastaIO.FastaGeneralIterator(in_handle):
if arg_len == 9:
if "_plus" in title:
title = title.rsplit("_plus", 1)[0]
elif "_minus" in title:
title = title.rsplit("_minus", 1)[0]
else:
if "_plus" in title:
title = title.rsplit("_plus", 1)[0] + "_plus"
elif "_minus" in title:
title = title.rsplit("_minus", 1)[0] + "_minus"
if c < 4 and d < 2:
print "Seven loop title:", title
if title in wanted_dict:
title = title + wanted_dict[title]
track_dict[title] = seq
else:
title = title + "_unpredicted"
track_dict[title] = seq
c += 1
in_handle.close()
out_handle = open(fpath, "w", 1)
for keys in track_dict:
print>>out_handle, ">" + keys + "\n" + track_dict[keys]
out_handle.close()
d += 1
if arg_len >= 8:
print "Loop 8 yes!"
d = 0
for root, dirs, files in os.walk(sys.argv[8]):
for filename in files:
if fnmatch.fnmatch(filename, '*.fa') or fnmatch.fnmatch(filename, '*.msa') or fnmatch.fnmatch(filename, '*.group*split') or fnmatch.fnmatch(filename, '*.final'):
fpath = os.path.join(root, filename)
in_handle = open(fpath, "r")
track_dict = OrderedDict()
c = 0
for title, seq in fastaIO.FastaGeneralIterator(in_handle):
if arg_len == 9:
if "_plus" in title:
title = title.rsplit("_plus", 1)[0]
elif "_minus" in title:
title = title.rsplit("_minus", 1)[0]
else:
if "_plus" in title:
title = title.rsplit("_plus", 1)[0] + "_plus"
elif "_minus" in title:
title = title.rsplit("_minus", 1)[0] + "_minus"
if c < 4 and d < 2:
print "Eight loop title:", title
if title in wanted_dict:
title = title + wanted_dict[title]
track_dict[title] = seq
else:
title = title + "_unpredicted"
track_dict[title] = seq
c += 1
in_handle.close()
out_handle = open(fpath, "w", 1)
for keys in track_dict:
print>>out_handle, ">" + keys + "\n" + track_dict[keys]
out_handle.close()
d += 1
| [
"[email protected]"
]
| |
b11d87f75fac838ba16019628037010af0a05b95 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/RFC1253-MIB.py | 3d3a8a932440536f5c755452f34497a2a851a3d9 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 50,680 | py | #
# PySNMP MIB module RFC1253-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RFC1253-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:16:27 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
iso, MibIdentifier, ModuleIdentity, Counter64, NotificationType, Integer32, IpAddress, Bits, TimeTicks, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Counter32, ObjectIdentity, mib_2, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "MibIdentifier", "ModuleIdentity", "Counter64", "NotificationType", "Integer32", "IpAddress", "Bits", "TimeTicks", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Counter32", "ObjectIdentity", "mib-2", "Unsigned32")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
ospf = MibIdentifier((1, 3, 6, 1, 2, 1, 14))
class AreaID(IpAddress):
pass
class RouterID(IpAddress):
pass
class Metric(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 65535)
class BigMetric(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 16777215)
class TruthValue(Integer32):
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("true", 1), ("false", 2))
class Status(Integer32):
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("enabled", 1), ("disabled", 2))
class Validation(Integer32):
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("valid", 1), ("invalid", 2))
class PositiveInteger(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 4294967295)
class HelloRange(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 65535)
class UpToMaxAge(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(1, 3600)
class InterfaceIndex(Integer32):
pass
class DesignatedRouterPriority(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 255)
class TOSType(Integer32):
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 31)
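# Hedged sketch (not part of the generated MIB): the textual conventions
# above enforce their SMI ranges when instantiated, e.g.
#   Metric(10)     # valid, within 1..65535
#   Metric(70000)  # raises pyasn1.error.ValueConstraintError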
ospfGeneralGroup = MibIdentifier((1, 3, 6, 1, 2, 1, 14, 1))
ospfRouterId = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 1), RouterID()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfRouterId.setReference('OSPF Version 2, C.1 Global parameters')
if mibBuilder.loadTexts: ospfRouterId.setStatus('mandatory')
if mibBuilder.loadTexts: ospfRouterId.setDescription("A 32-bit integer uniquely identifying the router in the Autonomous System. By convention, to ensure uniqueness, this should default to the value of one of the router's IP interface addresses.")
ospfAdminStat = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 2), Status()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfAdminStat.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAdminStat.setDescription("The administrative status of OSPF in the router. The value 'enabled' denotes that the OSPF Process is active on at least one interface; 'disabled' disables it on all interfaces.")
ospfVersionNumber = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2))).clone(namedValues=NamedValues(("version2", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVersionNumber.setReference('OSPF Version 2, Title')
if mibBuilder.loadTexts: ospfVersionNumber.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVersionNumber.setDescription('The current version number of the OSPF protocol is 2.')
ospfAreaBdrRtrStatus = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 4), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAreaBdrRtrStatus.setReference('OSPF Version 2, Section 3 Splitting the AS into Areas')
if mibBuilder.loadTexts: ospfAreaBdrRtrStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaBdrRtrStatus.setDescription('A flag to note whether this router is an area border router.')
ospfASBdrRtrStatus = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 5), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfASBdrRtrStatus.setReference('OSPF Version 2, Section 3.3 Classification of routers')
if mibBuilder.loadTexts: ospfASBdrRtrStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ospfASBdrRtrStatus.setDescription('A flag to note whether this router is an Autonomous System border router.')
ospfExternLSACount = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfExternLSACount.setReference('OSPF Version 2, Appendix A.4.5 AS external link advertisements')
if mibBuilder.loadTexts: ospfExternLSACount.setStatus('mandatory')
if mibBuilder.loadTexts: ospfExternLSACount.setDescription('The number of external (LS type 5) link-state advertisements in the link-state database.')
ospfExternLSACksumSum = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfExternLSACksumSum.setStatus('mandatory')
if mibBuilder.loadTexts: ospfExternLSACksumSum.setDescription("The 32-bit unsigned sum of the LS checksums of the external link-state advertisements contained in the link-state database. This sum can be used to determine if there has been a change in a router's link state database, and to compare the link-state database of two routers.")
ospfTOSSupport = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 8), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfTOSSupport.setReference('OSPF Version 2, Appendix F.1.2 Optional TOS support')
if mibBuilder.loadTexts: ospfTOSSupport.setStatus('mandatory')
if mibBuilder.loadTexts: ospfTOSSupport.setDescription("The router's support for type-of-service routing.")
ospfOriginateNewLSAs = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfOriginateNewLSAs.setStatus('mandatory')
if mibBuilder.loadTexts: ospfOriginateNewLSAs.setDescription('The number of new link-state advertisements that have been originated. This number is incremented each time the router originates a new LSA.')
ospfRxNewLSAs = MibScalar((1, 3, 6, 1, 2, 1, 14, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfRxNewLSAs.setStatus('mandatory')
if mibBuilder.loadTexts: ospfRxNewLSAs.setDescription('The number of link-state advertisements received determined to be new instantiations. This number does not include newer instantiations of self-originated link-state advertisements.')
ospfAreaTable = MibTable((1, 3, 6, 1, 2, 1, 14, 2), )
if mibBuilder.loadTexts: ospfAreaTable.setReference('OSPF Version 2, Section 6 The Area Data Structure')
if mibBuilder.loadTexts: ospfAreaTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaTable.setDescription("Information describing the configured parameters and cumulative statistics of the router's attached areas.")
ospfAreaEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 2, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfAreaId"))
if mibBuilder.loadTexts: ospfAreaEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaEntry.setDescription("Information describing the configured parameters and cumulative statistics of one of the router's attached areas.")
ospfAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 1), AreaID()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfAreaId.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaId.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaId.setDescription('A 32-bit integer uniquely identifying an area. Area ID 0.0.0.0 is used for the OSPF backbone.')
ospfAuthType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfAuthType.setReference('OSPF Version 2, Appendix E Authentication')
if mibBuilder.loadTexts: ospfAuthType.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAuthType.setDescription('The authentication type specified for an area. Additional authentication types may be assigned locally on a per Area basis.')
ospfImportASExtern = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 3), TruthValue().clone('true')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfImportASExtern.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfImportASExtern.setStatus('mandatory')
if mibBuilder.loadTexts: ospfImportASExtern.setDescription("The area's support for importing AS external link- state advertisements.")
ospfSpfRuns = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfSpfRuns.setStatus('mandatory')
if mibBuilder.loadTexts: ospfSpfRuns.setDescription("The number of times that the intra-area route table has been calculated using this area's link-state database. This is typically done using Dijkstra's algorithm.")
ospfAreaBdrRtrCount = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAreaBdrRtrCount.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaBdrRtrCount.setDescription('The total number of area border routers reachable within this area. This is initially zero, and is calculated in each SPF Pass.')
ospfASBdrRtrCount = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfASBdrRtrCount.setStatus('mandatory')
if mibBuilder.loadTexts: ospfASBdrRtrCount.setDescription('The total number of Autonomous System border routers reachable within this area. This is initially zero, and is calculated in each SPF Pass.')
ospfAreaLSACount = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAreaLSACount.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaLSACount.setDescription("The total number of link-state advertisements in this area's link-state database, excluding AS External LSA's.")
ospfAreaLSACksumSum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfAreaLSACksumSum.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaLSACksumSum.setDescription("The 32-bit unsigned sum of the link-state advertisements' LS checksums contained in this area's link-state database. This sum excludes external (LS type 5) link-state advertisements. The sum can be used to determine if there has been a change in a router's link state database, and to compare the link-state database of two routers.")
ospfStubAreaTable = MibTable((1, 3, 6, 1, 2, 1, 14, 3), )
if mibBuilder.loadTexts: ospfStubAreaTable.setReference('OSPF Version 2, Appendix C.2, Area Parameters')
if mibBuilder.loadTexts: ospfStubAreaTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfStubAreaTable.setDescription('The set of metrics that will be advertised by a default Area Border Router into a stub area.')
ospfStubAreaEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 3, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfStubAreaID"), (0, "RFC1253-MIB", "ospfStubTOS"))
if mibBuilder.loadTexts: ospfStubAreaEntry.setReference('OSPF Version 2, Appendix C.2, Area Parameters')
if mibBuilder.loadTexts: ospfStubAreaEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfStubAreaEntry.setDescription('The metric for a given Type of Service that will be advertised by a default Area Border Router into a stub area.')
ospfStubAreaID = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 3, 1, 1), AreaID()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfStubAreaID.setStatus('mandatory')
if mibBuilder.loadTexts: ospfStubAreaID.setDescription('The 32 bit identifier for the Stub Area. On creation, this can be derived from the instance.')
ospfStubTOS = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 3, 1, 2), TOSType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfStubTOS.setStatus('mandatory')
if mibBuilder.loadTexts: ospfStubTOS.setDescription('The Type of Service associated with the metric. On creation, this can be derived from the instance.')
ospfStubMetric = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 3, 1, 3), BigMetric()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfStubMetric.setStatus('mandatory')
if mibBuilder.loadTexts: ospfStubMetric.setDescription('The metric value applied at the indicated type of service. By default, this equals the least metric at the type of service among the interfaces to other areas.')
ospfStubStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 3, 1, 4), Validation().clone('valid')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfStubStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ospfStubStatus.setDescription("This variable displays the validity or invalidity of the entry. Setting it to 'invalid' has the effect of rendering it inoperative. The internal effect (row removal) is implementation dependent.")
ospfLsdbTable = MibTable((1, 3, 6, 1, 2, 1, 14, 4), )
if mibBuilder.loadTexts: ospfLsdbTable.setReference('OSPF Version 2, Section 12 Link State Advertisements')
if mibBuilder.loadTexts: ospfLsdbTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbTable.setDescription("The OSPF Process's Links State Database.")
ospfLsdbEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 4, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfLsdbAreaId"), (0, "RFC1253-MIB", "ospfLsdbType"), (0, "RFC1253-MIB", "ospfLsdbLSID"), (0, "RFC1253-MIB", "ospfLsdbRouterId"))
if mibBuilder.loadTexts: ospfLsdbEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbEntry.setDescription('A single Link State Advertisement.')
ospfLsdbAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 1), AreaID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLsdbAreaId.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfLsdbAreaId.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbAreaId.setDescription('The 32 bit identifier of the Area from which the LSA was received.')
ospfLsdbType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("routerLink", 1), ("networkLink", 2), ("summaryLink", 3), ("asSummaryLink", 4), ("asExternalLink", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLsdbType.setReference('OSPF Version 2, Appendix A.4.1 The Link State Advertisement header')
if mibBuilder.loadTexts: ospfLsdbType.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbType.setDescription('The type of the link state advertisement. Each link state type has a separate advertisement format.')
ospfLsdbLSID = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLsdbLSID.setReference('OSPF Version 2, Section 12.1.4 Link State ID')
if mibBuilder.loadTexts: ospfLsdbLSID.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbLSID.setDescription('The Link State ID is an LS Type Specific field containing either a Router ID or an IP Address; it identifies the piece of the routing domain that is being described by the advertisement.')
ospfLsdbRouterId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 4), RouterID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLsdbRouterId.setReference('OSPF Version 2, Appendix C.1 Global parameters')
if mibBuilder.loadTexts: ospfLsdbRouterId.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbRouterId.setDescription('The 32 bit number that uniquely identifies the originating router in the Autonomous System.')
ospfLsdbSequence = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLsdbSequence.setReference('OSPF Version 2, Section 12.1.6 LS sequence number')
if mibBuilder.loadTexts: ospfLsdbSequence.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbSequence.setDescription('The sequence number field is a signed 32-bit integer. It is used to detect old and duplicate link state advertisements. The space of sequence numbers is linearly ordered. The larger the sequence number the more recent the advertisement.')
ospfLsdbAge = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLsdbAge.setReference('OSPF Version 2, Section 12.1.1 LS age')
if mibBuilder.loadTexts: ospfLsdbAge.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbAge.setDescription('This field is the age of the link state advertisement in seconds.')
ospfLsdbChecksum = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLsdbChecksum.setReference('OSPF Version 2, Section 12.1.7 LS checksum')
if mibBuilder.loadTexts: ospfLsdbChecksum.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbChecksum.setDescription("This field is the checksum of the complete contents of the advertisement, excepting the age field. The age field is excepted so that an advertisement's age can be incremented without updating the checksum. The checksum used is the same that is used for ISO connectionless datagrams; it is commonly referred to as the Fletcher checksum.")
ospfLsdbAdvertisement = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 4, 1, 8), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfLsdbAdvertisement.setReference('OSPF Version 2, Section 12 Link State Advertisements')
if mibBuilder.loadTexts: ospfLsdbAdvertisement.setStatus('mandatory')
if mibBuilder.loadTexts: ospfLsdbAdvertisement.setDescription('The entire Link State Advertisement, including its header.')
ospfAreaRangeTable = MibTable((1, 3, 6, 1, 2, 1, 14, 5), )
if mibBuilder.loadTexts: ospfAreaRangeTable.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaRangeTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaRangeTable.setDescription('A range of IP addresses specified by an IP address/IP network mask pair. For example, class B address range of X.X.X.X with a network mask of 255.255.0.0 includes all IP addresses from X.X.0.0 to X.X.255.255')
ospfAreaRangeEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 5, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfAreaRangeAreaID"), (0, "RFC1253-MIB", "ospfAreaRangeNet"))
if mibBuilder.loadTexts: ospfAreaRangeEntry.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaRangeEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaRangeEntry.setDescription('A range of IP addresses specified by an IP address/IP network mask pair. For example, class B address range of X.X.X.X with a network mask of 255.255.0.0 includes all IP addresses from X.X.0.0 to X.X.255.255')
ospfAreaRangeAreaID = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 5, 1, 1), AreaID()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfAreaRangeAreaID.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaRangeAreaID.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaRangeAreaID.setDescription('The Area the Address Range is to be found within.')
ospfAreaRangeNet = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 5, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfAreaRangeNet.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaRangeNet.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaRangeNet.setDescription('The IP Address of the Net or Subnet indicated by the range.')
ospfAreaRangeMask = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 5, 1, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfAreaRangeMask.setReference('OSPF Version 2, Appendix C.2 Area parameters')
if mibBuilder.loadTexts: ospfAreaRangeMask.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaRangeMask.setDescription('The Subnet Mask that pertains to the Net or Subnet.')
ospfAreaRangeStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 5, 1, 4), Validation().clone('valid')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfAreaRangeStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAreaRangeStatus.setDescription("This variable displays the validity or invalidity of the entry. Setting it to 'invalid' has the effect of rendering it inoperative. The internal effect (row removal) is implementation dependent.")
ospfHostTable = MibTable((1, 3, 6, 1, 2, 1, 14, 6), )
if mibBuilder.loadTexts: ospfHostTable.setReference('OSPF Version 2, Appendix C.6 Host route parameters')
if mibBuilder.loadTexts: ospfHostTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfHostTable.setDescription('The list of Hosts, and their metrics, that the router will advertise as host routes.')
ospfHostEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 6, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfHostIpAddress"), (0, "RFC1253-MIB", "ospfHostTOS"))
if mibBuilder.loadTexts: ospfHostEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfHostEntry.setDescription('A metric to be advertised, for a given type of service, when a given host is reachable.')
ospfHostIpAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfHostIpAddress.setReference('OSPF Version 2, Appendix C.6 Host route parameters')
if mibBuilder.loadTexts: ospfHostIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ospfHostIpAddress.setDescription('The IP Address of the Host.')
ospfHostTOS = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 2), TOSType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfHostTOS.setReference('OSPF Version 2, Appendix C.6 Host route parameters')
if mibBuilder.loadTexts: ospfHostTOS.setStatus('mandatory')
if mibBuilder.loadTexts: ospfHostTOS.setDescription('The Type of Service of the route being configured.')
ospfHostMetric = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 3), Metric()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfHostMetric.setReference('OSPF Version 2, Appendix C.6 Host route parameters')
if mibBuilder.loadTexts: ospfHostMetric.setStatus('mandatory')
if mibBuilder.loadTexts: ospfHostMetric.setDescription('The Metric to be advertised.')
ospfHostStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 6, 1, 4), Validation().clone('valid')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfHostStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ospfHostStatus.setDescription("This variable displays the validity or invalidity of the entry. Setting it to 'invalid' has the effect of rendering it inoperative. The internal effect (row removal) is implementation dependent.")
ospfIfTable = MibTable((1, 3, 6, 1, 2, 1, 14, 7), )
if mibBuilder.loadTexts: ospfIfTable.setReference('OSPF Version 2, Appendix C.3 Router interface parameters')
if mibBuilder.loadTexts: ospfIfTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfTable.setDescription('The OSPF Interface Table describes the interfaces from the viewpoint of OSPF.')
ospfIfEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 7, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfIfIpAddress"), (0, "RFC1253-MIB", "ospfAddressLessIf"))
if mibBuilder.loadTexts: ospfIfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfEntry.setDescription('The OSPF Interface Entry describes one interface from the viewpoint of OSPF.')
ospfIfIpAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfIpAddress.setDescription('The IP address of this OSPF interface.')
ospfAddressLessIf = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfAddressLessIf.setStatus('mandatory')
if mibBuilder.loadTexts: ospfAddressLessIf.setDescription('For the purpose of easing the instancing of addressed and addressless interfaces; This variable takes the value 0 on interfaces with IP Addresses, and the corresponding value of ifIndex for interfaces having no IP Address.')
ospfIfAreaId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 3), AreaID().clone(hexValue="00000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfAreaId.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfAreaId.setDescription('A 32-bit integer uniquely identifying the area to which the interface connects. Area ID 0.0.0.0 is used for the OSPF backbone.')
ospfIfType = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("broadcast", 1), ("nbma", 2), ("pointToPoint", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfType.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfType.setDescription("The OSPF interface type. By way of a default, this field may be intuited from the corresponding value of ifType. Broadcast LANs, such as Ethernet and IEEE 802.5, take the value 'broadcast', X.25, Frame Relay, and similar technologies take the value 'nbma', and links that are definitively point to point take the value 'pointToPoint'.")
ospfIfAdminStat = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 5), Status().clone('enabled')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfAdminStat.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfAdminStat.setDescription("The OSPF interface's administrative status. The value 'enabled' denotes that neighbor relationships may be formed on the interface, and the interface will be advertised as an internal route to some area. The value 'disabled' denotes that the interface is external to OSPF.")
ospfIfRtrPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 6), DesignatedRouterPriority().clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfRtrPriority.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfRtrPriority.setDescription('The priority of this interface. Used in multi-access networks, this field is used in the designated router election algorithm. The value 0 signifies that the router is not eligible to become the designated router on this particular network. In the event of a tie in this value, routers will use their router id as a tie breaker.')
ospfIfTransitDelay = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 7), UpToMaxAge().clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfTransitDelay.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfTransitDelay.setDescription('The estimated number of seconds it takes to transmit a link- state update packet over this interface.')
ospfIfRetransInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 8), UpToMaxAge().clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfRetransInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfRetransInterval.setDescription('The number of seconds between link-state advertisement retransmissions, for adjacencies belonging to this interface. This value is also used when retransmitting database description and link-state request packets.')
ospfIfHelloInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 9), HelloRange().clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfHelloInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfHelloInterval.setDescription('The length of time, in seconds, between the Hello packets that the router sends on the interface. This value must be the same for all routers attached to a common network.')
ospfIfRtrDeadInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 10), PositiveInteger().clone(40)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfRtrDeadInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfRtrDeadInterval.setDescription("The number of seconds that a router's Hello packets have not been seen before its neighbors declare the router down. This should be some multiple of the Hello interval. This value must be the same for all routers attached to a common network.")
ospfIfPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 11), PositiveInteger().clone(120)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfPollInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfPollInterval.setDescription('The larger time interval, in seconds, between the Hello packets sent to an inactive non-broadcast multi- access neighbor.')
ospfIfState = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("down", 1), ("loopback", 2), ("waiting", 3), ("pointToPoint", 4), ("designatedRouter", 5), ("backupDesignatedRouter", 6), ("otherDesignatedRouter", 7))).clone('down')).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfIfState.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfState.setDescription('The OSPF Interface State.')
ospfIfDesignatedRouter = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 13), IpAddress().clone(hexValue="00000000")).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfIfDesignatedRouter.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfDesignatedRouter.setDescription('The IP Address of the Designated Router.')
ospfIfBackupDesignatedRouter = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 14), IpAddress().clone(hexValue="00000000")).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfIfBackupDesignatedRouter.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfBackupDesignatedRouter.setDescription('The IP Address of the Backup Designated Router.')
ospfIfEvents = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfIfEvents.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfEvents.setDescription('The number of times this OSPF interface has changed its state, or an error has occurred.')
ospfIfAuthKey = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 7, 1, 16), OctetString().clone(hexValue="0000000000000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfAuthKey.setReference('OSPF Version 2, Section 9 The Interface Data Structure')
if mibBuilder.loadTexts: ospfIfAuthKey.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfAuthKey.setDescription("The Authentication Key. If the Area's Authorization Type is simplePassword, and the key length is shorter than 8 octets, the agent will left adjust and zero fill to 8 octets. When read, ospfIfAuthKey always returns an Octet String of length zero.")
ospfIfMetricTable = MibTable((1, 3, 6, 1, 2, 1, 14, 8), )
if mibBuilder.loadTexts: ospfIfMetricTable.setReference('OSPF Version 2, Appendix C.3 Router interface parameters')
if mibBuilder.loadTexts: ospfIfMetricTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfMetricTable.setDescription('The TOS metrics for a non-virtual interface identified by the interface index.')
ospfIfMetricEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 8, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfIfMetricIpAddress"), (0, "RFC1253-MIB", "ospfIfMetricAddressLessIf"), (0, "RFC1253-MIB", "ospfIfMetricTOS"))
if mibBuilder.loadTexts: ospfIfMetricEntry.setReference('OSPF Version 2, Appendix C.3 Router interface parameters')
if mibBuilder.loadTexts: ospfIfMetricEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfMetricEntry.setDescription('A particular TOS metric for a non-virtual interface identified by the interface index.')
ospfIfMetricIpAddress = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfMetricIpAddress.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfMetricIpAddress.setDescription('The IP address of this OSPF interface. On row creation, this can be derived from the instance.')
ospfIfMetricAddressLessIf = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 2), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfMetricAddressLessIf.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfMetricAddressLessIf.setDescription('For the purpose of easing the instancing of addressed and addressless interfaces; This variable takes the value 0 on interfaces with IP Addresses, and the value of ifIndex for interfaces having no IP Address. On row creation, this can be derived from the instance.')
ospfIfMetricTOS = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 3), TOSType()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfMetricTOS.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfMetricTOS.setDescription('The type of service metric being referenced. On row creation, this can be derived from the instance.')
ospfIfMetricMetric = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 4), Metric()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfMetricMetric.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfMetricMetric.setDescription("The metric of using this type of service on this interface. The default value of the TOS 0 Metric is 10^8 / ifSpeed. The value FFFF is distinguished to mean 'no route via this TOS'.")
ospfIfMetricStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 8, 1, 5), Validation().clone('valid')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfIfMetricStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ospfIfMetricStatus.setDescription("This variable displays the validity or invalidity of the entry. Setting it to 'invalid' has the effect of rendering it inoperative. The internal effect (row removal) is implementation dependent.")
ospfVirtIfTable = MibTable((1, 3, 6, 1, 2, 1, 14, 9), )
if mibBuilder.loadTexts: ospfVirtIfTable.setReference('OSPF Version 2, Appendix C.4 Virtual link parameters')
if mibBuilder.loadTexts: ospfVirtIfTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfTable.setDescription("Information about this router's virtual interfaces.")
ospfVirtIfEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 9, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfVirtIfAreaID"), (0, "RFC1253-MIB", "ospfVirtIfNeighbor"))
if mibBuilder.loadTexts: ospfVirtIfEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfEntry.setDescription('Information about a single Virtual Interface.')
ospfVirtIfAreaID = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 1), AreaID()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfVirtIfAreaID.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfAreaID.setDescription('The Transit Area that the Virtual Link traverses. By definition, this is not 0.0.0.0')
ospfVirtIfNeighbor = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 2), RouterID()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfVirtIfNeighbor.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfNeighbor.setDescription('The Router ID of the Virtual Neighbor.')
ospfVirtIfTransitDelay = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 3), UpToMaxAge().clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfVirtIfTransitDelay.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfTransitDelay.setDescription('The estimated number of seconds it takes to transmit a link-state update packet over this interface.')
ospfVirtIfRetransInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 4), UpToMaxAge().clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfVirtIfRetransInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfRetransInterval.setDescription('The number of seconds between link-state advertisement retransmissions, for adjacencies belonging to this interface. This value is also used when retransmitting database description and link-state request packets. This value should be well over the expected round-trip time.')
ospfVirtIfHelloInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 5), HelloRange().clone(10)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfVirtIfHelloInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfHelloInterval.setDescription('The length of time, in seconds, between the Hello packets that the router sends on the interface. This value must be the same for the virtual neighbor.')
ospfVirtIfRtrDeadInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 6), PositiveInteger().clone(60)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfVirtIfRtrDeadInterval.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfRtrDeadInterval.setDescription("The number of seconds that a router's Hello packets have not been seen before its neighbors declare the router down. This should be some multiple of the Hello interval. This value must be the same for the virtual neighbor.")
ospfVirtIfState = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 4))).clone(namedValues=NamedValues(("down", 1), ("pointToPoint", 4))).clone('down')).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtIfState.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfState.setDescription('OSPF virtual interface states.')
ospfVirtIfEvents = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtIfEvents.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfEvents.setDescription('The number of state changes or error events on this Virtual Link.')
ospfVirtIfAuthKey = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 9), OctetString().clone(hexValue="0000000000000000")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfVirtIfAuthKey.setReference('OSPF Version 2, Section 9 The Interface Data Structure')
if mibBuilder.loadTexts: ospfVirtIfAuthKey.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfAuthKey.setDescription('If Authentication Type is simplePassword, the device will left adjust and zero fill to 8 octets. When read, ospfVirtIfAuthKey always returns a string of length zero.')
ospfVirtIfStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 9, 1, 10), Validation().clone('valid')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfVirtIfStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtIfStatus.setDescription("This variable displays the validity or invalidity of the entry. Setting it to 'invalid' has the effect of rendering it inoperative. The internal effect (row removal) is implementation dependent.")
ospfNbrTable = MibTable((1, 3, 6, 1, 2, 1, 14, 10), )
if mibBuilder.loadTexts: ospfNbrTable.setReference('OSPF Version 2, Section 10 The Neighbor Data Structure')
if mibBuilder.loadTexts: ospfNbrTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrTable.setDescription('A table of non-virtual neighbor information.')
ospfNbrEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 10, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfNbrIpAddr"), (0, "RFC1253-MIB", "ospfNbrAddressLessIndex"))
if mibBuilder.loadTexts: ospfNbrEntry.setReference('OSPF Version 2, Section 10 The Neighbor Data Structure')
if mibBuilder.loadTexts: ospfNbrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrEntry.setDescription('The information regarding a single neighbor.')
ospfNbrIpAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfNbrIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrIpAddr.setDescription('The IP address of this neighbor.')
ospfNbrAddressLessIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 2), InterfaceIndex()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfNbrAddressLessIndex.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrAddressLessIndex.setDescription('On an interface having an IP Address, zero. On addressless interfaces, the corresponding value of ifIndex in the Internet Standard MIB. On row creation, this can be derived from the instance.')
ospfNbrRtrId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 3), RouterID().clone(hexValue="00000000")).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfNbrRtrId.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrRtrId.setDescription('A 32-bit integer (represented as a type IpAddress) uniquely identifying the neighboring router in the Autonomous System.')
ospfNbrOptions = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfNbrOptions.setReference('OSPF Version 2, Section 12.1.2 Options')
if mibBuilder.loadTexts: ospfNbrOptions.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrOptions.setDescription("A Bit Mask corresponding to the neighbor's options field. Bit 0, if set, indicates that the area accepts and operates on external information; if zero, it is a stub area. Bit 1, if set, indicates that the system will operate on Type of Service metrics other than TOS 0. If zero, the neighbor will ignore all metrics except the TOS 0 metric.")
ospfNbrPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 5), DesignatedRouterPriority().clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfNbrPriority.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrPriority.setDescription('The priority of this neighbor in the designated router election algorithm. The value 0 signifies that the neighbor is not eligible to become the designated router on this particular network.')
ospfNbrState = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("down", 1), ("attempt", 2), ("init", 3), ("twoWay", 4), ("exchangeStart", 5), ("exchange", 6), ("loading", 7), ("full", 8))).clone('down')).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfNbrState.setReference('OSPF Version 2, Section 10.1 Neighbor States')
if mibBuilder.loadTexts: ospfNbrState.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrState.setDescription('The State of the relationship with this Neighbor.')
ospfNbrEvents = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfNbrEvents.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrEvents.setDescription('The number of times this neighbor relationship has changed state, or an error has occurred.')
ospfNbrLSRetransQLen = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfNbrLSRetransQLen.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNbrLSRetransQLen.setDescription('The current length of the retransmission queue.')
ospfNBMANbrStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 10, 1, 9), Validation().clone('valid')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ospfNBMANbrStatus.setStatus('mandatory')
if mibBuilder.loadTexts: ospfNBMANbrStatus.setDescription("This variable displays the validity or invalidity of the entry. Setting it to 'invalid' has the effect of rendering it inoperative. The internal effect (row removal) is implementation dependent.")
ospfVirtNbrTable = MibTable((1, 3, 6, 1, 2, 1, 14, 11), )
if mibBuilder.loadTexts: ospfVirtNbrTable.setReference('OSPF Version 2, Section 15 Virtual Links')
if mibBuilder.loadTexts: ospfVirtNbrTable.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtNbrTable.setDescription('A table of virtual neighbor information.')
ospfVirtNbrEntry = MibTableRow((1, 3, 6, 1, 2, 1, 14, 11, 1), ).setIndexNames((0, "RFC1253-MIB", "ospfVirtNbrArea"), (0, "RFC1253-MIB", "ospfVirtNbrRtrId"))
if mibBuilder.loadTexts: ospfVirtNbrEntry.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtNbrEntry.setDescription('Virtual neighbor information.')
ospfVirtNbrArea = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 1), AreaID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtNbrArea.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtNbrArea.setDescription('The Transit Area Identifier.')
ospfVirtNbrRtrId = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 2), RouterID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtNbrRtrId.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtNbrRtrId.setDescription('A 32-bit integer uniquely identifying the neighboring router in the Autonomous System.')
ospfVirtNbrIpAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtNbrIpAddr.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtNbrIpAddr.setDescription('The IP address this Virtual Neighbor is using.')
ospfVirtNbrOptions = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtNbrOptions.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtNbrOptions.setDescription("A bit map corresponding to the neighbor's options field. Thus, Bit 1, if set, indicates that the neighbor supports Type of Service Routing; if zero, no metrics other than TOS 0 are in use by the neighbor.")
ospfVirtNbrState = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("down", 1), ("attempt", 2), ("init", 3), ("twoWay", 4), ("exchangeStart", 5), ("exchange", 6), ("loading", 7), ("full", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtNbrState.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtNbrState.setDescription('The state of the Virtual Neighbor Relationship.')
ospfVirtNbrEvents = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtNbrEvents.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtNbrEvents.setDescription('The number of times this virtual link has changed its state, or an error has occurred.')
ospfVirtNbrLSRetransQLen = MibTableColumn((1, 3, 6, 1, 2, 1, 14, 11, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ospfVirtNbrLSRetransQLen.setStatus('mandatory')
if mibBuilder.loadTexts: ospfVirtNbrLSRetransQLen.setDescription('The current length of the retransmission queue.')
mibBuilder.exportSymbols("RFC1253-MIB", ospfAreaLSACksumSum=ospfAreaLSACksumSum, ospfIfMetricEntry=ospfIfMetricEntry, ospfLsdbChecksum=ospfLsdbChecksum, ospfNbrOptions=ospfNbrOptions, ospfLsdbRouterId=ospfLsdbRouterId, ospfNbrRtrId=ospfNbrRtrId, ospfIfEvents=ospfIfEvents, ospfHostStatus=ospfHostStatus, ospfAreaRangeNet=ospfAreaRangeNet, HelloRange=HelloRange, ospfHostIpAddress=ospfHostIpAddress, ospfNbrAddressLessIndex=ospfNbrAddressLessIndex, ospfAreaLSACount=ospfAreaLSACount, ospfIfMetricAddressLessIf=ospfIfMetricAddressLessIf, Validation=Validation, ospfStubAreaEntry=ospfStubAreaEntry, ospfExternLSACksumSum=ospfExternLSACksumSum, ospfVirtIfRtrDeadInterval=ospfVirtIfRtrDeadInterval, ospfStubAreaID=ospfStubAreaID, ospfAreaRangeMask=ospfAreaRangeMask, RouterID=RouterID, ospfVirtNbrState=ospfVirtNbrState, ospfASBdrRtrCount=ospfASBdrRtrCount, ospfIfRetransInterval=ospfIfRetransInterval, BigMetric=BigMetric, ospfAdminStat=ospfAdminStat, ospfVirtIfEvents=ospfVirtIfEvents, ospfIfMetricIpAddress=ospfIfMetricIpAddress, ospfIfMetricTOS=ospfIfMetricTOS, ospfStubStatus=ospfStubStatus, DesignatedRouterPriority=DesignatedRouterPriority, ospfAreaTable=ospfAreaTable, ospfHostEntry=ospfHostEntry, ospfIfMetricMetric=ospfIfMetricMetric, ospfNbrPriority=ospfNbrPriority, ospfNBMANbrStatus=ospfNBMANbrStatus, ospfNbrState=ospfNbrState, ospfStubAreaTable=ospfStubAreaTable, InterfaceIndex=InterfaceIndex, ospfAreaRangeAreaID=ospfAreaRangeAreaID, ospfIfTransitDelay=ospfIfTransitDelay, ospfLsdbAdvertisement=ospfLsdbAdvertisement, ospfVirtNbrTable=ospfVirtNbrTable, ospfGeneralGroup=ospfGeneralGroup, ospfLsdbTable=ospfLsdbTable, ospfVirtIfAreaID=ospfVirtIfAreaID, ospfVirtIfStatus=ospfVirtIfStatus, TOSType=TOSType, ospfAreaEntry=ospfAreaEntry, ospfIfState=ospfIfState, ospfIfAreaId=ospfIfAreaId, ospfOriginateNewLSAs=ospfOriginateNewLSAs, ospfVirtNbrRtrId=ospfVirtNbrRtrId, ospfNbrIpAddr=ospfNbrIpAddr, ospfNbrEvents=ospfNbrEvents, ospfAreaId=ospfAreaId, ospfIfRtrDeadInterval=ospfIfRtrDeadInterval, ospfNbrLSRetransQLen=ospfNbrLSRetransQLen, ospfNbrEntry=ospfNbrEntry, ospfVirtIfNeighbor=ospfVirtIfNeighbor, PositiveInteger=PositiveInteger, ospfIfPollInterval=ospfIfPollInterval, ospfHostTOS=ospfHostTOS, ospfStubTOS=ospfStubTOS, ospfAreaBdrRtrCount=ospfAreaBdrRtrCount, ospfLsdbAreaId=ospfLsdbAreaId, ospfStubMetric=ospfStubMetric, ospfExternLSACount=ospfExternLSACount, ospfIfEntry=ospfIfEntry, ospfIfMetricStatus=ospfIfMetricStatus, ospf=ospf, ospfVirtIfTransitDelay=ospfVirtIfTransitDelay, ospfVersionNumber=ospfVersionNumber, ospfVirtIfAuthKey=ospfVirtIfAuthKey, ospfAreaRangeTable=ospfAreaRangeTable, ospfIfTable=ospfIfTable, ospfLsdbType=ospfLsdbType, ospfVirtNbrIpAddr=ospfVirtNbrIpAddr, ospfTOSSupport=ospfTOSSupport, ospfVirtNbrLSRetransQLen=ospfVirtNbrLSRetransQLen, ospfVirtNbrEvents=ospfVirtNbrEvents, ospfNbrTable=ospfNbrTable, ospfIfBackupDesignatedRouter=ospfIfBackupDesignatedRouter, ospfIfHelloInterval=ospfIfHelloInterval, ospfLsdbLSID=ospfLsdbLSID, ospfHostMetric=ospfHostMetric, ospfIfAuthKey=ospfIfAuthKey, ospfImportASExtern=ospfImportASExtern, ospfASBdrRtrStatus=ospfASBdrRtrStatus, UpToMaxAge=UpToMaxAge, ospfVirtIfRetransInterval=ospfVirtIfRetransInterval, ospfHostTable=ospfHostTable, ospfVirtIfEntry=ospfVirtIfEntry, AreaID=AreaID, ospfAreaRangeEntry=ospfAreaRangeEntry, ospfVirtIfState=ospfVirtIfState, ospfIfRtrPriority=ospfIfRtrPriority, ospfVirtIfHelloInterval=ospfVirtIfHelloInterval, ospfLsdbEntry=ospfLsdbEntry, ospfLsdbSequence=ospfLsdbSequence, ospfSpfRuns=ospfSpfRuns, ospfAuthType=ospfAuthType, 
ospfAddressLessIf=ospfAddressLessIf, ospfLsdbAge=ospfLsdbAge, ospfVirtIfTable=ospfVirtIfTable, Status=Status, ospfVirtNbrEntry=ospfVirtNbrEntry, ospfAreaBdrRtrStatus=ospfAreaBdrRtrStatus, ospfVirtNbrOptions=ospfVirtNbrOptions, ospfIfDesignatedRouter=ospfIfDesignatedRouter, ospfRouterId=ospfRouterId, ospfIfIpAddress=ospfIfIpAddress, ospfVirtNbrArea=ospfVirtNbrArea, ospfIfMetricTable=ospfIfMetricTable, ospfIfAdminStat=ospfIfAdminStat, ospfRxNewLSAs=ospfRxNewLSAs, Metric=Metric, TruthValue=TruthValue, ospfIfType=ospfIfType, ospfAreaRangeStatus=ospfAreaRangeStatus)
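# A minimal usage sketch, assuming pysnmp is installed and this compiled
# module is discoverable on the MIB builder's search path as 'RFC1253-MIB';
# the helper below is illustrative only and is never called by the loader.
def _example_resolve_ospf_symbols():
    from pysnmp.smi import builder
    mib_builder = builder.MibBuilder()
    mib_builder.loadModules('RFC1253-MIB')
    # importSymbols returns the managed-object instances exported above.
    ospf_router_id, ospf_nbr_table = mib_builder.importSymbols(
        'RFC1253-MIB', 'ospfRouterId', 'ospfNbrTable')
    # Each node exposes its position in the OID tree via getName().
    return ospf_router_id.getName(), ospf_nbr_table.getName()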
4ef5375f46be6cb5c3ceb6cb0a70c7c7fcbb357c | b08870f8fe7b3cf1bbab3c52a7bacbb36ee1dcc6 | /verp/patches/v13_0/update_custom_fields_for_shopify.py | 40343f14221e3519c8ebba2228a7bb47f7871451 | [] | no_license | vsadminpk18/verpfinalversion | 7148a64fe6134e2a6371470aceb1b57cc4b5a559 | 93d164b370ad9ca0dd5cda0053082dc3abbd20da | refs/heads/master | 2023-07-13T04:11:59.211046 | 2021-08-27T06:26:48 | 2021-08-27T06:26:48 | 400,410,611 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 362 | py |
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from verp.verp_integrations.doctype.shopify_settings.shopify_settings import setup_custom_fields
def execute():
if frappe.db.get_single_value('Shopify Settings', 'enable_shopify'):
setup_custom_fields()
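# A hedged sketch of invoking this patch by hand; in normal operation Frappe
# calls execute() during `bench migrate` once the patch's dotted path is
# listed in the app's patches.txt. The site name below is an assumption.
#
#   import frappe
#   frappe.init(site='example-site')
#   frappe.connect()
#   execute()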
702a016412b75e13d5d29a36ca0710afac3810aa | ac1e944eb288c8b13a0bef0ee7de85ee6d30b4c0 | /django/djangotimer/manage.py | 6456b5b9262a6ad8e2e7bd9d7cd22862c826d549 | [] | no_license | Jayson7/random-projects | 05dd175d00e9bd62cb39973c3439846f641675c8 | cdbebb896a0ecea0de543f16ecf4661e519ec0bb | refs/heads/master | 2023-06-19T01:54:08.339954 | 2021-07-19T01:40:09 | 2021-07-19T01:40:09 | 383,971,840 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 667 | py |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djangotimer.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
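# Typical invocations of this entry point, using commands from Django's
# standard management suite (the port number is illustrative):
#
#   python manage.py migrate
#   python manage.py runserver 8000
#   python manage.py createsuperuser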
8ee947a3381a57c428c551203de2c68479f20251 | 3c2eefd083f9b65ce7900ece4d9670b1130d65de | /bin/jwst_mtvt | f4721bd5bb4a607f58ea2c7fcf3b5246e11c8b8e | [] | no_license | wkerzendorf/jwst_gtvt | ef1896fcee2f292715b36ec1a48c39b000098e48 | 55ee820d978858cbd6065c275d70680868916f58 | refs/heads/master | 2021-04-27T18:31:47.858747 | 2017-12-11T16:13:48 | 2017-12-11T16:13:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,613 |  |
#!/usr/bin/env python
import argparse
import sys
from jwst_gtvt.find_tgt_info import main, get_target_ephemeris
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='JWST Moving Target Visibility Tool')
parser.add_argument('desg', nargs='+', help='Moving target designation.')
parser.add_argument('--smallbody', action='store_true', help='Set if the designation is that of a comet or asteroid. This is required for periodic comets with multiple orbit solutions in JPL/HORIZONS.')
parser.add_argument('--v3pa', help='Specify a desired V3 (telescope frame) Position Angle.')
parser.add_argument('--save_plot', help='Path of file to save plot output.')
parser.add_argument('--save_table', help='Path of file to save table output.')
parser.add_argument('--instrument', help='If specified plot shows only windows for this instrument. Options: nircam, nirspec, niriss, miri, fgs, v3 (case insensitive).')
parser.add_argument('--name', help='Target Name to appear on plots. Names with space should use double quotes e.g. "NGC 6240".')
parser.add_argument('--start_date', default='2018-01-01', help='Start date for visibility search in yyyy-mm-dd format. Earliest available is 2018-01-01.')
parser.add_argument('--end_date', default='2021-12-31', help='End date for visibility search in yyyy-mm-dd format. Latest available is 2021-12-31.')
args = parser.parse_args()
name, args.ra, args.dec = get_target_ephemeris(
' '.join(args.desg), args.start_date, args.end_date, smallbody=args.smallbody)
if args.name is None:
args.name = name
main(args, fixed=False)
| [
"[email protected]"
]
| ||
5a3af79325cecdb45f242a8ef34e72960f0eab7d | 74ba13d19d6adb22149dbb8b17c0f4f1385ecfcb | /src/packet_factory.py | a8cbc3435b4d71956d4707aa482a5b5f794a5f04 | ["MIT"] | permissive | aenon/Melkweg | 2d6e7a2d6cd29c0bc9e246b65c0d042d9c73ad13 | d3adcc615ede1fad116c9c50ed0609a6b1a55250 | refs/heads/master | 2021-05-07T08:20:16.005318 | 2017-11-02T16:46:34 | 2017-11-02T16:46:34 | 108,594,644 | 0 | 0 | null | 2017-10-27T20:53:01 | 2017-10-27T20:53:01 | null | UTF-8 | Python | false | false | 1,004 | py |
#coding=utf-8
from packet_pb2 import MPacket
from cipher import nonce
class PacketFlag(object):
DATA = 1
LIV = 2
RST = 3
FIN = 4
KILL = 5
class PacketFactory(object):
@classmethod
    def create_syn_packet(cls, iv):
packet = MPacket()
packet.iv = iv
return packet
@classmethod
    def create_rst_packet(cls, port):
packet = MPacket()
packet.port = port
packet.flags = PacketFlag.RST
return packet
@classmethod
    def create_kill_packet(cls):
packet = MPacket()
packet.flags = PacketFlag.KILL
return packet
@classmethod
    def create_data_packet(cls, port, data):
packet = MPacket()
packet.flags = PacketFlag.DATA
packet.port = port
packet.data = data
return packet
@classmethod
    def create_fin_packet(cls, port):
packet = MPacket()
packet.flags = PacketFlag.FIN
packet.port = port
return packet
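# A minimal round-trip sketch, relying only on the factory above and the
# standard protobuf message API generated for MPacket:
#
#   outgoing = PacketFactory.create_data_packet(8080, b'hello')
#   wire = outgoing.SerializeToString()   # protobuf wire encoding
#   incoming = MPacket()
#   incoming.ParseFromString(wire)
#   assert incoming.flags == PacketFlag.DATA and incoming.port == 8080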
c8add2de1f25ab13df5942aeef2e1e97114b65ae | 6a7e9e0e9c08132166f566bd88ae1c46ff8f9c0a | /azure-mgmt-compute/azure/mgmt/compute/v2018_06_01/operations/virtual_machines_operations.py | 6550e47a4fe00e58b55bed3083b4bbdd298ce2f3 | ["MIT"] | permissive | ashirey-msft/azure-sdk-for-python | d92381d11c48f194ec9f989f5f803db614fb73f2 | e04778e13306dad2e8fb044970215bad6296afb6 | refs/heads/master | 2020-03-23T06:05:39.283442 | 2018-09-15T00:18:26 | 2018-09-15T00:18:26 | 141,188,192 | 0 | 1 | MIT | 2018-07-16T20:02:52 | 2018-07-16T20:02:52 | null | UTF-8 | Python | false | false | 70,882 | py |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class VirtualMachinesOperations(object):
"""VirtualMachinesOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client Api Version. Constant value: "2018-06-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2018-06-01"
self.config = config
def _capture_initial(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.capture.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'VirtualMachineCaptureParameters')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachineCaptureResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def capture(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""Captures the VM by copying virtual hard disks of the VM and outputs a
template that can be used to create similar VMs.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param parameters: Parameters supplied to the Capture Virtual Machine
operation.
:type parameters:
~azure.mgmt.compute.v2018_06_01.models.VirtualMachineCaptureParameters
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns
VirtualMachineCaptureResult or
ClientRawResponse<VirtualMachineCaptureResult> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.compute.v2018_06_01.models.VirtualMachineCaptureResult]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2018_06_01.models.VirtualMachineCaptureResult]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._capture_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('VirtualMachineCaptureResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
capture.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/capture'}
def _create_or_update_initial(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'VirtualMachine')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachine', response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualMachine', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to create or update a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param parameters: Parameters supplied to the Create Virtual Machine
operation.
:type parameters:
~azure.mgmt.compute.v2018_06_01.models.VirtualMachine
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns VirtualMachine or
ClientRawResponse<VirtualMachine> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.compute.v2018_06_01.models.VirtualMachine]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2018_06_01.models.VirtualMachine]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('VirtualMachine', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}'}
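    # A hedged sketch of consuming the long-running operation above;
    # `compute_client` stands for a ComputeManagementClient configured
    # elsewhere, and the resource names and `vm_parameters` are assumptions.
    #
    #   poller = compute_client.virtual_machines.create_or_update(
    #       'my_resource_group', 'my_vm', vm_parameters)
    #   vm = poller.result()   # blocks until provisioning finishes
    #   print(vm.provisioning_state)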
def _update_initial(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'VirtualMachineUpdate')
# Construct and send request
request = self._client.patch(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachine', response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualMachine', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def update(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to update a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param parameters: Parameters supplied to the Update Virtual Machine
operation.
:type parameters:
~azure.mgmt.compute.v2018_06_01.models.VirtualMachineUpdate
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns VirtualMachine or
ClientRawResponse<VirtualMachine> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.compute.v2018_06_01.models.VirtualMachine]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2018_06_01.models.VirtualMachine]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._update_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('VirtualMachine', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}'}
def _delete_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to delete a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}'}
def get(
self, resource_group_name, vm_name, expand=None, custom_headers=None, raw=False, **operation_config):
"""Retrieves information about the model view or the instance view of a
virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param expand: The expand expression to apply on the operation.
Possible values include: 'instanceView'
:type expand: str or
~azure.mgmt.compute.v2018_06_01.models.InstanceViewTypes
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: VirtualMachine or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.compute.v2018_06_01.models.VirtualMachine or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'InstanceViewTypes')
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachine', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}'}
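    # An illustrative call retrieving the model and run-time view in one
    # request (the client and resource names are assumptions):
    #
    #   vm = compute_client.virtual_machines.get(
    #       'my_resource_group', 'my_vm', expand='instanceView')
    #   for status in vm.instance_view.statuses:
    #       print(status.code)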
def instance_view(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
"""Retrieves information about the run-time state of a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: VirtualMachineInstanceView or ClientRawResponse if raw=true
:rtype:
~azure.mgmt.compute.v2018_06_01.models.VirtualMachineInstanceView or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.instance_view.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualMachineInstanceView', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
instance_view.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/instanceView'}
def _convert_to_managed_disks_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.convert_to_managed_disks.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def convert_to_managed_disks(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Converts virtual machine disks from blob-based to managed disks.
Virtual machine must be stop-deallocated before invoking this
operation.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._convert_to_managed_disks_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
convert_to_managed_disks.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/convertToManagedDisks'}
def _deallocate_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.deallocate.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def deallocate(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Shuts down the virtual machine and releases the compute resources. You
are not billed for the compute resources that this virtual machine
uses.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._deallocate_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
deallocate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/deallocate'}
def generalize(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
"""Sets the state of the virtual machine to generalized.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: None or ClientRawResponse if raw=true
:rtype: None or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.generalize.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
generalize.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/generalize'}
def list(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Lists all of the virtual machines in the specified resource group. Use
the nextLink property in the response to get the next page of virtual
machines.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of VirtualMachine
:rtype:
~azure.mgmt.compute.v2018_06_01.models.VirtualMachinePaged[~azure.mgmt.compute.v2018_06_01.models.VirtualMachine]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.VirtualMachinePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.VirtualMachinePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines'}
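    # The paged result is a plain iterable that fetches follow-up pages
    # transparently; a hedged sketch (client and group name are assumptions):
    #
    #   for vm in compute_client.virtual_machines.list('my_resource_group'):
    #       print(vm.name, vm.location)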
def list_all(
self, custom_headers=None, raw=False, **operation_config):
"""Lists all of the virtual machines in the specified subscription. Use
the nextLink property in the response to get the next page of virtual
machines.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of VirtualMachine
:rtype:
~azure.mgmt.compute.v2018_06_01.models.VirtualMachinePaged[~azure.mgmt.compute.v2018_06_01.models.VirtualMachine]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_all.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.VirtualMachinePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.VirtualMachinePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/virtualMachines'}
def list_available_sizes(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
"""Lists all available virtual machine sizes to which the specified
virtual machine can be resized.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of VirtualMachineSize
:rtype:
~azure.mgmt.compute.v2018_06_01.models.VirtualMachineSizePaged[~azure.mgmt.compute.v2018_06_01.models.VirtualMachineSize]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_available_sizes.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.VirtualMachineSizePaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.VirtualMachineSizePaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_available_sizes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/vmSizes'}
def _power_off_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.power_off.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def power_off(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to power off (stop) a virtual machine. The virtual
machine can be restarted with the same provisioned resources. You are
still charged for this virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._power_off_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
power_off.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/powerOff'}
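    # --- Usage sketch (client construction assumed): power_off returns an
    # --- LROPoller; callers either block on result() or poll themselves. The
    # --- restart, start, redeploy and perform_maintenance operations below
    # --- follow the same initial-call-plus-poller pattern:
    #
    #   poller = compute_client.virtual_machines.power_off('my-rg', 'my-vm')
    #   poller.result()  # blocks until the operation completes; returns None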
def _restart_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.restart.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def restart(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to restart a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._restart_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/restart'}
def _start_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.start.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def start(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to start a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._start_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/start'}
def _redeploy_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.redeploy.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def redeploy(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to redeploy a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._redeploy_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
redeploy.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/redeploy'}
def _perform_maintenance_initial(
self, resource_group_name, vm_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.perform_maintenance.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def perform_maintenance(
self, resource_group_name, vm_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""The operation to perform maintenance on a virtual machine.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns None or
ClientRawResponse<None> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._perform_maintenance_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
perform_maintenance.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/performMaintenance'}
def _run_command_initial(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.run_command.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vmName': self._serialize.url("vm_name", vm_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'RunCommandInput')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('RunCommandResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def run_command(
self, resource_group_name, vm_name, parameters, custom_headers=None, raw=False, polling=True, **operation_config):
"""Run command on the VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_name: The name of the virtual machine.
:type vm_name: str
:param parameters: Parameters supplied to the Run command operation.
:type parameters:
~azure.mgmt.compute.v2018_06_01.models.RunCommandInput
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns RunCommandResult or
ClientRawResponse<RunCommandResult> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.compute.v2018_06_01.models.RunCommandResult]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.compute.v2018_06_01.models.RunCommandResult]]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._run_command_initial(
resource_group_name=resource_group_name,
vm_name=vm_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('RunCommandResult', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
run_command.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/runCommand'}
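    # --- Usage sketch (parameter values hypothetical): the body is serialized
    # --- as a RunCommandInput (a plain dict with the same fields is also
    # --- accepted), and the poller resolves to a RunCommandResult:
    #
    #   poller = compute_client.virtual_machines.run_command(
    #       'my-rg', 'my-vm',
    #       {'command_id': 'RunShellScript', 'script': ['echo hello']})
    #   result = poller.result()  # RunCommandResult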
| [
"[email protected]"
]
| |
b4d2b444b5df8f4145c1988f40a94af4842b1109 | d02c92f1fc6910b1a9c5e6823e689b32567e41a6 | /practica_2/polls_proj/polls_app/serializers.py | 42363486667c10bb144a77c72a2eafa87cddeed8 | []
| no_license | eflipe/Django-REST | 6c1050bf9f46e88a7639d103a629f96d59a797bf | 110072f282e8fe9852e8bf6ae6e5660aa0e80d64 | refs/heads/master | 2023-08-05T11:58:41.691221 | 2020-08-05T23:23:34 | 2020-08-05T23:23:34 | 282,772,802 | 0 | 0 | null | 2023-07-24T00:26:28 | 2020-07-27T02:15:06 | Python | UTF-8 | Python | false | false | 1,152 | py | from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Poll, Choice, Vote
from rest_framework.authtoken.models import Token
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('username', 'email', 'password')
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
user = User(
email=validated_data['email'],
username=validated_data['username']
)
user.set_password(validated_data['password'])
user.save()
Token.objects.create(user=user)
return user
class VoteSerializer(serializers.ModelSerializer):
class Meta:
model = Vote
fields = '__all__'
class ChoiceSerializer(serializers.ModelSerializer):
votes = VoteSerializer(many=True, required=False)
class Meta:
model = Choice
fields = '__all__'
class PollSerializer(serializers.ModelSerializer):
choices = ChoiceSerializer(many=True, read_only=True, required=False)
class Meta:
model = Poll
fields = '__all__'
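# --- Usage sketch (illustrative, not part of the original module): because
# --- PollSerializer nests ChoiceSerializer (read-only), which nests
# --- VoteSerializer, a single call renders a poll with its choices and votes.
def example_poll_payload(poll):
    # poll is assumed to be a Poll instance with related choices and votes
    return PollSerializer(poll).data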
| [
"[email protected]"
]
| |
95c0226e3f29a6fd7317200273f0e0fb0a7695ca | f642c054451aa3c87bb18fa63037eea0e6358bda | /algo/longestStringInArray_CanBeMadeFromotherStrings.py | 33d9cb3ae354d688e4da4d8c1b61436bc258fc5d | []
| no_license | devendraprasad1984/python | 30f3a539e92be13d893246ad28a42907457a38d5 | 0f1badabba07fbe7f5f792b7e543c0748eecd6c7 | refs/heads/master | 2023-07-21T08:22:45.193077 | 2021-08-27T15:09:28 | 2021-08-27T15:09:28 | 254,812,552 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 497 | py | """
"""
arr=["geeks","for","geeksgeeks","geeksfor","geeksforgeeks"]
maxStr=sorted(arr,key=lambda x:-len(x))[0]
print(arr,maxStr)
found=False
for x in arr:
for y in arr:
if maxStr!=x and maxStr!=y:
# print(x+y)
if x+y==maxStr:
found=True
print("max string",maxStr,"is possible to built from parts of arrays",x,y)
break
if not found:
print("max string",maxStr,"is not possible to built from parts of arrays") | [
"[email protected]"
]
| |
7537b54bab44dc8f46b1c1c38c0d6b02d131616e | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /public_hand_or_big_woman.py | a6465a3098d82b969e3ffb571a87aeeb368e3bf7 | []
| no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py |
#! /usr/bin/env python
def be_next_work(str_arg):
week(str_arg)
print('woman_or_time')
def week(str_arg):
print(str_arg)
if __name__ == '__main__':
be_next_work('year')
| [
"[email protected]"
]
| |
958d10213b2c05b768ced6c6cda03fb7c7d10b0b | bdc10ba57424040129cc72ad018ff26bc8bca66a | /ConfigDefinitions/BranchAdditions/UserDefinedBranches/Triggers_18_MC.py | fa59a97a8c09e3b16d9403906e1fd565dd4e9943 | []
| no_license | aloeliger/Jesterworks | 61e0ac38ca325fefbbd8ccedaa8eb02d8a76ebbe | 96a22bac4ce20b91aba5884eb0e5667fcea3bc9a | refs/heads/master | 2021-06-09T15:39:06.976110 | 2021-04-23T11:25:06 | 2021-04-23T11:25:06 | 157,698,363 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,383 | py | import ConfigDefinitions.BranchAdditions.BranchDef as Branch
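# Each Calculate* function below writes a pass/fail flag into
# TheBranch.BranchValue[0]: 1.0 when the event fired the corresponding 2018 MC
# trigger path, the offline objects are trigger-matched and pass the filter
# bits, and the pt/eta requirements are met; 0.0 otherwise.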
def CalculateTrigger24(TheBranch,TheChain):
if (TheChain.passMu24 and TheChain.matchMu24_1
and TheChain.filterMu24_1 and TheChain.pt_1 > 25.0):
TheBranch.BranchValue[0]=1.0
else:
TheBranch.BranchValue[0]=0.0
def CalculateTrigger27(TheBranch,TheChain):
if(TheChain.passMu27 and TheChain.matchMu27_1
and TheChain.filterMu27_1 and TheChain.pt_1 > 25.0):
TheBranch.BranchValue[0]=1.0
else:
TheBranch.BranchValue[0]=0.0
def CalculateTrigger2027(TheBranch,TheChain):
if (TheChain.passMu20HPSTau27
and TheChain.matchMu20HPSTau27_1
and TheChain.matchMu20HPSTau27_2
and TheChain.pt_1 > 21 and TheChain.pt_1 < 25
and TheChain.pt_2 > 32
and abs(TheChain.eta_1) < 2.1
and abs(TheChain.eta_2) < 2.1
and TheChain.filterMu20HPSTau27_1
and TheChain.filterMu20HPSTau27_2):
TheBranch.BranchValue[0] = 1.0
else:
TheBranch.BranchValue[0] = 0.0
Trigger24 = Branch.UserBranch()
Trigger24.Name = "Trigger24"
Trigger24.CalculateValue = CalculateTrigger24
Trigger27 = Branch.UserBranch()
Trigger27.Name = "Trigger27"
Trigger27.CalculateValue = CalculateTrigger27
Trigger2027 = Branch.UserBranch()
Trigger2027.Name = "Trigger2027"
Trigger2027.CalculateValue = CalculateTrigger2027
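# --- Usage sketch (driver loop hypothetical): the UserBranch objects above are
# --- normally collected by the branch-addition framework, which per event does
# --- something equivalent to:
#   for branch in (Trigger24, Trigger27, Trigger2027):
#       branch.CalculateValue(branch, chain)  # fills branch.BranchValue[0]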
| [
"[email protected]"
]
| |
f7543e5d841ceb31ee2674b563c1e772576e185c | 366b2ff9cd498808438bf7c48f697c05b361d02c | /app.py | afd82cb98a6734228c58e3cf1b2d768b487eb5e6 | []
| no_license | c-bata/AngularJS-Bottle-TodoApp | 1aef6b09fd85fabaa63898ab3fb9a2d586216b93 | 8f03820b7949b0c28477970c58f25ccd1856b2a9 | refs/heads/master | 2021-03-12T22:40:32.000758 | 2015-11-04T11:14:47 | 2015-11-04T11:14:47 | 38,732,944 | 2 | 0 | null | 2015-11-04T11:11:39 | 2015-07-08T05:02:47 | Python | UTF-8 | Python | false | false | 1,290 | py | from bottle import (
route, response, run, template, static_file, install, post, request
)
import json
import os
import jsonschema
import models
import schemas
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
STATIC_DIR = os.path.join(BASE_DIR, 'static')
install(models.plugin)
@route('/')
def index():
return template('tasks')
@route('/api/tasks')
def tasks(db):
response.content_type = 'application/json'
tasks = [task.serialize for task in db.query(models.Task).all()]
return json.dumps({'tasks': tasks})
@post('/api/tasks')
def create_task(db):
response.content_type = 'application/json'
try:
jsonschema.validate(request.json, schemas.task_schema)
task = models.Task(title=request.json['title'])
db.add(task)
        db.commit()  # commit here, otherwise the id and default values cannot be returned yet
return json.dumps(task.serialize)
except jsonschema.ValidationError:
        response.status = 400  # Bottle sets the status via .status; .status_code is read-only
return json.dumps({
            'error': {'message': 'Validation failed.'}
})
@route('/static/<filename:path>')
def send_static(filename):
return static_file(filename, root=STATIC_DIR)
if __name__ == '__main__':
run(host='localhost', port=8080, debug=True, reloader=True)
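# --- Usage sketch (illustrative): with the server running, the API above can
# --- be exercised with curl:
#   curl http://localhost:8080/api/tasks
#   curl -X POST -H 'Content-Type: application/json' \
#        -d '{"title": "write docs"}' http://localhost:8080/api/tasks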
| [
"[email protected]"
]
| |
bbe8129b09d85cd20a4dcbad5bcd0f14703eb61a | ae79aa8458230fe2331b267308a29adff215bbfe | /armi/nuclearDataIO/tests/test_xsCollections.py | 9088f3c05a114f56505279c386786363dec4e6f4 | [
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
]
| permissive | paulromano/armi | 3727cf3c52de5e412e8db4d5bf5d9998a720616c | 6c4fea1ca9d256a2599efd52af5e5ebe9860d192 | refs/heads/master | 2023-01-10T05:43:27.691791 | 2020-08-07T00:33:35 | 2020-08-07T00:33:35 | 285,824,692 | 1 | 0 | Apache-2.0 | 2020-08-07T12:32:54 | 2020-08-07T12:32:53 | null | UTF-8 | Python | false | false | 3,832 | py | # Copyright 2019 TerraPower, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module that tests methods within xsCollections
"""
import unittest
from armi.nuclearDataIO import xsCollections
from armi import nuclearDataIO
from armi.tests import ISOAA_PATH
from armi.physics.neutronics.tests import test_cross_section_manager
class TestXsCollections(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.microLib = nuclearDataIO.ISOTXS(ISOAA_PATH)
def setUp(self):
self.mc = xsCollections.MacroscopicCrossSectionCreator()
self.block = test_cross_section_manager.MockBlock()
self.block.setNumberDensity("U235", 0.02)
self.block.setNumberDensity("FE", 0.01)
def test_generateTotalScatteringMatrix(self):
"""Generates the total scattering matrix by summing elastic, inelastic, and n2n scattering matrices."""
nuc = self.microLib.nuclides[0]
totalScatter = nuc.micros.getTotalScatterMatrix()
self.assertAlmostEqual(
totalScatter[0, 0],
(
nuc.micros.elasticScatter[0, 0]
+ nuc.micros.inelasticScatter[0, 0]
+ 2.0 * nuc.micros.n2nScatter[0, 0]
),
)
def test_generateTotalScatteringMatrixWithMissingData(self):
"""
Generates the total scattering matrix by summing elastic and n2n scattering matrices.
Notes
-----
This tests that the total scattering matrix can be produced when the inelastic scattering matrix is not defined.
"""
nuc = self.microLib.nuclides[0]
nuc.micros.inelasticScatter = None
totalScatter = nuc.micros.getTotalScatterMatrix()
self.assertAlmostEqual(
totalScatter[0, 0],
(nuc.micros.elasticScatter[0, 0] + 2.0 * nuc.micros.n2nScatter[0, 0]),
)
def test_createMacrosFromMicros(self):
self.mc.createMacrosFromMicros(self.microLib, self.block)
totalMacroFissionXs = 0.0
totalMacroAbsXs = 0.0
for nuc, density in self.mc.densities.items():
nuclideXS = self.mc.microLibrary.getNuclide(nuc, "AA")
for microXs in nuclideXS.micros.fission:
totalMacroFissionXs += microXs * density
for microXsName in xsCollections.ABSORPTION_XS:
for microXs in getattr(nuclideXS.micros, microXsName):
totalMacroAbsXs += microXs * density
self.assertAlmostEqual(sum(self.mc.macros.fission), totalMacroFissionXs)
self.assertAlmostEqual(sum(self.mc.macros.absorption), totalMacroAbsXs)
def test_collapseCrossSection(self):
"""
Tests cross section collapsing
Notes
-----
The expected 1 group cross section was generated by running the collapse cross section method. This tests
that this method has not been modified to produce a different result.
"""
expected1gXs = 2.35725262208
micros = self.microLib["U235AA"].micros
flux = list(reversed(range(33)))
self.assertAlmostEqual(
micros.collapseCrossSection(micros.nGamma, flux), expected1gXs
)
if __name__ == "__main__":
# import sys;sys.argv = ['', 'TestXsCollections.test_generateTotalScatteringMatrix']
unittest.main()
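# --- Usage note: the suite can also be run with the standard unittest runner
# --- from the repository root (module path assumed):
#   python -m unittest armi.nuclearDataIO.tests.test_xsCollections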
| [
"[email protected]"
]
| |
c84a55dd992ae1628b0780c38e1917efe61e7ace | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03399/s036631775.py | eaffb9a399aacef430540ccd833a1b116ff7cb11 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 73 | py | n = [int(input()) for _ in range(4)]
print(min(n[0],n[1])+min(n[2],n[3])) | [
"[email protected]"
]
| |
699ea1d33083dbe690ac1495e2b02345c3ab0360 | 9a1538123b8abec14410dad46c437cf735684dd9 | /news/migrations/0001_initial.py | 552caaeb8e0ba8c9555709ab96355304db3f721e | []
| no_license | asmuratbek/zastroy24 | deec6bd65229aeb29eb313d915c6c47ca036a8aa | d68ce21beefc644752a1271a4d8981cd2423afba | refs/heads/master | 2020-04-27T18:44:26.845151 | 2019-03-08T18:09:13 | 2019-03-08T18:09:13 | 174,585,168 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,000 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-01 14:58
from __future__ import unicode_literals
import ckeditor_uploader.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_active', models.BooleanField(default=True, verbose_name='\u0410\u043a\u0442\u0438\u0432\u043d\u0430\u044f \u043d\u043e\u0432\u043e\u0441\u0442\u044c?')),
('title', models.CharField(help_text='\u041e\u043d \u0436\u0435 \u0438 meta_title', max_length=255, verbose_name='\u041d\u0430\u0438\u043c\u0435\u043d\u043e\u0432\u0430\u043d\u0438\u0435 \u043d\u043e\u0432\u043e\u0441\u0442\u0438')),
('slug', models.CharField(help_text='\u041d\u0443\u0436\u0435\u043d \u0434\u043b\u044f URL', max_length=255, verbose_name='slug')),
('preview', models.ImageField(blank=True, null=True, upload_to='news/', verbose_name='\u041f\u0440\u0435\u0434\u043e\u0441\u043c\u043e\u0442\u0440 \u043d\u043e\u0432\u043e\u0441\u0442\u0438')),
('content', ckeditor_uploader.fields.RichTextUploadingField(verbose_name='\u0422\u0435\u043b\u043e \u043d\u043e\u0432\u043e\u0441\u0442\u0438')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='\u0414\u0430\u0442\u0430 \u0441\u043e\u0437\u0434\u0430\u043d\u0438\u044f')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='\u0414\u0430\u0442\u0430 \u043e\u0431\u043d\u043e\u0432\u043b\u0435\u043d\u0438\u044f')),
],
options={
'verbose_name': '\u043d\u043e\u0432\u043e\u0441\u0442\u044c',
'verbose_name_plural': '\u041d\u043e\u0432\u043e\u0441\u0442\u0438',
},
),
]
| [
"[email protected]"
]
| |
9e02f1f5e378de2d29593ff5b0c7234dc46017ae | ff81a9d7880f1b85a1dc19d5eba5ac72d7179c86 | /pychron/hardware/apis_controller.py | 3f9007cdfd86fd568ce9d3cbf6a0909680c9efef | [
"Apache-2.0"
]
| permissive | UManPychron/pychron | 2fb7e479a9f492423c0f458c70102c499e1062c4 | b84c9fd70072f9cbda30abe2c471e64fe3dd75d8 | refs/heads/develop | 2022-12-03T23:32:45.579326 | 2020-01-29T19:02:20 | 2020-01-29T19:02:20 | 36,100,637 | 0 | 0 | null | 2015-05-23T00:10:06 | 2015-05-23T00:10:05 | null | UTF-8 | Python | false | false | 5,881 | py | # ===============================================================================
# Copyright 2014 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
import time
from traits.api import Property, provides
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.hardware.actuators.iactuator import IActuator
from pychron.hardware.core.core_device import CoreDevice
CMD_MAP = {'list_blanks': '100',
'list_airs': '101',
'last_runid': '102',
'pipette_record': '103',
'status': '104',
'load_blank': '105',
'load_air': '106',
'cancel': '107',
'set_external_pumping': '108'}
STATUS_MAP = {'0': 'Idle',
'1': 'Pumping pipette',
'2': 'Loading pipette',
'3': 'Expanding pipettes',
'4': 'Expansion complete'}
@provides(IActuator)
class ApisController(CoreDevice):
connection_url = Property
# close `isolation_valve` `isolation_delay` seconds after loading of pipette started
isolation_delay = 25
# name of valve to make analytical section static
isolation_valve = 'U'
isolation_info = 'isolate microbone'
    # instead of the simple wait/close sequence use a gosub
    # use this for a more complex/flexible pattern i.e. open/close multiple valves
isolation_gosub = None
def load_additional_args(self, config):
v = self.config_get(config, 'Isolation', 'valve', optional=False, default='U')
self.isolation_delay = self.config_get(config, 'Isolation', 'delay', optional=False, cast='int', default=25)
self.isolation_info = self.config_get(config, 'Isolation', 'info', optional=True)
self.isolation_gosub = self.config_get(config, 'Isolation', 'gosub', optional=True)
self.isolation_valve = v.replace('"', '').replace("'", '')
return True
    # IActuator protocol
def close_channel(self, obj):
self.set_external_pumping(False)
return True
def open_channel(self, obj):
self.set_external_pumping(True)
return True
def get_channel_state(self, obj):
pass
def get_lock_state(self, obj):
pass
def script_loading_block(self, script, **kw):
"""
wait for script loading to complete.
this process has three steps.
1. wait for loading to start. status changes from 1 to 2
2. if isolation_gosub
do gosub
else
wait `isolation_delay` seconds then close the `isolation valve`
3. wait for apis script to complete
return True if completed successfully
"""
script.console_info('waiting for pipette to load')
if not self.blocking_poll('loading_started', script=script, **kw):
return
script.console_info('loading started')
if self.isolation_gosub:
self.debug('executing isolation gosub= {}'.format(self.isolation_gosub))
script.gosub(self.isolation_gosub)
else:
ws = self.isolation_delay
self.debug('wait {}s'.format(ws))
time.sleep(ws)
if self.isolation_info:
script.console_info(self.isolation_info)
iv = self.isolation_valve
iv=iv.split(',')
for v in iv:
script.close(v.strip())
script.console_info('wait for apis to complete expansion')
return self.blocking_poll('get_loading_complete', script=script, **kw)
def make_command(self, cmd):
try:
return CMD_MAP[cmd]
except KeyError:
return 'invalid command cmd={}'.format(cmd)
def load_blank(self, name):
cmd = self.make_command('load_blank')
self.ask('{},{}'.format(cmd, name))
def load_pipette(self, name):
cmd = self.make_command('load_air')
self.ask('{},{}'.format(cmd, name))
def get_status(self):
cmd = self.make_command('status')
status = self.ask(cmd)
return status
def get_loading_status(self):
status = self.get_status()
try:
status = STATUS_MAP[status]
return status
except KeyError:
pass
def loading_started(self):
status = self.get_loading_status()
return status == 'Loading pipette'
def get_loading_complete(self):
status = self.get_loading_status()
return status == 'Expansion complete'
def get_available_blanks(self):
cmd = self.make_command('list_blanks')
return self.ask(cmd)
def get_available_airs(self):
cmd = self.make_command('list_airs')
return self.ask(cmd)
def set_external_pumping(self, state):
cmd = self.make_command('set_external_pumping')
cmd = '{},{}'.format(cmd, 'true' if state else 'false')
return self.ask(cmd)
def _get_connection_url(self):
return '{}:{}'.format(self.communicator.host, self.communicator.port)
# ============= EOF =============================================
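# --- Configuration sketch (section/values hypothetical): load_additional_args
# --- above reads an [Isolation] section from the device's config file, e.g.:
#   [Isolation]
#   valve = U
#   delay = 25
#   info = isolate microbone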
| [
"[email protected]"
]
| |
f9197a33c02d27eaa7c912ad0142e4e415f29be6 | facb8b9155a569b09ba66aefc22564a5bf9cd319 | /wp2/merra_scripts/01_netCDF_extraction/merra902Combine/267-tideGauge.py | 77661b2e1c355fc0cfe10e2d86a6e1f385a5c771 | []
| no_license | moinabyssinia/modeling-global-storm-surges | 13e69faa8f45a1244a964c5de4e2a5a6c95b2128 | 6e385b2a5f0867df8ceabd155e17ba876779c1bd | refs/heads/master | 2023-06-09T00:40:39.319465 | 2021-06-25T21:00:44 | 2021-06-25T21:00:44 | 229,080,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,376 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Jun 17 11:28:00 2020
--------------------------------------------
Load predictors for each TG and combine them
--------------------------------------------
@author: Michael Tadesse
"""
import os
import pandas as pd
#define directories
# dir_name = 'F:\\01_erainterim\\01_eraint_predictors\\eraint_D3'
dir_in = "/lustre/fs0/home/mtadesse/merraLocalized"
dir_out = "/lustre/fs0/home/mtadesse/merraAllCombined"
def combine():
os.chdir(dir_in)
#get names
tg_list_name = os.listdir()
x = 267
y = 268
for tg in range(x, y):
os.chdir(dir_in)
tg_name = tg_list_name[tg]
print(tg_name, '\n')
#looping through each TG folder
os.chdir(tg_name)
#check for empty folders
if len(os.listdir()) == 0:
continue
#defining the path for each predictor
where = os.getcwd()
csv_path = {'slp' : os.path.join(where, 'slp.csv'),\
"wnd_u": os.path.join(where, 'wnd_u.csv'),\
'wnd_v' : os.path.join(where, 'wnd_v.csv')}
first = True
for pr in csv_path.keys():
print(tg_name, ' ', pr)
#read predictor
pred = pd.read_csv(csv_path[pr])
#remove unwanted columns
pred.drop(['Unnamed: 0'], axis = 1, inplace=True)
#sort based on date as merra files are scrambled
pred.sort_values(by = 'date', inplace=True)
#give predictor columns a name
pred_col = list(pred.columns)
for pp in range(len(pred_col)):
if pred_col[pp] == 'date':
continue
pred_col[pp] = pr + str(pred_col[pp])
pred.columns = pred_col
#merge all predictors
if first:
pred_combined = pred
first = False
else:
pred_combined = pd.merge(pred_combined, pred, on = 'date')
#saving pred_combined
os.chdir(dir_out)
tg_name = str(tg)+"_"+tg_name;
pred_combined.to_csv('.'.join([tg_name, 'csv']))
os.chdir(dir_in)
print('\n')
#run script
combine()
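# --- Note: x/y above pin the loop to tide-gauge index 267, matching this
# --- script's filename; sweeping every gauge folder would instead use, e.g.,
# --- range(0, len(tg_list_name)).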
| [
"[email protected]"
]
| |
fc1746cb1561850e95f01014fd657000a32f9695 | 5b10b6efa049014f0d00f81b148d94c429286b66 | /DRFViewset/manage.py | b746ebba33addb3132d357d7f25d2d28be599469 | []
| no_license | Ruchika-Munde/Rest_Task | dff657aed041ac6925590f423301f0cae7599f6c | ea741889927ed0fa2a1ba9c2311304671680c6bf | refs/heads/master | 2022-12-21T05:21:00.623464 | 2020-09-09T08:06:24 | 2020-09-09T08:06:24 | 294,044,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 630 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'DRFViewset.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
59f35ce862fba5572d3fa349af79c857f80998f2 | 5b7af6548668085da9a6ab86f564538ee73c4865 | /build/scripts/slave/recipe_modules/sync_submodules/resources/deps2submodules.py | 7596609ca687bffb82ee3c78743d82aa56d0c70d | [
"BSD-3-Clause"
]
| permissive | elastos/Elastos.APP.Android.ShiJiuTV | 463a986450a915f7b3066e6a03aca903cf56f69b | f77189a2b8df86028adc68105988710d16ce012b | refs/heads/master | 2023-03-18T03:11:58.337349 | 2018-03-12T08:50:57 | 2018-03-13T11:10:27 | 124,007,751 | 0 | 1 | null | 2022-10-03T03:30:29 | 2018-03-06T02:21:25 | null | UTF-8 | Python | false | false | 5,186 | py | #!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Read DEPS and use the information to update git submodules"""
import argparse
import logging
import os
import re
import subprocess
import sys
from deps_utils import GetDepsContent
SHA1_RE = re.compile(r'[0-9a-fA-F]{40}')
SHA1_REF_RE = re.compile(r'^([0-9a-fA-F]{40})\s+refs/[\w/]+\s*')
def SanitizeDeps(submods, path_prefix):
"""
Look for conflicts (primarily nested submodules) in submodule data. In the
case of a conflict, the higher-level (shallower) submodule takes precedence.
Modifies the submods argument in-place.
"""
ret = {}
for name, value in submods.iteritems():
if not name.startswith(path_prefix):
logging.warning('Dropping submodule "%s", because it is outside the '
'working directory "%s"', name, path_prefix)
continue
# Strip the prefix from the submodule name.
name = name[len(path_prefix):]
parts = name.split('/')[:-1]
while parts:
may_conflict = '/'.join(parts)
if may_conflict in submods:
logging.warning('Dropping submodule "%s", because it is nested in '
'submodule "%s"', name, may_conflict)
break
parts.pop()
ret[name] = value
return ret
def CollateDeps(deps_content):
"""
Take the output of deps_utils.GetDepsContent and return a hash of:
{ submod_name : [ [ submod_os, ... ], submod_url, submod_sha1 ], ... }
"""
spliturl = lambda x: list(x.partition('@')[0::2]) if x else [None, None]
submods = {}
# Non-OS-specific DEPS always override OS-specific deps. This is an interim
# hack until there is a better way to handle OS-specific DEPS.
for (deps_os, val) in deps_content[1].iteritems():
for (dep, url) in val.iteritems():
submod_data = submods.setdefault(dep, [[]] + spliturl(url))
submod_data[0].append(deps_os)
for (dep, url) in deps_content[0].iteritems():
submods[dep] = [['all']] + spliturl(url)
return submods
def WriteGitmodules(submods):
"""
Take the output of CollateDeps, use it to write a .gitmodules file and
return a map of submodule name -> sha1 to be added to the git index.
"""
adds = {}
with open('.gitmodules', 'w') as fh:
for name, (os, url, sha1) in sorted(submods.iteritems()):
if not url:
continue
if url.startswith('svn://'):
logging.warning('Skipping svn url %s', url)
continue
print >> fh, '[submodule "%s"]' % name
print >> fh, '\tpath = %s' % name
print >> fh, '\turl = %s' % url
print >> fh, '\tos = %s' % ','.join(os)
if not sha1:
sha1 = 'master'
# Resolve the ref to a sha1 hash.
if not SHA1_RE.match(sha1):
if sha1.startswith('origin/'):
sha1 = sha1[7:]
output = subprocess.check_output(['git', 'ls-remote', url, sha1])
match = SHA1_REF_RE.match(output)
if not match:
logging.warning('Could not resolve ref %s for %s', sha1, url)
continue
logging.info('Resolved %s for %s to %s', sha1, url, match.group(1))
sha1 = match.group(1)
logging.info('Added submodule %s revision %s', name, sha1)
adds[name] = sha1
subprocess.check_call(['git', 'add', '.gitmodules'])
return adds
def RemoveObsoleteSubmodules():
"""
Delete from the git repository any submodules which aren't in .gitmodules.
"""
lsfiles = subprocess.check_output(['git', 'ls-files', '-s'])
for line in lsfiles.splitlines():
if not line.startswith('160000'):
continue
_, _, _, path = line.split()
cmd = ['git', 'config', '-f', '.gitmodules',
'--get-regexp', 'submodule\..*\.path', '^%s$' % path]
try:
with open(os.devnull, 'w') as nullpipe:
subprocess.check_call(cmd, stdout=nullpipe)
except subprocess.CalledProcessError:
subprocess.check_call(['git', 'update-index', '--force-remove', path])
def main():
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument('--path-prefix',
default=os.path.basename(os.getcwd()) + '/',
help='Ignore any dep outside this prefix. DEPS files can '
"specify dependencies in the repo's parent directory, "
'so the default here is to ignore anything outside the '
"current directory's basename")
parser.add_argument('deps_file', default='DEPS', nargs='?')
options = parser.parse_args()
if not options.path_prefix.endswith('/'):
parser.error("--path-prefix '%s' must end with a '/'" % options.path_prefix)
adds = WriteGitmodules(
SanitizeDeps(
CollateDeps(GetDepsContent(options.deps_file)),
options.path_prefix))
RemoveObsoleteSubmodules()
for submod_path, submod_sha1 in adds.iteritems():
subprocess.check_call(['git', 'update-index', '--add',
'--cacheinfo', '160000', submod_sha1, submod_path])
return 0
if __name__ == '__main__':
sys.exit(main())
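# --- Usage sketch (paths hypothetical): run next to a checkout so the default
# --- --path-prefix (the current directory's basename plus '/') matches the
# --- DEPS entries:
#   python deps2submodules.py --path-prefix src/ DEPS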
| [
"[email protected]"
]
| |
06eab32812567f359d7aea988deb216e87b8b3e1 | d114a6576659a4a299f5965032489d2abbe41282 | /src/computer_vision/nodes/synchronize_img_command_lidar.py | 9c7be96b236672ac1cb80aa900f9db16b72f267c | [
"MIT"
]
| permissive | mldiego/Platooning-F1Tenth | dbc23ff7af3397716be1bbfdf9881da799206855 | ec5eadb137da8428642b3ffd1b8ca31fde4f6dff | refs/heads/master | 2023-03-04T21:08:12.799694 | 2021-02-18T00:11:46 | 2021-02-18T00:11:46 | 230,968,509 | 0 | 0 | MIT | 2021-02-16T17:34:01 | 2019-12-30T19:25:59 | Python | UTF-8 | Python | false | false | 4,887 | py | #!/usr/bin/env python
import rospy
import cv2
from std_msgs.msg import String
from sensor_msgs.msg import Image, CompressedImage,LaserScan
from cv_bridge import CvBridge, CvBridgeError
from message_filters import ApproximateTimeSynchronizer, Subscriber
from ackermann_msgs.msg import AckermannDriveStamped
import imutils
from race.msg import drive_param
import os
import rospkg
import numpy as np
# import sys so we can use packages outside of this folder in
# either python 2 or python 3, I know it's janky, chill
import sys
import os
from pathlib import Path
#insert parent directory into the path
sys.path.insert(0,str(Path(os.path.abspath(__file__)).parent.parent))
from preprocessing.utils import ImageUtils
class MessageSynchronizer:
''' Gathers messages with vehicle information that have similar time stamps
/camera/zed/rgb/image_rect_color/compressed: 18 hz
/camera/zed/rgb/image_rect_color: 18 hz
/vesc/ackermann_cmd_mux/input/teleop: 40 hz
'''
def __init__(self,racecar_name,vesc_name,data_path):
self.image_topic = racecar_name+'/camera/zed/rgb/image_rect_color'
self.drive_topic = vesc_name+'/ackermann_cmd_mux/input/teleop'
self.lidar_topic = racecar_name+'/scan'
print(self.image_topic,self.drive_topic,self.lidar_topic)
self.image_rect_color=Subscriber(self.image_topic,Image)
self.ackermann_stamped=Subscriber(self.drive_topic,AckermannDriveStamped)
self.lidar_sub=Subscriber(self.lidar_topic,LaserScan)
r = rospkg.RosPack()
self.util=ImageUtils()
self.save_path_root=os.path.sep.join([r.get_path('computer_vision'),data_path])
self.cv_bridge=CvBridge()
self.count=0
self.save_count=0
#create the time synchronizer
self.sub = ApproximateTimeSynchronizer([self.image_rect_color,self.ackermann_stamped,self.lidar_sub], queue_size = 20, slop = 0.08)
#register the callback to the synchronizer
self.sub.registerCallback(self.master_callback)
#callback for the synchronized messages
#Note: a negative value means turning to the right, a postive value means turning to the left
def master_callback(self,image,ackermann_msg,lidar_msg): #drive_param):
#convert rosmsg to cv image
try:
cv_image=self.cv_bridge.imgmsg_to_cv2(image,"bgr8")
self.count+=1
except CvBridgeError as e:
print(e)
#convert the steering command to a string to I can store it with the image name
#for efficient data storage
command='%.10f' % ackermann_msg.drive.steering_angle
#replace the period with ~ so it's a valid filename
command=command.replace('.','~')
#save path
save_path=os.path.join(self.save_path_root,self.label_image(ackermann_msg.drive.steering_angle),str(rospy.Time.now())+'~'+command+'.png')
limited_ranges=np.asarray(lidar_msg.ranges)
indices=np.where(limited_ranges>=10.0)[0]
limited_ranges[indices]=10.0
limited_ranges= limited_ranges[29:1053]
limited_ranges = limited_ranges.reshape((32,32,1))
limited_ranges = limited_ranges
if(self.count % 1==0):
dirPath = os.path.split(save_path)[0]
if not 'straight' in dirPath and 'weak_right' not in dirPath and 'weak_left' not in dirPath:
self.save_image(cv_image,save_path)
np.save(save_path.replace(".png",".npy"),limited_ranges)
self.save_count+=1
self.count+=1
#function that categorizes images into left, weak_left, straight, weak_right, right
def label_image(self,steering_angle):
if(steering_angle<-0.261799):
return "right"
elif(steering_angle>0.261799):
return "left"
elif(steering_angle<-0.0523599 and steering_angle>-0.261799):
return "weak_right"
elif(steering_angle>0.0523599 and steering_angle<0.261799):
return "weak_left"
else:
return "straight"
def save_image(self,image,path):
dirPath = os.path.split(path)[0]
# if the output directory does not exist, create it
if not os.path.exists(dirPath):
os.makedirs(dirPath)
print('does not exist')
print(path)
cv2.imwrite(path,image)
if __name__=='__main__':
rospy.init_node('image_command_sync')
args = rospy.myargv()[1:]
# get the racecar name so we know what to subscribe to
racecar_name=args[0]
# get the name of the vesc for the car
vesc_name=args[1]
# path where to store the dataset
data_path = args[2]
# initialize the message filter
mf=MessageSynchronizer(racecar_name,vesc_name,data_path)
# spin so that we can receive messages
rospy.spin() | [
"[email protected]"
]
| |
014a1726dce1e3d670880f2daba7a044700067c4 | bbec348efb79c6588a4cb6bb565c813fe3fe86ad | /pyVpx/pyVsm/pyVsm/ReflectTypes.py | 8fca89c7db09164ff1d65f4acc92cf7fbf57273b | []
| no_license | free-Zen/pvc | 2be60fdc0fd0345039219c802223f987fce3b113 | 8428a84481be319ae739dfbb87715f31810138d9 | refs/heads/master | 2022-02-24T12:13:31.599398 | 2019-10-14T07:49:13 | 2019-10-14T07:49:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,108 | py | # ******* WARNING - AUTO GENERATED CODE - DO NOT EDIT *******
from .VmomiSupport import CreateDataType, CreateManagedType
from .VmomiSupport import CreateEnumType
from .VmomiSupport import AddVersion, AddVersionParent
from .VmomiSupport import AddBreakingChangesInfo
from .VmomiSupport import F_LINK, F_LINKABLE
from .VmomiSupport import F_OPTIONAL, F_SECRET
from .VmomiSupport import newestVersions, stableVersions
from .VmomiSupport import publicVersions, dottedVersions
from .VmomiSupport import oldestVersions
AddVersion("vmodl.version.version0", "", "", 0, "vim25")
AddVersion("vmodl.version.version1", "", "", 0, "vim25")
AddVersion("vmodl.version.version2", "", "", 0, "vim25")
AddVersion("vmodl.reflect.version.version1", "reflect", "1.0", 0, "reflect")
AddVersion("vmodl.reflect.version.version2", "reflect", "2.0", 0, "reflect")
AddVersionParent("vmodl.version.version0", "vmodl.version.version0")
AddVersionParent("vmodl.version.version1", "vmodl.version.version0")
AddVersionParent("vmodl.version.version1", "vmodl.version.version1")
AddVersionParent("vmodl.version.version2", "vmodl.version.version0")
AddVersionParent("vmodl.version.version2", "vmodl.version.version1")
AddVersionParent("vmodl.version.version2", "vmodl.version.version2")
AddVersionParent("vmodl.reflect.version.version1", "vmodl.version.version0")
AddVersionParent("vmodl.reflect.version.version1", "vmodl.version.version1")
AddVersionParent("vmodl.reflect.version.version1", "vmodl.version.version2")
AddVersionParent("vmodl.reflect.version.version1", "vmodl.reflect.version.version1")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.version.version0")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.version.version1")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.version.version2")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.reflect.version.version1")
AddVersionParent("vmodl.reflect.version.version2", "vmodl.reflect.version.version2")
newestVersions.Add("vmodl.reflect.version.version2")
stableVersions.Add("vmodl.reflect.version.version2")
publicVersions.Add("vmodl.reflect.version.version2")
dottedVersions.Add("vmodl.reflect.version.version2")
oldestVersions.Add("vmodl.reflect.version.version1")
CreateManagedType("vmodl.reflect.DynamicTypeManager", "InternalDynamicTypeManager", "vmodl.ManagedObject", "vmodl.reflect.version.version1", None, [("queryTypeInfo", "DynamicTypeMgrQueryTypeInfo", "vmodl.reflect.version.version1", (("filterSpec", "vmodl.reflect.DynamicTypeManager.FilterSpec", "vmodl.reflect.version.version1", F_OPTIONAL, None),), (0, "vmodl.reflect.DynamicTypeManager.AllTypeInfo", "vmodl.reflect.DynamicTypeManager.AllTypeInfo"), "System.Read", None), ("queryMoInstances", "DynamicTypeMgrQueryMoInstances", "vmodl.reflect.version.version1", (("filterSpec", "vmodl.reflect.DynamicTypeManager.FilterSpec", "vmodl.reflect.version.version1", F_OPTIONAL, None),), (F_OPTIONAL, "vmodl.reflect.DynamicTypeManager.MoInstance[]", "vmodl.reflect.DynamicTypeManager.MoInstance[]"), "System.Read", None)])
CreateDataType("vmodl.reflect.DynamicTypeManager.Annotation", "DynamicTypeMgrAnnotation", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("name", "string", "vmodl.reflect.version.version1", 0), ("parameter", "string[]", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateDataType("vmodl.reflect.DynamicTypeManager.PropertyTypeInfo", "DynamicTypeMgrPropertyTypeInfo", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("name", "string", "vmodl.reflect.version.version1", 0), ("version", "string", "vmodl.reflect.version.version1", 0), ("type", "string", "vmodl.reflect.version.version1", 0), ("privId", "string", "vmodl.reflect.version.version1", F_OPTIONAL), ("msgIdFormat", "string", "vmodl.reflect.version.version1", F_OPTIONAL), ("annotation", "vmodl.reflect.DynamicTypeManager.Annotation[]", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateEnumType("vmodl.reflect.DynamicTypeManager.PropertyTypeInfo.AnnotationType", "DynamicTypeMgrPropertyTypeInfoAnnotationType", "vmodl.reflect.version.version1", ["optional", "readonly", "linkable", "link"])
CreateDataType("vmodl.reflect.DynamicTypeManager.DataTypeInfo", "DynamicTypeMgrDataTypeInfo", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("name", "string", "vmodl.reflect.version.version1", 0), ("wsdlName", "string", "vmodl.reflect.version.version1", 0), ("version", "string", "vmodl.reflect.version.version1", 0), ("base", "string[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("property", "vmodl.reflect.DynamicTypeManager.PropertyTypeInfo[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("annotation", "vmodl.reflect.DynamicTypeManager.Annotation[]", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateDataType("vmodl.reflect.DynamicTypeManager.ParamTypeInfo", "DynamicTypeMgrParamTypeInfo", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("name", "string", "vmodl.reflect.version.version1", 0), ("version", "string", "vmodl.reflect.version.version1", 0), ("type", "string", "vmodl.reflect.version.version1", 0), ("privId", "string", "vmodl.reflect.version.version1", F_OPTIONAL), ("annotation", "vmodl.reflect.DynamicTypeManager.Annotation[]", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateEnumType("vmodl.reflect.DynamicTypeManager.ParamTypeInfo.AnnotationType", "DynamicTypeMgrParamTypeInfoAnnotationType", "vmodl.reflect.version.version1", ["optional", "secret"])
CreateDataType("vmodl.reflect.DynamicTypeManager.MethodTypeInfo", "DynamicTypeMgrMethodTypeInfo", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("name", "string", "vmodl.reflect.version.version1", 0), ("wsdlName", "string", "vmodl.reflect.version.version1", 0), ("version", "string", "vmodl.reflect.version.version1", 0), ("paramTypeInfo", "vmodl.reflect.DynamicTypeManager.ParamTypeInfo[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("returnTypeInfo", "vmodl.reflect.DynamicTypeManager.ParamTypeInfo", "vmodl.reflect.version.version1", F_OPTIONAL), ("fault", "string[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("privId", "string", "vmodl.reflect.version.version1", F_OPTIONAL), ("annotation", "vmodl.reflect.DynamicTypeManager.Annotation[]", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateEnumType("vmodl.reflect.DynamicTypeManager.MethodTypeInfo.AnnotationType", "DynamicTypeMgrMethodTypeInfoAnnotationType", "vmodl.reflect.version.version1", ["internal"])
CreateDataType("vmodl.reflect.DynamicTypeManager.ManagedTypeInfo", "DynamicTypeMgrManagedTypeInfo", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("name", "string", "vmodl.reflect.version.version1", 0), ("wsdlName", "string", "vmodl.reflect.version.version1", 0), ("version", "string", "vmodl.reflect.version.version1", 0), ("base", "string[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("property", "vmodl.reflect.DynamicTypeManager.PropertyTypeInfo[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("method", "vmodl.reflect.DynamicTypeManager.MethodTypeInfo[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("annotation", "vmodl.reflect.DynamicTypeManager.Annotation[]", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateDataType("vmodl.reflect.DynamicTypeManager.EnumTypeInfo", "DynamicTypeEnumTypeInfo", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("name", "string", "vmodl.reflect.version.version1", 0), ("wsdlName", "string", "vmodl.reflect.version.version1", 0), ("version", "string", "vmodl.reflect.version.version1", 0), ("value", "string[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("annotation", "vmodl.reflect.DynamicTypeManager.Annotation[]", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateDataType("vmodl.reflect.DynamicTypeManager.AllTypeInfo", "DynamicTypeMgrAllTypeInfo", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("managedTypeInfo", "vmodl.reflect.DynamicTypeManager.ManagedTypeInfo[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("enumTypeInfo", "vmodl.reflect.DynamicTypeManager.EnumTypeInfo[]", "vmodl.reflect.version.version1", F_OPTIONAL), ("dataTypeInfo", "vmodl.reflect.DynamicTypeManager.DataTypeInfo[]", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateDataType("vmodl.reflect.DynamicTypeManager.MoInstance", "DynamicTypeMgrMoInstance", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("id", "string", "vmodl.reflect.version.version1", 0), ("moType", "string", "vmodl.reflect.version.version1", 0)])
CreateDataType("vmodl.reflect.DynamicTypeManager.FilterSpec", "DynamicTypeMgrFilterSpec", "vmodl.DynamicData", "vmodl.reflect.version.version1", None)
CreateDataType("vmodl.reflect.DynamicTypeManager.TypeFilterSpec", "DynamicTypeMgrTypeFilterSpec", "vmodl.reflect.DynamicTypeManager.FilterSpec", "vmodl.reflect.version.version1", [("typeSubstr", "string", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateDataType("vmodl.reflect.DynamicTypeManager.MoFilterSpec", "DynamicTypeMgrMoFilterSpec", "vmodl.reflect.DynamicTypeManager.FilterSpec", "vmodl.reflect.version.version1", [("id", "string", "vmodl.reflect.version.version1", F_OPTIONAL), ("typeSubstr", "string", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateManagedType("vmodl.reflect.ManagedMethodExecuter", "ReflectManagedMethodExecuter", "vmodl.ManagedObject", "vmodl.reflect.version.version1", None, [("executeSoap", "ExecuteSoap", "vmodl.reflect.version.version1", (("moid", "string", "vmodl.reflect.version.version1", 0, None),("version", "string", "vmodl.reflect.version.version1", 0, None),("method", "string", "vmodl.reflect.version.version1", 0, None),("argument", "vmodl.reflect.ManagedMethodExecuter.SoapArgument[]", "vmodl.reflect.version.version1", F_OPTIONAL, None),), (F_OPTIONAL, "vmodl.reflect.ManagedMethodExecuter.SoapResult", "vmodl.reflect.ManagedMethodExecuter.SoapResult"), None, None), ("fetchSoap", "FetchSoap", "vmodl.reflect.version.version1", (("moid", "string", "vmodl.reflect.version.version1", 0, None),("version", "string", "vmodl.reflect.version.version1", 0, None),("prop", "string", "vmodl.reflect.version.version1", 0, None),), (F_OPTIONAL, "vmodl.reflect.ManagedMethodExecuter.SoapResult", "vmodl.reflect.ManagedMethodExecuter.SoapResult"), None, None)])
CreateDataType("vmodl.reflect.ManagedMethodExecuter.SoapArgument", "ReflectManagedMethodExecuterSoapArgument", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("name", "string", "vmodl.reflect.version.version1", 0), ("val", "string", "vmodl.reflect.version.version1", 0)])
CreateDataType("vmodl.reflect.ManagedMethodExecuter.SoapFault", "ReflectManagedMethodExecuterSoapFault", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("faultMsg", "string", "vmodl.reflect.version.version1", 0), ("faultDetail", "string", "vmodl.reflect.version.version1", F_OPTIONAL)])
CreateDataType("vmodl.reflect.ManagedMethodExecuter.SoapResult", "ReflectManagedMethodExecuterSoapResult", "vmodl.DynamicData", "vmodl.reflect.version.version1", [("response", "string", "vmodl.reflect.version.version1", F_OPTIONAL), ("fault", "vmodl.reflect.ManagedMethodExecuter.SoapFault", "vmodl.reflect.version.version1", F_OPTIONAL)])
| [
"[email protected]"
]
| |
ab6cd8266c32908502d62a2aa3848a27d9d5182b | 4014aa4a5ce0af0f10016b8fd056e26c147e8b42 | /stdlib/src/hmap/std/matching/topic_based/topic_types/flat_numbers.py | 4d48e4cc7bf94ad24eac0e2ff27f4449bf6692f1 | [
"MIT"
]
| permissive | gregjhansell97/hive-map-python-3 | d09ac97a89a9cbddf26ab1c91f698d9e44941144 | d3d4f826f154a2aeea7e251266c221f629574b83 | refs/heads/master | 2020-07-31T12:23:55.983819 | 2020-04-28T23:52:49 | 2020-04-28T23:52:49 | 210,602,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,012 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from abc import abstractmethod
import struct
from hmap.interface.matching.topic_based import HashableTopic
class FlatNumber(HashableTopic):
fmt = ""
def __init__(self, content):
self.__raw = struct.pack(self.fmt, content)
self.__content = content
@property
def content(self):
return self.__content
def calcsize(self):
return struct.calcsize(self.fmt)
@classmethod
def serialize(cls, instance):
return instance.__raw
@classmethod
def deserialize(cls, raw_data, lazy=False):
        # unpack a single value from the start of the buffer; trailing bytes are ignored
        return cls(struct.unpack_from(cls.fmt, raw_data, offset=0)[0])
class FlatByte(FlatNumber):
fmt = "b"
class FlatUByte(FlatNumber):
fmt = "B"
class FlatInt(FlatNumber):
fmt = "i"
class FlatUInt(FlatNumber):
fmt = "I"
# hide parent class
__all__ = ["FlatByte", "FlatUByte", "FlatInt", "FlatUInt"]
| [
"[email protected]"
]
| |
fc81fc7ae77bb68bbe360d676d6ea0f9dc2ffdda | 867796f20586cfa70422945d98e7d5e99edbabc2 | /contactista/migrations/ed99772734e1_initial_revision.py | a7f31619c7e62a4b989f9d250739bd7809b112ba | [
"MIT"
]
| permissive | rizplate/contactista | 500cf7f640b3db94d0b49b921e4b09abdfc56d5b | 8b3030487518cd4767078703aee04041d2004725 | refs/heads/master | 2020-03-28T11:37:02.932371 | 2017-09-15T18:55:52 | 2017-09-15T18:56:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,048 | py | """Initial revision
Revision ID: ed99772734e1
Revises:
Create Date: 2017-08-01 12:48:40.754913
"""
import os
import json
from alembic import op
import sqlalchemy as sa
from sqlalchemy.schema import Sequence, CreateSequence, DropSequence
# revision identifiers, used by Alembic.
revision = 'ed99772734e1'
down_revision = None
branch_labels = ('default',)
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
pronouns_table = op.create_table('pronouns',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('subject_pronoun', sa.String(length=50), nullable=False),
sa.Column('object_pronoun', sa.String(length=50), nullable=False),
sa.Column('possessive_determiner', sa.String(length=50), nullable=False),
sa.Column('possessive_pronoun', sa.String(length=50), nullable=False),
sa.Column('reflexive_pronoun', sa.String(length=50), nullable=False),
sa.PrimaryKeyConstraint('id')
)
role_table = op.create_table('role',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=80), nullable=False),
sa.Column('description', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('password', sa.String(length=255), nullable=True),
sa.Column('active', sa.Boolean(), nullable=True),
sa.Column('confirmed_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)
op.create_table('contact',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('note', sa.Text(), nullable=True),
sa.Column('note_format', sa.String(length=20), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('roles_users',
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], )
)
op.create_table('contact_email',
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('category', sa.String(length=50), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('email', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.PrimaryKeyConstraint('contact_id', 'category')
)
op.execute(CreateSequence(Sequence('contact_email_position')))
op.create_table('contact_name',
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('category', sa.String(length=50), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.Column('name', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.PrimaryKeyConstraint('contact_id', 'category')
)
op.execute(CreateSequence(Sequence('contact_name_position')))
op.create_table('contact_pronouns',
sa.Column('contact_id', sa.Integer(), nullable=False),
sa.Column('pronouns_id', sa.Integer(), nullable=False),
sa.Column('position', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ),
sa.ForeignKeyConstraint(['pronouns_id'], ['pronouns.id'], ),
sa.PrimaryKeyConstraint('contact_id', 'pronouns_id')
)
op.execute(CreateSequence(Sequence('contact_pronouns_position')))
# ### end Alembic commands ###
# Seed database with default data
op.bulk_insert(role_table, rows=[
{"name": "superuser", "description": "Unlimited access"},
])
pronouns_fixture_path = os.path.join(
os.path.dirname(os.path.dirname(__file__)),
"fixtures",
"pronouns.json",
)
with open(pronouns_fixture_path) as f:
pronouns_list = json.load(f)
pronouns_objs = [{
"subject_pronoun": line[0],
"object_pronoun": line[1],
"possessive_determiner": line[2],
"possessive_pronoun": line[3],
"reflexive_pronoun": line[4],
} for line in pronouns_list]
op.bulk_insert(pronouns_table, rows=pronouns_objs)
def downgrade():
for seqname in ('contact_pronouns_position', 'contact_name_position',
'contact_email_position',
):
op.execute(DropSequence(Sequence(seqname)))
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('contact_pronouns')
op.drop_table('contact_name')
op.drop_table('contact_email')
op.drop_table('roles_users')
op.drop_table('contact')
op.drop_index(op.f('ix_user_username'), table_name='user')
op.drop_table('user')
op.drop_table('role')
op.drop_table('pronouns')
# ### end Alembic commands ###
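# To apply or revert this revision with the standard Alembic CLI (assuming a
# configured alembic.ini): `alembic upgrade ed99772734e1` / `alembic downgrade base`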
| [
"[email protected]"
]
| |
35ffcbdb4fcd1a28e57c02cab3f847dfaea2a016 | 508eff345eb4b7fd6040d6872f5ae626956deb27 | /samples/seal/inspect_seal_model.py | 386db48cd9a964a2eb8d23436b0247c6c09ef6a7 | [
"MIT"
]
| permissive | xuannianc/Mask_RCNN | 7c9e2ed5b3d245cd9a7b42319c61a0aa83ddb295 | c942d5cf68508dd0e22d56a6eb25f8a30a090bda | refs/heads/master | 2020-03-30T00:52:43.433219 | 2018-12-03T05:27:30 | 2018-12-03T05:27:30 | 150,548,502 | 0 | 0 | null | 2018-09-27T07:41:57 | 2018-09-27T07:41:56 | null | UTF-8 | Python | false | false | 26,591 | py | import os
import os.path as osp
import sys
import random
import math
import re
import time
import numpy as np
import tensorflow as tf
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.patches as patches
# Root directory of the project
import glob
# ROOT_DIR = os.path.abspath("../../")
ROOT_DIR = os.path.abspath("/home/adam/workspace/github/Mask_RCNN")
# Import Mask RCNN
sys.path.append(ROOT_DIR) # To find local version of the library
from mrcnn import utils
from mrcnn import visualize
from mrcnn.visualize import display_images
import mrcnn.model as modellib
from mrcnn.model import log
from samples.seal import seal
from samples.seal.seal import remove2
import cv2
SEAL_DIR = os.path.join(ROOT_DIR, 'samples', 'seal')
# Directory to save logs and trained model
MODEL_DIR = os.path.join(SEAL_DIR, 'models')
config = seal.SealConfig()
DATASET_DIR = osp.join(ROOT_DIR, 'datasets', 'seal')
# Override the training configurations with a few changes for inferencing.
class InferenceConfig(config.__class__):
# Run detection on one image at a time
GPU_COUNT = 1
IMAGES_PER_GPU = 1
config = InferenceConfig()
# config.display()
# Device to load the neural network on. Useful if you're training a model on the same machine,
# in which case use CPU and leave the GPU for training.
DEVICE = "/gpu:1" # /cpu:0 or /gpu:0
# Inspect the model in training or inference modes
# values: 'inference' or 'training'
# TODO: code for 'training' test mode not ready yet
TEST_MODE = "inference"
def get_ax(rows=1, cols=1, size=16):
"""Return a Matplotlib Axes array to be used in
all visualizations in the notebook. Provide a
central point to control graph sizes.
Adjust the size attribute to control how big to render images
"""
_, ax = plt.subplots(rows, cols, figsize=(size * cols, size * rows))
return ax
# Load validation dataset
dataset = seal.SealDataset()
dataset.load_seal(DATASET_DIR, "val")
# Must call before using the dataset
dataset.prepare()
print("Images: {}\nClasses: {}".format(len(dataset.image_ids), dataset.class_names))
# Create model in inference mode
with tf.device(DEVICE):
model = modellib.MaskRCNN(mode=TEST_MODE, model_dir=MODEL_DIR, config=config)
# Set path to balloon weights file
# Download file from the Releases page and set its path
# https://github.com/matterport/Mask_RCNN/releases
weights_path = osp.join(MODEL_DIR, 'mask_rcnn_seals_0030.h5')
# Or, load the last model you trained
# weights_path = model.find_last()
# Load weights
print("Loading weights ", weights_path)
model.load_weights(weights_path, by_name=True)
image_id = random.choice(dataset.image_ids)
def display_resized_image():
resized_image, image_meta, gt_class_id, gt_bbox, gt_mask = \
modellib.load_image_gt(dataset, config, image_id, use_mini_mask=False)
image_info = dataset.image_info[image_id]
    # Note: image_info's "id" is the image filename
print("Image ID: {}.{} ({}) {}".format(image_info["source"], image_info["id"], image_id,
dataset.image_reference(image_id)))
# Run object detection
# rois: [N, (y1, x1, y2, x2)] detection bounding boxes
# class_ids: [N] int class IDs
# scores: [N] float probability scores for the class IDs
# masks: [H, W, N] instance binary masks
results = model.detect([resized_image], verbose=1)
# Display results
ax = get_ax()
r = results[0]
visualize.display_instances(resized_image, r['rois'], r['masks'], r['class_ids'],
dataset.class_names, r['scores'], ax=ax, title="Predictions")
log("gt_class_id", gt_class_id)
log("gt_bbox", gt_bbox)
log("gt_mask", gt_mask)
plt.show()
# display_resized_image()
def display_image():
image = dataset.load_image(image_id)
image_info = dataset.image_info[image_id]
    # Note: image_info's "id" is the image filename
print("Image ID: {}.{} ({}) {}".format(image_info["source"], image_info["id"], image_id,
dataset.image_reference(image_id)))
# Run object detection
# rois: [N, (y1, x1, y2, x2)] detection bounding boxes
# class_ids: [N] int class IDs
# scores: [N] float probability scores for the class IDs
# masks: [H, W, N] instance binary masks
results = model.detect([image], verbose=1)
# Display results
ax = get_ax()
r = results[0]
visualize.display_instances(image, r['rois'], r['masks'], r['class_ids'],
dataset.class_names, r['scores'], ax=ax, title="Predictions")
plt.show()
# display_image()
def color_splash():
image = dataset.load_image(image_id)
image_info = dataset.image_info[image_id]
    # Note: image_info's "id" is the image filename
print("Image ID: {}.{} ({}) {}".format(image_info["source"], image_info["id"], image_id,
dataset.image_reference(image_id)))
results = model.detect([image], verbose=1)
r = results[0]
splashed_image = seal.color_splash(image, r['masks'])
display_images([splashed_image], titles='color_splash', cols=1)
# cv2.namedWindow('splashed_image', cv2.WINDOW_NORMAL)
# cv2.imshow('splashed_image', splashed_image)
# cv2.waitKey(0)
# color_splash()
def display_rpn_targets():
# Generate RPN trainig targets
resized_image, image_meta, gt_class_ids, gt_bboxes, gt_masks = \
modellib.load_image_gt(dataset, config, image_id, use_mini_mask=False)
image_info = dataset.image_info[image_id]
    # Note: image_info's "id" is the image filename
print("Image ID: {}.{} ({}) {}".format(image_info["source"], image_info["id"], image_id,
dataset.image_reference(image_id)))
    # get_anchors also stores the anchors in pixel coordinates on model.anchors
normalized_anchors = model.get_anchors(resized_image.shape)
anchors = model.anchors
# target_rpn_match is 1 for positive anchors, -1 for negative anchors
# and 0 for neutral anchors.
target_rpn_match, target_rpn_deltas = modellib.build_rpn_targets(anchors, gt_class_ids, gt_bboxes, model.config)
log("target_rpn_match", target_rpn_match)
log("target_rpn_deltas", target_rpn_deltas)
positive_anchor_ix = np.where(target_rpn_match[:] == 1)[0]
negative_anchor_ix = np.where(target_rpn_match[:] == -1)[0]
neutral_anchor_ix = np.where(target_rpn_match[:] == 0)[0]
positive_anchors = model.anchors[positive_anchor_ix]
negative_anchors = model.anchors[negative_anchor_ix]
neutral_anchors = model.anchors[neutral_anchor_ix]
log("positive_anchors", positive_anchors)
log("negative_anchors", negative_anchors)
log("neutral anchors", neutral_anchors)
# Apply refinement deltas to positive anchors
refined_anchors = utils.apply_box_deltas(
positive_anchors,
target_rpn_deltas[:positive_anchors.shape[0]] * model.config.RPN_BBOX_STD_DEV)
log("refined_anchors", refined_anchors, )
# Display positive anchors before refinement (dotted) and
# after refinement (solid).
visualize.draw_boxes(resized_image, boxes=positive_anchors, refined_boxes=refined_anchors, ax=get_ax())
plt.show()
# display_rpn_targets()
def display_rpn_prediction():
# Run RPN sub-graph
resized_image, image_meta, gt_class_ids, gt_bboxes, gt_masks = \
modellib.load_image_gt(dataset, config, image_id, use_mini_mask=False)
pillar = model.keras_model.get_layer("ROI").output # node to start searching from
# TF 1.4 and 1.9 introduce new versions of NMS. Search for all names to support TF 1.3~1.10
nms_node = model.ancestor(pillar, "ROI/rpn_non_max_suppression:0")
if nms_node is None:
nms_node = model.ancestor(pillar, "ROI/rpn_non_max_suppression/NonMaxSuppressionV2:0")
if nms_node is None: # TF 1.9-1.10
nms_node = model.ancestor(pillar, "ROI/rpn_non_max_suppression/NonMaxSuppressionV3:0")
rpn = model.run_graph([resized_image], [
("rpn_class", model.keras_model.get_layer("rpn_class").output),
("pre_nms_anchors", model.ancestor(pillar, "ROI/pre_nms_anchors:0")),
("refined_anchors", model.ancestor(pillar, "ROI/refined_anchors:0")),
("refined_anchors_clipped", model.ancestor(pillar, "ROI/refined_anchors_clipped:0")),
("post_nms_anchor_ix", nms_node),
("proposals", model.keras_model.get_layer("ROI").output),
])
ax = get_ax(2, 3)
# Show top anchors by score (before refinement)
limit = 100
    # np.flatten() collapses the array to 1-D, which implicitly assumes
    # batch_size=1; otherwise this computation would not be valid
    # sort anchor scores in descending order
sorted_anchor_ids = np.argsort(rpn['rpn_class'][:, :, 1].flatten())[::-1]
visualize.draw_boxes(resized_image, boxes=model.anchors[sorted_anchor_ids[:limit]], ax=ax[0, 0])
# Show top anchors with refinement. Then with clipping to image boundaries
limit = 50
pre_nms_anchors = utils.denorm_boxes(rpn["pre_nms_anchors"][0], resized_image.shape[:2])
refined_anchors = utils.denorm_boxes(rpn["refined_anchors"][0], resized_image.shape[:2])
visualize.draw_boxes(resized_image, boxes=pre_nms_anchors[:limit],
refined_boxes=refined_anchors[:limit], ax=ax[0, 1])
refined_anchors_clipped = utils.denorm_boxes(rpn["refined_anchors_clipped"][0], resized_image.shape[:2])
visualize.draw_boxes(resized_image, refined_boxes=refined_anchors_clipped[:limit], ax=ax[0, 2])
# Show refined anchors after non-max suppression
ixs = rpn["post_nms_anchor_ix"][:limit]
visualize.draw_boxes(resized_image, refined_boxes=refined_anchors_clipped[ixs], ax=ax[1, 0])
# Show final proposals
# These are the same as the previous step (refined anchors
# after NMS) but with coordinates normalized to [0, 1] range.
# Convert back to image coordinates for display
h, w = resized_image.shape[:2]
proposals = rpn['proposals'][0, :limit] * np.array([h, w, h, w])
visualize.draw_boxes(resized_image, refined_boxes=proposals, ax=ax[1, 1])
plt.show()
# display_rpn_prediction()
def display_mrcnn_prediction():
resized_image, image_meta, gt_class_ids, gt_bboxes, gt_masks = \
modellib.load_image_gt(dataset, config, image_id, use_mini_mask=False)
# Get input and output to classifier and mask heads.
mrcnn = model.run_graph([resized_image], [
("proposals", model.keras_model.get_layer("ROI").output),
("probs", model.keras_model.get_layer("mrcnn_class").output),
("deltas", model.keras_model.get_layer("mrcnn_bbox").output),
("masks", model.keras_model.get_layer("mrcnn_mask").output),
("detections", model.keras_model.get_layer("mrcnn_detection").output),
])
ax = get_ax(1, 4)
################################## display detections ###############################################
# Get detection class IDs. Trim zero padding.
det_class_ids = mrcnn['detections'][0, :, 4].astype(np.int32)
padding_start_ix = np.where(det_class_ids == 0)[0][0]
det_class_ids = det_class_ids[:padding_start_ix]
detections = mrcnn['detections'][0, :padding_start_ix]
log('trimmed_detection', detections)
print("{} detections: {}".format(
padding_start_ix, np.array(dataset.class_names)[det_class_ids]))
captions = ["{} {:.3f}".format(dataset.class_names[int(class_id)], score) if class_id > 0 else ""
for class_id, score in zip(detections[:, 4], detections[:, 5])]
visualize.draw_boxes(resized_image.copy(),
refined_boxes=utils.denorm_boxes(detections[:, :4], resized_image.shape[:2]),
visibilities=[2] * len(detections),
captions=captions, title="Detections",
ax=ax[0])
################################### display proposals ##########################################
# Proposals are in normalized coordinates. Scale them to image coordinates.
h, w = resized_image.shape[:2]
proposals = np.around(mrcnn["proposals"][0] * np.array([h, w, h, w])).astype(np.int32)
# Class ID, score, and mask per proposal
    # mrcnn["probs"] has shape (batch_size, num_proposals=1000, num_classes)
proposal_class_ids = np.argmax(mrcnn["probs"][0], axis=1)
proposal_class_scores = mrcnn["probs"][0, np.arange(proposal_class_ids.shape[0]), proposal_class_ids]
proposal_class_names = np.array(dataset.class_names)[proposal_class_ids]
proposal_positive_ixs = np.where(proposal_class_ids > 0)[0]
# How many ROIs vs empty rows?
print("{} valid proposals out of {}".format(np.sum(np.any(proposals, axis=1)), proposals.shape[0]))
print("{} positive ROIs".format(len(proposal_positive_ixs)))
# Class counts
print(list(zip(*np.unique(proposal_class_names, return_counts=True))))
# Display a random sample of proposals.
# Proposals classified as background are dotted, and
# the rest show their class and confidence score.
limit = 200
ixs = np.random.randint(0, proposals.shape[0], limit)
captions = ["{} {:.3f}".format(dataset.class_names[c], s) if c > 0 else ""
for c, s in zip(proposal_class_ids[ixs], proposal_class_scores[ixs])]
visualize.draw_boxes(resized_image.copy(), boxes=proposals[ixs],
visibilities=np.where(proposal_class_ids[ixs] > 0, 2, 1),
captions=captions, title="Proposals Before Refinement",
ax=ax[1])
#################################### apply bounding box refinement #############################
# Class-specific bounding box shifts.
    # mrcnn['deltas'] has shape (batch_size, num_proposals=1000, num_classes, 4)
proposal_deltas = mrcnn["deltas"][0, np.arange(proposals.shape[0]), proposal_class_ids]
log("proposals_deltas", proposal_deltas)
# Apply bounding box transformations
# Shape: (num_proposals=1000, (y1, x1, y2, x2)]
    # NOTE: deltas apply the same way whether boxes are in normalized or pixel coordinates
refined_proposals = utils.apply_box_deltas(
proposals, proposal_deltas * config.BBOX_STD_DEV).astype(np.int32)
log("refined_proposals", refined_proposals)
# Show positive proposals
# ids = np.arange(proposals.shape[0]) # Display all
limit = 5
ids = np.random.randint(0, len(proposal_positive_ixs), limit) # Display random sample
captions = ["{} {:.3f}".format(dataset.class_names[class_id], score) if class_id > 0 else ""
for class_id, score in
zip(proposal_class_ids[proposal_positive_ixs][ids], proposal_class_scores[proposal_positive_ixs][ids])]
visualize.draw_boxes(resized_image.copy(), boxes=proposals[proposal_positive_ixs][ids],
refined_boxes=refined_proposals[proposal_positive_ixs][ids],
visibilities=np.where(proposal_class_ids[proposal_positive_ixs][ids] > 0, 1, 0),
captions=captions, title="ROIs After Refinement",
ax=ax[2])
#################################### more steps ################################################
# Remove boxes classified as background
keep_proposal_ixs = np.where(proposal_class_ids > 0)[0]
print("Remove background proposals. Keep {}:\n{}".format(keep_proposal_ixs.shape[0], keep_proposal_ixs))
# Remove low confidence detections
keep_proposal_ixs = np.intersect1d(keep_proposal_ixs,
np.where(proposal_class_scores >= config.DETECTION_MIN_CONFIDENCE)[0])
print("Remove proposals below {} confidence. Keep {}:\n{}".format(
config.DETECTION_MIN_CONFIDENCE, keep_proposal_ixs.shape[0], keep_proposal_ixs))
# Apply per-class non-max suppression
pre_nms_proposals = refined_proposals[keep_proposal_ixs]
pre_nms_proposal_scores = proposal_class_scores[keep_proposal_ixs]
pre_nms_proposal_class_ids = proposal_class_ids[keep_proposal_ixs]
nms_keep_proposal_ixs = []
for class_id in np.unique(pre_nms_proposal_class_ids):
# Pick detections of this class
ixs = np.where(pre_nms_proposal_class_ids == class_id)[0]
# Apply NMS
class_keep = utils.non_max_suppression(pre_nms_proposals[ixs],
pre_nms_proposal_scores[ixs],
config.DETECTION_NMS_THRESHOLD)
# Map indicies
class_keep_proposal_ixs = keep_proposal_ixs[ixs[class_keep]]
nms_keep_proposal_ixs = np.union1d(nms_keep_proposal_ixs, class_keep_proposal_ixs)
print("{:12}: {} -> {}".format(dataset.class_names[class_id][:10], keep_proposal_ixs[ixs],
class_keep_proposal_ixs))
keep_proposal_ixs = np.intersect1d(keep_proposal_ixs, nms_keep_proposal_ixs).astype(np.int32)
print("\nKeep after per-class NMS: {}\n{}".format(keep_proposal_ixs.shape[0], keep_proposal_ixs))
#################################### Show final detections #####################################
ixs = np.arange(len(keep_proposal_ixs)) # Display all
# ixs = np.random.randint(0, len(keep), 10) # Display random sample
captions = ["{} {:.3f}".format(dataset.class_names[c], s) if c > 0 else ""
for c, s in
zip(proposal_class_ids[keep_proposal_ixs][ixs], proposal_class_scores[keep_proposal_ixs][ixs])]
visualize.draw_boxes(
resized_image.copy(), boxes=proposals[keep_proposal_ixs][ixs],
refined_boxes=refined_proposals[keep_proposal_ixs][ixs],
visibilities=np.where(proposal_class_ids[keep_proposal_ixs][ixs] > 0, 1, 0),
captions=captions, title="Detections after NMS",
ax=ax[3])
plt.show()
# display_mrcnn_prediction()
def display_mrcnn_mask_prediction():
#################################### Mask Targets ##############################################
    # gt_masks has shape (image_height, image_width, num_instances)
resized_image, image_meta, gt_class_ids, gt_bboxes, gt_masks = \
modellib.load_image_gt(dataset, config, image_id, use_mini_mask=False)
display_images(np.transpose(gt_masks, [2, 0, 1]), cmap="Blues")
# Get predictions of mask head
mrcnn = model.run_graph([resized_image], [
("detections", model.keras_model.get_layer("mrcnn_detection").output),
("masks", model.keras_model.get_layer("mrcnn_mask").output),
])
# Get detection class IDs. Trim zero padding.
det_class_ids = mrcnn['detections'][0, :, 4].astype(np.int32)
padding_start_ix = np.where(det_class_ids == 0)[0][0]
det_class_ids = det_class_ids[:padding_start_ix]
print("{} detections: {}".format(padding_start_ix, np.array(dataset.class_names)[det_class_ids]))
# Masks
det_boxes = utils.denorm_boxes(mrcnn["detections"][0, :, :4], resized_image.shape[:2])
    # mrcnn['masks'] has shape (batch_size, num_instances, mask_height, mask_width, num_classes)
det_mask_specific = np.array([mrcnn["masks"][0, i, :, :, c]
for i, c in enumerate(det_class_ids)])
det_masks = np.array([utils.unmold_mask(mask, det_boxes[i], resized_image.shape)
for i, mask in enumerate(det_mask_specific)])
log("det_mask_specific", det_mask_specific)
display_images(det_mask_specific[:4] * 255, cmap="Blues", interpolation="none")
log("det_masks", det_masks)
display_images(det_masks[:4] * 255, cmap="Blues", interpolation="none")
# display_mrcnn_mask_prediction()
def visualize_activations():
# Get activations of a few sample layers
resized_image, image_meta, gt_class_ids, gt_bboxes, gt_masks = \
modellib.load_image_gt(dataset, config, image_id, use_mini_mask=False)
activations = model.run_graph([resized_image], [
# ("input_image", model.keras_model.get_layer("input_image").output),
("res2c_out", model.keras_model.get_layer("res2c_out").output),
("res3c_out", model.keras_model.get_layer("res3c_out").output),
("res4w_out", model.keras_model.get_layer("res4w_out").output), # for resnet100
("rpn_bbox", model.keras_model.get_layer("rpn_bbox").output),
("roi", model.keras_model.get_layer("ROI").output),
])
# Input image (normalized)
# _ = plt.imshow(modellib.unmold_image(activations["input_image"][0], config))
# Backbone feature map
display_images(np.transpose(activations["res2c_out"][0, :, :, :4], [2, 0, 1]), cols=4)
# visualize_activations()
def show_mrcnn_prediction(image):
resized_image, window, scale, padding, crop = utils.resize_image(
image,
min_dim=config.IMAGE_MIN_DIM,
min_scale=config.IMAGE_MIN_SCALE,
max_dim=config.IMAGE_MAX_DIM,
mode=config.IMAGE_RESIZE_MODE)
# Get input and output to classifier and mask heads.
mrcnn = model.run_graph([resized_image], [
("proposals", model.keras_model.get_layer("ROI").output),
("probs", model.keras_model.get_layer("mrcnn_class").output),
("deltas", model.keras_model.get_layer("mrcnn_bbox").output),
("masks", model.keras_model.get_layer("mrcnn_mask").output),
("detections", model.keras_model.get_layer("mrcnn_detection").output),
])
################################## display detections ###############################################
# Get detection class IDs. Trim zero padding.
det_class_ids = mrcnn['detections'][0, :, 4].astype(np.int32)
padding_start_ix = np.where(det_class_ids == 0)[0][0]
det_class_ids = det_class_ids[:padding_start_ix]
detections = mrcnn['detections'][0, :padding_start_ix]
log('trimmed_detection', detections)
print("{} detections: {}".format(
padding_start_ix, np.array(dataset.class_names)[det_class_ids]))
################################### display proposals ##########################################
# Proposals are in normalized coordinates. Scale them to image coordinates.
h, w = resized_image.shape[:2]
proposals = np.around(mrcnn["proposals"][0] * np.array([h, w, h, w])).astype(np.int32)
# Class ID, score, and mask per proposal
    # mrcnn["probs"] has shape (batch_size, num_proposals=1000, num_classes)
proposal_class_ids = np.argmax(mrcnn["probs"][0], axis=1)
proposal_class_scores = mrcnn["probs"][0, np.arange(proposal_class_ids.shape[0]), proposal_class_ids]
proposal_class_names = np.array(dataset.class_names)[proposal_class_ids]
proposal_positive_ixs = np.where(proposal_class_ids > 0)[0]
# How many ROIs vs empty rows?
print("{} valid proposals out of {}".format(np.sum(np.any(proposals, axis=1)), proposals.shape[0]))
print("{} positive ROIs".format(len(proposal_positive_ixs)))
# Class counts
print(list(zip(*np.unique(proposal_class_names, return_counts=True))))
# Display a random sample of proposals.
# Proposals classified as background are dotted, and
# the rest show their class and confidence score.
limit = 200
#################################### apply bounding box refinement #############################
# Class-specific bounding box shifts.
    # mrcnn['deltas'] has shape (batch_size, num_proposals=1000, num_classes, 4)
proposal_deltas = mrcnn["deltas"][0, np.arange(proposals.shape[0]), proposal_class_ids]
log("proposals_deltas", proposal_deltas)
# Apply bounding box transformations
# Shape: (num_proposals=1000, (y1, x1, y2, x2)]
    # NOTE: deltas apply the same way whether boxes are in normalized or pixel coordinates
refined_proposals = utils.apply_box_deltas(
proposals, proposal_deltas * config.BBOX_STD_DEV).astype(np.int32)
log("refined_proposals", refined_proposals)
#################################### more steps ################################################
# Remove boxes classified as background
keep_proposal_ixs = np.where(proposal_class_ids > 0)[0]
print("Remove background proposals. Keep {}:\n{}".format(keep_proposal_ixs.shape[0], keep_proposal_ixs))
# Remove low confidence detections
keep_proposal_ixs = np.intersect1d(keep_proposal_ixs,
np.where(proposal_class_scores >= config.DETECTION_MIN_CONFIDENCE)[0])
print("Remove proposals below {} confidence. Keep {}:\n{}".format(
config.DETECTION_MIN_CONFIDENCE, keep_proposal_ixs.shape[0], keep_proposal_ixs))
# Apply per-class non-max suppression
pre_nms_proposals = refined_proposals[keep_proposal_ixs]
pre_nms_proposal_scores = proposal_class_scores[keep_proposal_ixs]
pre_nms_proposal_class_ids = proposal_class_ids[keep_proposal_ixs]
nms_keep_proposal_ixs = []
for class_id in np.unique(pre_nms_proposal_class_ids):
# Pick detections of this class
ixs = np.where(pre_nms_proposal_class_ids == class_id)[0]
# Apply NMS
class_keep = utils.non_max_suppression(pre_nms_proposals[ixs],
pre_nms_proposal_scores[ixs],
config.DETECTION_NMS_THRESHOLD)
        # Map indices
class_keep_proposal_ixs = keep_proposal_ixs[ixs[class_keep]]
nms_keep_proposal_ixs = np.union1d(nms_keep_proposal_ixs, class_keep_proposal_ixs)
print("{:12}: {} -> {}".format(dataset.class_names[class_id][:10], keep_proposal_ixs[ixs],
class_keep_proposal_ixs))
keep_proposal_ixs = np.intersect1d(keep_proposal_ixs, nms_keep_proposal_ixs).astype(np.int32)
print("\nKeep after per-class NMS: {}\n{}".format(keep_proposal_ixs.shape[0], keep_proposal_ixs))
#################################### Show final detections #####################################
ixs = np.arange(len(keep_proposal_ixs)) # Display all
refined_bboxes = refined_proposals[keep_proposal_ixs][ixs]
refined_bboxes -= np.array([window[0], window[1], window[0], window[1]])
bboxes = refined_bboxes.astype('float32') / scale
bboxes = bboxes.tolist()
return bboxes
# for bbox in bboxes:
# cv2.rectangle(image, (round(bbox[1]), round(bbox[0])), (round(bbox[3]), round(bbox[2])), (0, 255, 0), 2)
# cv2.namedWindow('image', cv2.WINDOW_NORMAL)
# cv2.imshow('image', image)
# cv2.waitKey(0)
# for image_filepath in glob.glob('/home/adam/Pictures/vat/train/*.jpg'):
# image = cv2.imread(image_filepath)
# show_mrcnn_prediction(image)
# for image_path in glob.glob('/home/adam/Videos/*.jpg'):
# remove2(model, image_path)
| [
"[email protected]"
]
| |
0cbac4b32b5e85d2aa2d17639e2fb7a6ece2316b | 66176b6735f9fb3f8eaa649cf5df87a1104b9ddb | /src/csh/align_by_permutation.py | 41f2e3f44a543dfa3dd904a0f6f8b79a69bf18cf | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-philippe-de-muyter",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-public-domain-disclaimer",
"LicenseRef-scancode-other-permissive"
]
| permissive | jswelling/Fiasco | a918b516c8880d4f627be6f8490fe01dc02f91a5 | 0e9264207c385e6c911f5458b9a90866ea14c4be | refs/heads/master | 2021-12-14T00:23:08.924678 | 2021-11-30T05:08:35 | 2021-11-30T05:08:35 | 66,334,022 | 3 | 0 | null | 2021-11-30T05:06:53 | 2016-08-23T04:49:58 | C | UTF-8 | Python | false | false | 9,918 | py | #! /usr/bin/env python
#
# ************************************************************
# * *
# * Permission is hereby granted to any individual or *
# * institution for use, copying, or redistribution of *
# * this code and associated documentation, provided *
# * that such code and documentation are not sold for *
# * profit and the following copyright notice is retained *
# * in the code and documentation: *
# * Copyright (c) 2006 Department of Statistics, *
# * Carnegie Mellon University *
# * *
# * This program is distributed in the hope that it will *
# * be useful, but WITHOUT ANY WARRANTY; without even the *
# * implied warranty of MERCHANTABILITY or FITNESS FOR A *
# * PARTICULAR PURPOSE. Neither Carnegie Mellon University *
# * nor any of the authors assume any liability for *
# * damages, incidental or otherwise, caused by the *
# * installation or use of this software. *
# * *
# * CLINICAL APPLICATIONS ARE NOT RECOMMENDED, AND THIS *
# * SOFTWARE HAS NOT BEEN EVALUATED BY THE UNITED STATES *
# * FDA FOR ANY CLINICAL USE. *
# * *
# ************************************************************
#
import sys
import os
import os.path
import string
import getopt
from math import *
if "FIASCO" in os.environ:
sys.path.append(os.environ["FIASCO"])
from fiasco_utils import *
idString= "$Id: align_by_permutation.py,v 1.6 2007/02/06 21:45:42 welling Exp $"
def checkInputStructure( chunk, unsafeFlag ):
dimstr= chunk.getValue('dimensions');
if dimstr != "xyz":
if dimstr == "xyzt":
if chunk.getDim("t") != 1 and not unsafeFlag:
sys.exit("Input file %s must have t extent 1!"%\
os.path.basename(chunk.ds.fname))
elif dimstr == "vxyzt":
if chunk.getDim("t") != 1 and not unsafeFlag:
sys.exit("Input file %s must have t extent 1!"%\
os.path.basename(chunk.ds.fname))
if chunk.getDim("v") != 1:
sys.exit("Input file %s must have v extent 1!"%\
os.path.basename(chunk.ds.fname))
elif dimstr == "vxyz":
if chunk.getDim("v") != 1:
sys.exit("Input file %s must have v extent 1!"%\
os.path.basename(chunk.ds.fname))
else:
sys.exit("Input file %s must have dimensions (v)xyz(t)!"%\
os.path.basename(chunk.ds.fname))
##############################
#
# Main
#
##############################
# Check for "-help"
if len(sys.argv)>1:
if sys.argv[1] == "-help":
if len(sys.argv)>2:
os.system( "scripthelp %s %s"%(sys.argv[0],sys.argv[2]) );
else:
os.system( "scripthelp %s"%sys.argv[0] );
sys.exit();
try:
(opts,pargs) = getopt.getopt(sys.argv[1:],"vd",["out=","unsafe"])
except getopt.GetoptError:
print("%s: Invalid command line parameter" % sys.argv[0])
describeSelf();
sys.exit()
#Check calling syntax; parse args
if len(pargs) != 2 :
describeSelf()
sys.exit(1)
outDSName= None
unsafeFlag= 0
for a,b in opts:
if a=="-v":
setVerbose(1)
if a=="-d":
setDebug(1)
if a=="--out":
outDSName= b
if a=="--unsafe":
unsafeFlag= 1
if outDSName==None:
sys.exit("Required output dataset name not given.")
inDS= MRIDataset(os.path.abspath(pargs[0]))
inChunk= inDS.getChunk('images')
protoDS= MRIDataset(os.path.abspath(pargs[1]))
protoChunk= protoDS.getChunk('images')
#Check reasonableness of input
checkInputStructure(inChunk,unsafeFlag)
checkInputStructure(protoChunk,unsafeFlag)
# Create a temporary directory
tmpdir= makeTempDir('tmp_align_by_permutation')
homedir= os.getcwd()
# Get relevant dimensions
xdim= inChunk.getDim("x");
ydim= inChunk.getDim("y");
zdim= inChunk.getDim("z");
dimstr= inChunk.getValue('dimensions');
inBBox= BBox(inChunk)
protoBBox= BBox(protoChunk)
if getVerbose():
inBBox.printBounds("Input bounding box:")
protoBBox.printBounds("Prototype bounding box:")
inRHCoordTest= inBBox.zedge.dot(inBBox.xedge.cross(inBBox.yedge))
protoRHCoordTest= protoBBox.zedge.dot(protoBBox.xedge.cross(protoBBox.yedge))
if inRHCoordTest*protoRHCoordTest < 0.0 and not unsafeFlag:
sys.exit("Input and prototype coord systems don't have same handedness!")
inAxes= { "x":inBBox.xedge.clone(), \
"y":inBBox.yedge.clone(), \
"z":inBBox.zedge.clone() }
for v1 in ["x","y","z"]: inAxes[v1].normalize()
protoAxes= { "x":protoBBox.xedge.clone(), \
"y":protoBBox.yedge.clone(), \
"z":protoBBox.zedge.clone() }
for v1 in ["x","y","z"]: protoAxes[v1].normalize()
becomesMap= {}
usedToBeMap= {}
needsReversed= {}
for v1 in ["x","y","z"]:
largestDot= 0.0;
comp= None
for v2 in ["x","y","z"]:
val= inAxes[v1].dot(protoAxes[v2])
if math.fabs(val)>math.fabs(largestDot):
largestDot= val
comp= v2
debugMessage("%s matches %s, dot %f"%(v1,comp,largestDot))
becomesMap[v1]= comp
needsReversed[v1]= ( largestDot < 0 )
debugMessage("becomesMap: %s"%repr(becomesMap))
for v1 in becomesMap.keys():
usedToBeMap[becomesMap[v1]]= v1
debugMessage("usedToBeMap: %s"%repr(usedToBeMap))
debugMessage("needsReversed: %s"%repr(needsReversed))
debugMessage("inAxes: %s"%repr(inAxes))
debugMessage("protoAxes: %s"%repr(protoAxes))
newDimstr= usedToBeMap['x']+usedToBeMap['y']+usedToBeMap['z']
newExtents= "%d:%d:%d"%(inChunk.getDim(usedToBeMap['x']),\
inChunk.getDim(usedToBeMap['y']),\
inChunk.getDim(usedToBeMap['z']))
if dimstr.startswith('v'):
newDimstr= 'v'+newDimstr
newExtents= ":"+newExtents
if dimstr.endswith('t'):
newDimstr= newDimstr+'t'
newExtents= newExtents+":"
debugMessage("dimstr <%s> becomes <%s>, extents <%s>"%\
(dimstr,newDimstr,newExtents))
# Flip the axis vectors as appropriate
for v1 in ['x','y','z']:
if needsReversed[v1]: inAxes[v1]= -1.0*inAxes[v1]
# We will now use the needsReversed info to determine which data
# dimensions need to be flipped. There is a trick here, since the
# Y data dimension is opposite the Y coordinate dimension in Fiasco
# coordinates. Thus we first dink with the needsReversed info
# to correct for this.
if becomesMap['y'] != 'y':
needsReversed[usedToBeMap['y']]= ( not needsReversed[usedToBeMap['y']] )
needsReversed['y']= ( not needsReversed['y'] )
debugMessage("needsReversed after correction for data order: %s"%\
repr(needsReversed))
# Handle axis reversals via the double-fft trick
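# (Applying the forward DFT twice yields a circularly reversed copy of the input,
# DFT(DFT(x))[n] ~ x[(-n) mod N]; taking the modulus after the second pass keeps
# the reversed, non-negative image data, so an axis is flipped without resampling.)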
currentDSName= inDS.fname
if needsReversed['x']:
if needsReversed['y']:
# use xy fft
safeRun("mri_fft -d xy -fwd -cpx %s %s"%\
(currentDSName,os.path.join(tmpdir,"tmp1")))
safeRun("mri_fft -d xy -fwd -mod %s %s"%\
(os.path.join(tmpdir,"tmp1"),os.path.join(tmpdir,"tmp2")))
needsReversed['y']= 0
else:
# use x fft
safeRun("mri_fft -d x -fwd -cpx %s %s"%\
(currentDSName,os.path.join(tmpdir,"tmp1")))
safeRun("mri_fft -d x -fwd -mod %s %s"%\
(os.path.join(tmpdir,"tmp1"),os.path.join(tmpdir,"tmp2")))
currentDSName= os.path.join(tmpdir,"tmp2")
needsReversed['x']= 0
if not dimstr.startswith('v'):
safeRun("mri_remap -order %s %s"%(dimstr,currentDSName))
if needsReversed['y']:
if needsReversed['z']:
# use yz fft
safeRun("mri_fft -d yz -fwd -cpx %s %s"%\
(currentDSName,os.path.join(tmpdir,"tmp3")))
safeRun("mri_fft -d yz -fwd -mod %s %s"%\
(os.path.join(tmpdir,"tmp3"),os.path.join(tmpdir,"tmp4")))
needsReversed['z']= 0
else:
# use y fft
safeRun("mri_fft -d y -fwd -cpx %s %s"%\
(currentDSName,os.path.join(tmpdir,"tmp3")))
safeRun("mri_fft -d y -fwd -mod %s %s"%\
(os.path.join(tmpdir,"tmp3"),os.path.join(tmpdir,"tmp4")))
currentDSName= os.path.join(tmpdir,"tmp4")
needsReversed['y']= 0
if not dimstr.startswith('v'):
safeRun("mri_remap -order %s %s"%(dimstr,currentDSName))
if needsReversed['z']:
# use z fft
safeRun("mri_fft -d z -fwd -cpx %s %s"%\
(currentDSName,os.path.join(tmpdir,"tmp5")))
safeRun("mri_fft -d z -fwd -mod %s %s"%\
(os.path.join(tmpdir,"tmp5"),os.path.join(tmpdir,"tmp6")))
currentDSName= os.path.join(tmpdir,"tmp6")
needsReversed['z']= 0
if not dimstr.startswith('v'):
safeRun("mri_remap -order %s %s"%(dimstr,currentDSName))
debugMessage("inAxes now %s"%repr(inAxes))
if dimstr != newDimstr:
safeRun("mri_permute -order %s %s %s"%(newDimstr,currentDSName,outDSName))
safeRun("mri_remap -order %s -len %s %s"%(dimstr,newExtents,outDSName))
else:
safeRun("mri_copy_dataset %s %s"%(currentDSName,outDSName))
outDS= MRIDataset(outDSName)
outChunk= outDS.getChunk('images')
outBBox= BBox(outChunk)
outBBox.setCtr(inBBox.ctr)
outBBox.setVox([inChunk.getFloat("voxel_spacing.%s"%usedToBeMap['x']),\
inChunk.getFloat("voxel_spacing.%s"%usedToBeMap['y']),\
inChunk.getFloat("voxel_spacing.%s"%usedToBeMap['z'])])
outBBox.setCorners(inAxes[usedToBeMap['x']],\
inAxes[usedToBeMap['y']],\
inAxes[usedToBeMap['z']])
if getVerbose():
outBBox.printBounds("Output bounding box:")
outBBox.exportBounds()
# Clean up
os.chdir(homedir)
if not getDebug():
removeTmpDir(tmpdir)
| [
"[email protected]"
]
| |
b6ddfd1034f68fcb04d7dd7367c60d64d74c567f | 0da8fdae806b73e9dc57e052dcf1171c5a2c7f28 | /01_Python基础/05_高级数据类型/study_17_字符串的查找和替换.py | 21ac39e23afe3709e5be97d72fd7c073e80aff77 | []
| no_license | xujinshan361/python_study_code | ed37db128c55ee2ad9f7b2db04785c632a7115d4 | e6ce0bdd8243dfaadf56213ef0120d215de0d0cd | refs/heads/master | 2020-12-10T12:19:45.792310 | 2020-01-13T12:48:22 | 2020-01-13T12:48:22 | 233,592,034 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 666 | py | hello_str = "hello word"
# 1. Check whether the string starts with the given prefix
print(hello_str.startswith("he"))
# 2. Check whether the string ends with the given suffix
print(hello_str.endswith("word"))
# 3. Find a substring
# index can also find the index of a substring within the larger string
print(hello_str.find("lo"))
# index raises an error if the substring does not exist
# find returns -1 if the substring does not exist
# print(hello_str.index("abc"))
print(hello_str.find("abc"))
# 4. Replace a substring
# the replace method returns a new string
# note: it does not modify the original string
print(hello_str.replace("word", "python"))
print(hello_str)
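# For reference: find("lo") prints 3, find("abc") prints -1, and the final print
# still shows "hello word" because replace returned a new string.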
| [
"[email protected]"
]
| |
e40d8657052e26d4cd67730ceea350b9fcbf5a6c | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/constant/ParamConstants.py | 1de953edff0cd37434ed88f9fc6c6feb2d8c34c5 | [
"Apache-2.0"
]
| permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 938 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2017-12-20
@author: liuqun
'''
COMMON_PARAM_KEYS = set()
P_APP_ID = "app_id"
COMMON_PARAM_KEYS.add(P_APP_ID)
P_METHOD = "method"
COMMON_PARAM_KEYS.add(P_METHOD)
P_FORMAT = "format"
COMMON_PARAM_KEYS.add(P_FORMAT)
P_CHARSET = "charset"
COMMON_PARAM_KEYS.add(P_CHARSET)
P_SIGN_TYPE = "sign_type"
COMMON_PARAM_KEYS.add(P_SIGN_TYPE)
P_SIGN = "sign"
COMMON_PARAM_KEYS.add(P_SIGN)
P_ENCRYPT_TYPE = "encrypt_type"
COMMON_PARAM_KEYS.add(P_ENCRYPT_TYPE)
P_TIMESTAMP = "timestamp"
COMMON_PARAM_KEYS.add(P_TIMESTAMP)
P_VERSION = "version"
COMMON_PARAM_KEYS.add(P_VERSION)
P_NOTIFY_URL = "notify_url"
COMMON_PARAM_KEYS.add(P_NOTIFY_URL)
P_RETURN_URL = "return_url"
COMMON_PARAM_KEYS.add(P_RETURN_URL)
P_AUTH_TOKEN = "auth_token"
COMMON_PARAM_KEYS.add(P_AUTH_TOKEN)
P_APP_AUTH_TOKEN = "app_auth_token"
COMMON_PARAM_KEYS.add(P_APP_AUTH_TOKEN)
P_BIZ_CONTENT = "biz_content"
| [
"[email protected]"
]
| |
6552dea2d2667854202895aec4f0df5259855cbc | b0f6dbd92c368bd68fa1aafd67fdde9c323ab1be | /config.py | 578b0ee4e0b9ed526e8784e67ae9a7c91b5a685d | [
"Apache-2.0"
]
| permissive | niezhongliang/InsightFace-v3 | ac62cff7d4aeb957fac9189ccca26976f9a045e9 | e10cefec3bf0c465c92c42980ecbdb32eacc6dd5 | refs/heads/master | 2020-09-15T20:36:16.087481 | 2019-11-23T00:23:46 | 2019-11-23T00:23:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,130 | py | import logging
import os
import torch
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # sets device for model and PyTorch tensors
# Model parameters
image_w = 112
image_h = 112
channel = 3
emb_size = 512
# Training parameters
num_workers = 8  # number of data-loading workers
grad_clip = 5. # clip gradients at an absolute value of
print_freq = 100 # print training/validation stats every __ batches
checkpoint = None # path to checkpoint, None if none
# Data parameters
num_classes = 85164
num_samples = 3804846
DATA_DIR = 'data'
faces_ms1m_folder = 'data/faces_ms1m_112x112'
path_imgidx = os.path.join(faces_ms1m_folder, 'train.idx')
path_imgrec = os.path.join(faces_ms1m_folder, 'train.rec')
IMG_DIR = 'data/images'
pickle_file = 'data/faces_ms1m_112x112.pickle'
def get_logger():
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter("%(asctime)s %(levelname)s \t%(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
return logger
logger = get_logger()
| [
"[email protected]"
]
| |
f9460bdd828edd3892ba9506260ad360ad7bfbad | ef3a7391b0a5c5d8e276355e97cbe4de621d500c | /venv/Lib/site-packages/thinc/neural/train.py | 1a0492b1e6ef38288d5f82838d0e13063fc3efe1 | [
"MIT",
"Apache-2.0"
]
| permissive | countBMB/BenjiRepo | 143f6da5d198ea6f06404b4559e1f4528b71b3eb | 79d882263baaf2a11654ca67d2e5593074d36dfa | refs/heads/master | 2022-12-11T07:37:04.807143 | 2019-12-25T11:26:29 | 2019-12-25T11:26:29 | 230,090,428 | 1 | 1 | Apache-2.0 | 2022-12-08T03:21:09 | 2019-12-25T11:05:59 | Python | UTF-8 | Python | false | false | 1,862 | py | # coding: utf8
from __future__ import unicode_literals
import numpy.random
from tqdm import tqdm
from .optimizers import Adam, linear_decay
class Trainer(object):
def __init__(self, model, **cfg):
self.ops = model.ops
self.model = model
self.L2 = cfg.get("L2", 0.0)
self.optimizer = Adam(model.ops, 0.001, decay=0.0, eps=1e-8, L2=self.L2)
self.batch_size = cfg.get("batch_size", 128)
self.nb_epoch = cfg.get("nb_epoch", 20)
self.i = 0
self.dropout = cfg.get("dropout", 0.0)
self.dropout_decay = cfg.get("dropout_decay", 0.0)
self.each_epoch = []
def __enter__(self):
return self, self.optimizer
def __exit__(self, exc_type, exc_val, exc_tb):
self.model.use_params(self.optimizer.averages)
def iterate(self, train_X, train_y, progress_bar=True):
orig_dropout = self.dropout
for i in range(self.nb_epoch):
indices = numpy.arange(len(train_X))
numpy.random.shuffle(indices)
indices = self.ops.asarray(indices)
j = 0
with tqdm(total=indices.shape[0], leave=False) as pbar:
while j < indices.shape[0]:
slice_ = indices[j : j + self.batch_size]
X = _take_slice(train_X, slice_)
y = _take_slice(train_y, slice_)
yield X, y
self.dropout = linear_decay(orig_dropout, self.dropout_decay, j)
j += self.batch_size
if progress_bar:
pbar.update(self.batch_size)
for func in self.each_epoch:
func()
def _take_slice(data, slice_):
if isinstance(data, list) or isinstance(data, tuple):
return [data[int(i)] for i in slice_]
else:
return data[slice_]
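# Minimal usage sketch (illustrative; `model`, `train_X` and `train_y` are
# assumptions -- any thinc model exposing `.ops` and `begin_update` works):
#
#   with Trainer(model, batch_size=64, nb_epoch=5) as (trainer, optimizer):
#       for X, y in trainer.iterate(train_X, train_y):
#           yh, backprop = model.begin_update(X, drop=trainer.dropout)
#           backprop(yh - y, optimizer)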
| [
"[email protected]"
]
| |
f272ca7a76a6022c13d750e5b42a0e7394821aaf | 3e1ae16ca94c652b5a05a9c150d85d5e909186b7 | /edward2/tensorflow/layers/made.py | d5f4b7e92bd56f65897396ccc65cf8a83204397d | [
"Apache-2.0"
]
| permissive | google/edward2 | c2c75f2b7a72b96eb9cdef94756c47fc205e3110 | ccdb9bfb11fe713bc449f0e884b405f619f58059 | refs/heads/main | 2023-08-31T10:33:16.291114 | 2023-07-27T20:36:19 | 2023-07-27T20:37:25 | 201,102,332 | 710 | 96 | Apache-2.0 | 2023-09-11T20:41:55 | 2019-08-07T18:01:40 | Jupyter Notebook | UTF-8 | Python | false | false | 9,656 | py | # coding=utf-8
# Copyright 2023 The Edward2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Masked autoencoder for distribution estimation."""
import numpy as np
import tensorflow as tf
class MADE(tf.keras.Model):
"""Masked autoencoder for distribution estimation (Germain et al., 2015).
MADE takes as input a real Tensor of shape [..., length, channels] and returns
a Tensor of shape [..., length, units] and same dtype. It masks layer weights
to satisfy autoregressive constraints with respect to the length dimension. In
particular, for a given ordering, each input dimension of length can be
reconstructed from previous dimensions.
The output's units dimension captures per-time-step representations. For
example, setting units to 2 can parameterize the location and log-scale of an
autoregressive Gaussian distribution.
"""
def __init__(self,
units,
hidden_dims,
input_order='left-to-right',
hidden_order='left-to-right',
activation=None,
use_bias=True,
**kwargs):
"""Constructs network.
Args:
units: Positive integer, dimensionality of the output space.
hidden_dims: list with the number of hidden units per layer. It does not
include the output layer; those number of units will always be set to
the input dimension multiplied by `num_heads`. Each hidden unit size
must be at least the size of length (otherwise autoregressivity is not
possible).
input_order: Order of degrees to the input units: 'random',
'left-to-right', 'right-to-left', or an array of an explicit order.
For example, 'left-to-right' builds an autoregressive model
p(x) = p(x1) p(x2 | x1) ... p(xD | x<D).
hidden_order: Order of degrees to the hidden units: 'random',
'left-to-right'. If 'left-to-right', hidden units are allocated equally
(up to a remainder term) to each degree.
activation: Activation function.
use_bias: Whether to use a bias.
**kwargs: Keyword arguments of parent class.
"""
super(MADE, self).__init__(**kwargs)
self.units = int(units)
self.hidden_dims = hidden_dims
self.input_order = input_order
self.hidden_order = hidden_order
self.activation = tf.keras.activations.get(activation)
self.use_bias = use_bias
self.network = tf.keras.Sequential([])
def build(self, input_shape):
input_shape = tf.TensorShape(input_shape)
length = input_shape[-2]
channels = input_shape[-1]
if length is None or channels is None:
raise ValueError('The two last dimensions of the inputs to '
'`MADE` should be defined. Found `None`.')
masks = create_masks(input_dim=length,
hidden_dims=self.hidden_dims,
input_order=self.input_order,
hidden_order=self.hidden_order)
# Input-to-hidden layer: [..., length, channels] -> [..., hidden_dims[0]].
self.network.add(tf.keras.layers.Reshape([length * channels]))
# Tile the mask so each element repeats contiguously; this is compatible
    # with the autoregressive constraints unlike naive tiling.
mask = masks[0]
mask = tf.tile(mask[:, tf.newaxis, :], [1, channels, 1])
mask = tf.reshape(mask, [mask.shape[0] * channels, mask.shape[-1]])
if self.hidden_dims:
layer = tf.keras.layers.Dense(
self.hidden_dims[0],
kernel_initializer=make_masked_initializer(mask),
kernel_constraint=make_masked_constraint(mask),
activation=self.activation,
use_bias=self.use_bias)
self.network.add(layer)
# Hidden-to-hidden layers: [..., hidden_dims[l-1]] -> [..., hidden_dims[l]].
for l in range(1, len(self.hidden_dims)):
layer = tf.keras.layers.Dense(
self.hidden_dims[l],
kernel_initializer=make_masked_initializer(masks[l]),
kernel_constraint=make_masked_constraint(masks[l]),
activation=self.activation,
use_bias=self.use_bias)
self.network.add(layer)
# Hidden-to-output layer: [..., hidden_dims[-1]] -> [..., length, units].
# Tile the mask so each element repeats contiguously; this is compatible
    # with the autoregressive constraints unlike naive tiling.
if self.hidden_dims:
mask = masks[-1]
mask = tf.tile(mask[..., tf.newaxis], [1, 1, self.units])
mask = tf.reshape(mask, [mask.shape[0], mask.shape[1] * self.units])
layer = tf.keras.layers.Dense(
length * self.units,
kernel_initializer=make_masked_initializer(mask),
kernel_constraint=make_masked_constraint(mask),
activation=None,
use_bias=self.use_bias)
self.network.add(layer)
self.network.add(tf.keras.layers.Reshape([length, self.units]))
self.built = True
def call(self, inputs): # pytype: disable=signature-mismatch # overriding-parameter-count-checks
return self.network(inputs)
def create_degrees(input_dim,
hidden_dims,
input_order='left-to-right',
hidden_order='left-to-right'):
"""Returns a list of degree vectors, one for each input and hidden layer.
A unit with degree d can only receive input from units with degree < d. Output
units always have the same degree as their associated input unit.
Args:
input_dim: Number of inputs.
hidden_dims: list with the number of hidden units per layer. It does not
include the output layer. Each hidden unit size must be at least the size
of length (otherwise autoregressivity is not possible).
input_order: Order of degrees to the input units: 'random', 'left-to-right',
'right-to-left', or an array of an explicit order. For example,
'left-to-right' builds an autoregressive model
p(x) = p(x1) p(x2 | x1) ... p(xD | x<D).
hidden_order: Order of degrees to the hidden units: 'random',
'left-to-right'. If 'left-to-right', hidden units are allocated equally
(up to a remainder term) to each degree.
"""
if (isinstance(input_order, str) and
input_order not in ('random', 'left-to-right', 'right-to-left')):
raise ValueError('Input order is not valid.')
if hidden_order not in ('random', 'left-to-right'):
raise ValueError('Hidden order is not valid.')
degrees = []
if isinstance(input_order, str):
input_degrees = np.arange(1, input_dim + 1)
if input_order == 'right-to-left':
input_degrees = np.flip(input_degrees, 0)
elif input_order == 'random':
np.random.shuffle(input_degrees)
else:
input_order = np.array(input_order)
    # Raise if input_order is not a permutation of 1..input_dim; the original
    # `np.all` only fired when *every* position differed.
    if np.any(np.sort(input_order) != np.arange(1, input_dim + 1)):
raise ValueError('invalid input order')
input_degrees = input_order
degrees.append(input_degrees)
for units in hidden_dims:
if hidden_order == 'random':
min_prev_degree = min(np.min(degrees[-1]), input_dim - 1)
hidden_degrees = np.random.randint(
low=min_prev_degree, high=input_dim, size=units)
elif hidden_order == 'left-to-right':
hidden_degrees = (np.arange(units) % max(1, input_dim - 1) +
min(1, input_dim - 1))
degrees.append(hidden_degrees)
return degrees
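# Illustrative example (an assumption for concreteness): create_degrees(3, [4])
# with the default left-to-right orders returns input degrees [1, 2, 3] and
# hidden degrees [1, 2, 1, 2], i.e. np.arange(4) % max(1, 2) + 1.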
def create_masks(input_dim,
hidden_dims,
input_order='left-to-right',
hidden_order='left-to-right'):
"""Returns a list of binary mask matrices respecting autoregressive ordering.
Args:
input_dim: Number of inputs.
hidden_dims: list with the number of hidden units per layer. It does not
include the output layer; those number of units will always be set to
input_dim downstream. Each hidden unit size must be at least the size of
length (otherwise autoregressivity is not possible).
input_order: Order of degrees to the input units: 'random', 'left-to-right',
'right-to-left', or an array of an explicit order. For example,
'left-to-right' builds an autoregressive model
p(x) = p(x1) p(x2 | x1) ... p(xD | x<D).
hidden_order: Order of degrees to the hidden units: 'random',
'left-to-right'. If 'left-to-right', hidden units are allocated equally
(up to a remainder term) to each degree.
"""
degrees = create_degrees(input_dim, hidden_dims, input_order, hidden_order)
masks = []
# Create input-to-hidden and hidden-to-hidden masks.
for input_degrees, output_degrees in zip(degrees[:-1], degrees[1:]):
mask = tf.cast(input_degrees[:, np.newaxis] <= output_degrees, tf.float32)
masks.append(mask)
# Create hidden-to-output mask.
mask = tf.cast(degrees[-1][:, np.newaxis] < degrees[0], tf.float32)
masks.append(mask)
return masks
def make_masked_initializer(mask):
initializer = tf.keras.initializers.GlorotUniform()
def masked_initializer(shape, dtype=None):
return mask * initializer(shape, dtype)
return masked_initializer
def make_masked_constraint(mask):
constraint = tf.identity
def masked_constraint(x):
return mask * constraint(x)
return masked_constraint
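# Minimal usage sketch (an illustration added here, not part of the original
# module; the shapes are assumptions): MADE over length-8 inputs with 2
# channels, emitting 3 units per time step.
if __name__ == '__main__':
  made = MADE(units=3, hidden_dims=[32, 32])
  outputs = made(tf.zeros([16, 8, 2]))  # [batch, length, channels]
  print(outputs.shape)  # (16, 8, 3)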
| [
"[email protected]"
]
| |
85a20a5685d762ddec4eeda36978c63036c74206 | 6a01a9287a4c23c7f11b7c5399cfb96bbe42eba8 | /python/scripts/make_id_table_with_diff_expr.py | 314e25e631430921796b32ad7d8d52c104d61aff | [
"MIT"
]
| permissive | xguse/gmm-to-gff-transcripts-vs-snps | 3c25bf2752aee76174d5dab92060fe7269caf99f | 75337135ab8ff6d840af3cfccfe6404a06777a54 | refs/heads/master | 2021-01-19T01:50:33.473897 | 2016-08-02T20:31:18 | 2016-08-02T20:31:18 | 54,731,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,455 | py | """Describe here what this rule accomplishes."""
import pandas as pd
import numpy as np
# Settings
edger_results_labels = snakemake.params.edger_results_labels
cufflinks_results_labels = snakemake.params.cufflinks_results_labels
# input
edger_results = snakemake.input.edger_results
cufflinks_results = snakemake.input.cufflinks_results
ids_no_diff_expr = snakemake.input.ids_no_diff_expr
#output
ids_with_diff_expr = snakemake.output.ids_with_diff_expr
def load_and_filter_diff_expr_data(path, ids, comparison, program, fdr_thresh):
"""Return new dataframe that has standardized and filtered the DE input tables.
`path` (str):
location of input file
`ids` (dataframe):
with the following columns
- tcons_id
- xloc_id
- gene_id_external
- gene_id_internal
`comparison` (str):
describe the RNA-seq analysis run ('midgut', 'salivary gland', etc)
`program` (str):
one of ['edger', 'cufflinks']
`fdr_thresh` (float):
defining multiple testing significance threshold above which DE tests should NOT be reported
"""
column_conversions = {'edger': {'Gene_Name': 'gene_id_external',
'Gene_ID': 'xloc_id',
'logFC': 'lg2_fc',
'PValue': 'p',
'FDR': 'fdr'},
'cufflinks': {'gene': 'gene_id_external',
'gene_id': 'xloc_id',
'log2.fold_change.': 'lg2_fc',
'p_value': 'p',
'q_value': 'fdr'},
}
keep_columns = ["de_id", "xloc_id", "tcons_id","gene_id_external","gene_id_internal","lg2_fc","p","fdr","comparison","program"]
de_id_program_map = {'edger': 'EDGR',
'cufflinks': 'CUFF',
}
# Load
df = pd.read_csv(path, sep='\t')
# Convert Columns
df = df.rename(columns=column_conversions[program])
    # Coerce fdr to numeric so the threshold comparison below works;
    # missing values ('-') become NaN
    df['fdr'] = pd.to_numeric(df['fdr'], errors='coerce')
# Filter for fdr
    df = df.query("fdr <= @fdr_thresh").copy()
# Add Columns
df['program'] = program
df['comparison'] = comparison
df['de_id'] = generate_de_ids(df=df,
de_type=de_id_program_map[program],
type_mod='|{comparison}'.format(comparison=comparison),
nlen=7)
# Join external and internal IDS
    df = pd.merge(left=df, right=ids,
how='left',
on=None, left_on=None, right_on=None,
left_index=False, right_index=False,
sort=False, suffixes=('_x', '_y'), copy=True, indicator=False).fillna('-')
# Retain only needed columns
df = df[keep_columns]
# Return dataframe
return df.copy()
def generate_de_ids(df,de_type,type_mod='',nlen=7):
"""Generate unique tracking IDs for each statistical test of diff expr."""
template = '{de}{mod}_{{0:0{nlen}d}}'.format(de=de_type, mod=type_mod, nlen=nlen)
return [template.format(n) for n in range(1,len(df)+1)]
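# Illustrative output (values are assumptions): with de_type='EDGR',
# type_mod='|midgut' and a two-row dataframe, generate_de_ids returns
# ['EDGR|midgut_0000001', 'EDGR|midgut_0000002'].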
ids_no_diff_expr = pd.read_csv(ids_no_diff_expr)
table_list = []
# Load EDGER DE results
for name, path in zip(edger_results_labels, edger_results):
df = load_and_filter_diff_expr_data(path=path,
ids=ids_no_diff_expr,
comparison=name,
program='edger',
fdr_thresh=0.05)
table_list.append(df)
# Load CUFFLINKS DE results
for name, path in zip(cufflinks_results_labels, cufflinks_results):
df = load_and_filter_diff_expr_data(path=path,
ids=ids_no_diff_expr,
comparison=name,
program='cufflinks',
fdr_thresh=0.05)
table_list.append(df)
# Concat all result files into single dataframe
combined = pd.concat(objs=table_list, axis=0)
# Write out the resulting dataframe
combined.to_csv(path_or_buf=ids_with_diff_expr,
sep=',',
header=True, index=False,)
| [
"[email protected]"
]
| |
f258f81afafb2186624f0028d7416f7aca37869d | 3114430ce15c18281117459e26eea4b774e3998a | /day4/accounts/models.py | 1fd9d1bf8a13f354846f792bd07b42ea810b5486 | [
"MIT"
]
| permissive | Joseamica/Easily-written-Django | c02e7333e84ca2257b7b8bfae3f6732898c5000a | 0b746638751702c453db9490fe29ef6d34e4a3bc | refs/heads/master | 2021-05-27T20:25:41.341149 | 2014-05-25T08:25:53 | 2014-05-25T08:25:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 445 | py | from django.db import models
from django.contrib.auth.models import User
import hashlib
# Create your models here.
class Account(models.Model):
user = models.OneToOneField(User)
def gravatar_url(self):
return "http://www.gravatar.com/avatar/%s?s=50" % hashlib.md5(self.user.email).hexdigest()
def __unicode__(self):
        return self.user.username
User.profile = property(lambda u: Account.objects.get_or_create(user=u)[0])
| [
"[email protected]"
]
| |
538fc3e6a7b554c75a45025f802bf9fb341dae19 | d6e287bbba11be4906e599d1362c9ef89c4fb9de | /modules/utils/datasets/__init__.py | 53671ef9604559f6da0848293411281007d9f83b | [
"MIT"
]
| permissive | bityangke/WSDDN.pytorch-1 | 67d52f158238f2d5b234ddefeb7f05f06bf6b123 | 9a67323c80566cacc762c68021824aa80a82c524 | refs/heads/master | 2022-11-15T00:22:44.903418 | 2020-07-06T13:43:22 | 2020-07-06T13:43:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | '''import all'''
from .Sampler import GroupSampler
from .VOCDataset import VOCDataset
from .Builder import buildDataloader
'''define all'''
__all__ = ['GroupSampler', 'VOCDataset', 'buildDataloader'] | [
"[email protected]"
]
| |
0a390ae66c096ec7b6e7b0aff70e9f8e2f83aec5 | 68d38b305b81e0216fa9f6769fe47e34784c77f2 | /alascrapy/spiders/tomsguide_fr.py | 512927ada4008f9bf18b66205719a64e70329068 | []
| no_license | ADJet1437/ScrapyProject | 2a6ed472c7c331e31eaecff26f9b38b283ffe9c2 | db52844411f6dac1e8bd113cc32a814bd2ea3632 | refs/heads/master | 2022-11-10T05:02:54.871344 | 2020-02-06T08:01:17 | 2020-02-06T08:01:17 | 237,448,562 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,269 | py | # -*- coding: utf8 -*-
from datetime import datetime
import re
from scrapy.http import Request, HtmlResponse
from scrapy.selector import Selector
from alascrapy.spiders.base_spiders.ala_spider import AlaSpider
from alascrapy.spiders.base_spiders.bazaarvoice_spider import BVNoSeleniumSpider
from alascrapy.lib.generic import get_full_url, date_format
import alascrapy.lib.dao.incremental_scraping as incremental_utils
from alascrapy.items import CategoryItem, ProductItem, ReviewItem, ProductIdItem
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from alascrapy.lib.selenium_browser import SeleniumBrowser
class Tomsguide_frSpider(AlaSpider):
name = 'tomsguide_fr'
allowed_domains = ['tomsguide.fr']
start_urls = ['http://www.tomsguide.fr/articles/tests/']
def parse(self, response):
original_url = response.url
product = response.meta.get("product", {})
review = response.meta.get("review", {})
url_xpath = "(//ul[@class='pager']//li)[last()]/a/@href"
single_url = self.extract(response.xpath(url_xpath))
if single_url:
            # The regex-filter template here was generated empty, which left
            # `matches` as None and made the method return before ever
            # following pagination; the dead template block and the debug
            # print are removed.
            single_url = get_full_url(original_url, single_url)
request = Request(single_url, callback=self.parse)
try:
request.meta["product"] = product
except:
pass
try:
request.meta["review"] = review
except:
pass
yield request
urls_xpath = "//ul[@class='listing-items']/li/div/div[1]/a/@href"
params_regex = {}
urls = self.extract_list(response.xpath(urls_xpath))
for single_url in urls:
            # Same empty regex-filter template: `matches` was always None, so
            # every iteration hit `continue` and no review page was ever
            # requested; the dead block is removed.
single_url = get_full_url(original_url, single_url)
request = Request(single_url, callback=self.level_2)
try:
request.meta["product"] = product
except:
pass
try:
request.meta["review"] = review
except:
pass
yield request
def level_2(self, response):
original_url = response.url
product = response.meta.get("product", {})
review = response.meta.get("review", {})
category_leaf_xpath = "(//ul[@class='breadcrumb']/li//text())[last()-1]"
category_path_xpath = "//ul[@class='breadcrumb']/li//text()"
category = CategoryItem()
category['category_url'] = original_url
category['category_leaf'] = self.extract(response.xpath(category_leaf_xpath))
category['category_path'] = self.extract_all(response.xpath(category_path_xpath), ' | ')
if self.should_skip_category(category):
return
yield category
product_xpaths = {
"source_internal_id": "//link[@rel='canonical']/@href",
"ProductName":"//span[@class='sbs-header-title']//text()",
"OriginalCategoryName":"//ul[@class='breadcrumb']/li//text()",
}
product = self.init_item_by_xpaths(response, "product", product_xpaths)
product['TestUrl'] = original_url
picurl = product.get("PicURL", "")
if picurl and picurl[:2] == "//":
product["PicURL"] = "https:" + product["PicURL"]
        elif picurl and picurl[:1] == "/":
product["PicURL"] = get_full_url(original_url, picurl)
try:
product["OriginalCategoryName"] = category['category_path']
except:
pass
review_xpaths = {
"source_internal_id": "//link[@rel='canonical']/@href",
"ProductName":"//span[@class='sbs-header-title']//text()",
"SourceTestRating":"//div[@class='p-u-1-3 inner10 review-bar-rating']//text()",
"TestDateText":"//div[@class='author nolinks']//time[@itemprop='datePublished']//text()",
"TestPros":"(//div[@class='sbs-advice-title'])[1]/following-sibling::ul/li//text()",
"TestCons":"(//div[@class='sbs-advice-title'])[2]/following-sibling::ul/li//text()",
"TestVerdict":"//span[@class='sbc-advice-text']//text()",
"Author":"//div[@class='author nolinks']//span[@itemprop='author']//text()",
"TestTitle":"//h1[@itemprop='headline']//text()",
}
review = self.init_item_by_xpaths(response, "review", review_xpaths)
review['TestUrl'] = original_url
try:
review['ProductName'] = product['ProductName']
review['source_internal_id'] = product['source_internal_id']
except:
pass
awpic_link = review.get("AwardPic", "")
if awpic_link and awpic_link[:2] == "//":
review["AwardPic"] = "https:" + review["AwardPic"]
        elif awpic_link and awpic_link[:1] == "/":
review["AwardPic"] = get_full_url(original_url, awpic_link)
matches = None
field_value = product.get("source_internal_id", "")
if field_value:
matches = re.search("(\d+)(?=\.html)", field_value, re.IGNORECASE)
if matches:
product["source_internal_id"] = matches.group(1)
matches = None
field_value = review.get("source_internal_id", "")
if field_value:
matches = re.search("(\d+)(?=\.html)", field_value, re.IGNORECASE)
if matches:
review["source_internal_id"] = matches.group(1)
matches = None
field_value = review.get("SourceTestRating", "")
if field_value:
matches = re.search("(\d+)(?=\/)", field_value, re.IGNORECASE)
if matches:
review["SourceTestRating"] = matches.group(1)
matches = None
field_value = review.get("TestDateText", "")
if field_value:
matches = re.search("(.*)(?=\d{2}:)", field_value, re.IGNORECASE)
if matches:
review["TestDateText"] = matches.group(1)
if review["TestDateText"]:
review["TestDateText"] = review["TestDateText"].lower().replace('.'.lower(), "")
review["TestDateText"] = review["TestDateText"].strip()
review["TestDateText"] = date_format(review["TestDateText"], "%d %B %Y", ["fr"])
review["SourceTestScale"] = "10"
review["DBaseCategoryName"] = "PRO"
yield product
yield review
| [
"[email protected]"
]
| |
826f60594002015e659cc80aca283bfe601d0b98 | 0c958692bb3abf99ecbd03bd75a605b202d4da5a | /CRAB/MuNu/synch/2014ocbr24/synchThree.py | 0bc6084ab362fd99b029e74554cc6bfc9b96b5f1 | []
| no_license | tmrhombus/UWAnalysis | a9ed18a7ba8726522c8d98fbdc018c77d80c5cc5 | eb9e0794e1b847f36c660a55d3631176a39148e2 | refs/heads/master | 2021-01-23T20:46:41.578341 | 2017-05-01T08:26:57 | 2017-05-01T08:26:57 | 10,620,824 | 0 | 0 | null | 2014-10-21T11:21:16 | 2013-06-11T12:19:43 | Python | UTF-8 | Python | false | false | 4,427 | py | cut = 'C1_data_2014ocbr23_m12e10_smrGenNu_clnMu'
#cut = 'C3_tt_2014ocbr23_m12e10_smrGenNu_clnMu'
andreas_events = set([line.strip() for line in open('./comp/%s_and.txt'%(cut))])
jelenas_events = set([line.strip() for line in open('./comp/%s_jel.txt'%(cut))])
toms_events = set([line.strip() for line in open('./comp/%s_tom.txt'%(cut))])
just_andrea = []
just_jelena = []
just_tom = []
t_noJ = []
t_noA = []
j_noT = []
j_noA = []
a_noT = []
a_noJ = []
t_j = []
a_t = []
j_a = []
t_j_a = []
#runover = set([line.strip() for line in open('./comp/badevents.txt')])
runover = set([])
for toms_event in toms_events:
tj = False
ta = False
for jelenas_event in jelenas_events:
if long(toms_event) == long(jelenas_event):
tj = True
break
for andreas_event in andreas_events:
if long(toms_event) == long(andreas_event):
ta = True
break
if tj == False and ta == False:
just_tom.append(toms_event)
runover.add(toms_event)
if tj == False:
t_noJ.append(toms_event)
runover.add(toms_event)
if ta == False:
t_noA.append(toms_event)
runover.add(toms_event)
if tj == True and ta == True: t_j_a.append(toms_event)
if tj == True: t_j.append(toms_event)
if ta == True: a_t.append(toms_event)
for andreas_event in andreas_events:
at = False
aj = False
for toms_event in toms_events:
if long(andreas_event) == long(toms_event):
at = True
break
for jelenas_event in jelenas_events:
if long(andreas_event) == long(jelenas_event):
aj = True
break
if at == False and aj == False:
just_andrea.append(andreas_event)
runover.add(andreas_event)
if at == False:
a_noT.append(andreas_event)
runover.add(andreas_event)
if aj == False:
a_noJ.append(andreas_event)
runover.add(andreas_event)
if aj == True: j_a.append(andreas_event)
for jelenas_event in jelenas_events:
ja = False
jt = False
for andreas_event in andreas_events:
if long(andreas_event) == long(jelenas_event):
ja = True
break
for toms_event in toms_events:
if long(toms_event) == long(jelenas_event):
            jt = True
            break
if ja == False and jt == False:
just_jelena.append(jelenas_event)
runover.add(jelenas_event)
if ja == False:
j_noA.append(jelenas_event)
runover.add(jelenas_event)
if jt == False:
j_noT.append(jelenas_event)
runover.add(jelenas_event)
print( "http://www.hep.wisc.edu/~tperry/wbb/synch/2014ocbr24/%s_comp.log"%(cut))
log = open('/afs/hep.wisc.edu/home/tperry/www/wbb/synch/2014ocbr24/%s_comp.log'%(cut),'w')
log.write("Andreas Events: %s\n"%len(andreas_events))
log.write("Jelenas Events: %s\n"%len(jelenas_events))
log.write("Toms Events: %s\n"%len(toms_events ))
log.write("All Three: %s\n\n"%len(t_j_a))
log.write(" Tom Has, Jelena Doesn't (%s)\n"%len(t_noJ))
for e in t_noJ: log.write(" "+e)
log.write("\n\n")
log.write(" Jelena Has, Tom Doesn't (%s)\n"%len(j_noT))
for e in j_noT: log.write(" "+e)
log.write("\n\n")
log.write("====================================================================\n\n")
log.write(" Tom Has, Andrea Doesn't (%s)\n"%len(t_noA))
for e in t_noA: log.write(" "+e)
log.write("\n\n")
log.write(" Andrea Has, Tom Doesn't (%s)\n"%len(a_noT))
for e in a_noT: log.write(" "+e)
log.write("\n\n")
log.write("====================================================================\n\n")
log.write(" Jelena Has, Andrea Doesn't (%s)\n"%len(j_noA))
for e in j_noA: log.write(" "+e)
log.write("\n\n")
log.write(" Andrea Has, Jelena Doesn't (%s)\n"%len(a_noJ))
for e in a_noJ: log.write(" "+e)
log.write("\n\n")
log.write("We All Have %s\n"%len(t_j_a))
for e in t_j_a: log.write(e+" ")
log.write("\n\n")
log.write("Tom Has %s\n"%len(toms_events))
for e in toms_events: log.write(e+" ")
log.write("\n\n")
log.write("Jelena Has %s\n"%len(jelenas_events))
for e in jelenas_events: log.write(e+" ")
log.write("\n\n")
log.write("Andreas Has %s\n"%len(andreas_events))
for e in andreas_events: log.write(e+" ")
log.write("\n\n")
log.write("Run Over\n")
log.write("eventsToProcess = cms.untracked.VEventRange(")
bades = []
for e in set(runover): bades.append("'1:%s'"%e)
badlist = ",".join(bades)
log.write("%s)"%(badlist))
log.write("\n\n")
log.write("eventsToProcess = cms.untracked.VEventRange(")
badet = []
for e in set(runover): badet.append("'1:%s-1:%s'"%(e,e))
badliss = ",".join(badet)
log.write("%s)"%(badliss))
log.write("\n\n")
#lob = open('./comp/badevents.txt','a')
#for e in set(runover): lob.write("%s\n"%(e))
| [
"[email protected]"
]
| |
6d8ce22c751efd861956be268dafc8c2f00f3fbd | c0acf82a18b8e90cd38afedb02e45e53425a067e | /pyecharts/custom/overlap.py | e2cdd57622347e115a2fe03fcdc86c1ef34f05fd | [
"MIT"
]
| permissive | caideyang/pyecharts | 66b61d0400ea15b25ef7fb90f7305647343eea3a | c13f2fecece566359b2c881705bf96337c42ce40 | refs/heads/master | 2021-01-22T13:48:00.474761 | 2017-08-18T07:09:53 | 2017-08-18T07:09:53 | 100,685,801 | 1 | 0 | null | 2017-08-18T07:31:13 | 2017-08-18T07:31:13 | null | UTF-8 | Python | false | false | 1,609 | py | #!/usr/bin/env python
# coding=utf-8
class Overlap(object):
def __init__(self):
self._chart = None
def add(self, chart):
"""
:param chart:
chart instance
:return:
"""
if self._chart is None:
self._chart = chart
else:
self.__custom(self.__get_series(chart))
def __get_series(self, chart):
""" Get chart series data
:param chart:
chart instance
:return:
"""
return (
chart._option.get('legend')[0].get('data'),
chart._option.get('series'),
)
def __custom(self, series):
""" Appends the data for the series of the chart type
:param series:
series data
"""
_name, _series = series
for n in _name:
self._chart._option.get('legend')[0].get('data').append(n)
for s in _series:
self._chart._option.get('series').append(s)
def render(self, path="render.html"):
"""
:param path:
:return:
"""
self._chart.render(path)
def render_embed(self):
"""
:return:
"""
return self._chart.render_embed()
def show_config(self):
"""
:return:
"""
import pprint
return pprint.pprint(self._chart._option)
@property
def chart(self):
"""
:return:
"""
return self._chart
def _repr_html_(self):
"""
:return:
"""
return self._chart._repr_html_()
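# Minimal usage sketch (illustrative; `bar` and `line` are assumed pyecharts
# chart instances sharing an x-axis):
#
#   overlap = Overlap()
#   overlap.add(bar)   # the first chart added becomes the base
#   overlap.add(line)  # later charts merge their legend data and series
#   overlap.render("overlap.html")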
| [
"[email protected]"
]
| |
003d9d838b7372a3284b30915aec63707830d821 | ef20884169d10ec9ac4d1d3b77ee35245d248294 | /practice/deep-learning-from-scratch-2/np_random_choice.py | 9360dbb41512575e33d9d1d800f8a11e55fdeec2 | []
| no_license | heaven324/Deeplearning | 64016671879cdf1742eff6f374cfb640cfc708ae | a7a8d590fa13f53348f83f8c808538affbc7b3e8 | refs/heads/master | 2023-05-05T08:54:27.888155 | 2021-05-22T08:25:47 | 2021-05-22T08:25:47 | 188,010,607 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 727 | py | import numpy as np
print(np.random.choice(10)) # 5
print(np.random.choice(10)) # 9
# Sample a single word at random from words
words = ['you', 'say', 'goodbye', 'I', 'hello', '.']
print(np.random.choice(words))
# Sample 5 at random (with replacement)
print(np.random.choice(words, size = 5))
# Sample 5 at random (without replacement)
print(np.random.choice(words, size = 5, replace = False))
# Sample according to a probability distribution
p = [0.5, 0.1, 0.05, 0.2, 0.05, 0.1]
print(np.random.choice(words, p = p))
# Why raise to the 0.75 power: to slightly boost the sampling probability of low-frequency words
p = [0.7, 0.29, 0.01]
new_p = np.power(p, 0.75)
print(new_p)
new_p /= np.sum(new_p)
print(new_p)
| [
"[email protected]"
]
| |
778459e47142827e3629b6af6b3dbfc2ccc5d25e | ce990be34e8759efb96b890d9676da313fd2d9b4 | /tests/python/contrib/test_ethosu/cascader/test_plan.py | ddc40b49ac8a8de119af6b9b19d208ef745f4899 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"Zlib",
"MIT",
"Apache-2.0",
"BSD-2-Clause"
]
| permissive | tmoreau89/tvm | 291c0b1beb13503e18b1e45f135aaf334660b68d | 8136173a631bf6c7274d26285349225fcf6e495f | refs/heads/master | 2022-11-23T08:36:24.853648 | 2022-11-21T07:36:57 | 2022-11-21T07:36:57 | 119,757,672 | 5 | 1 | Apache-2.0 | 2019-03-22T23:06:53 | 2018-01-31T23:41:33 | Python | UTF-8 | Python | false | false | 7,708 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm.contrib.ethosu.cascader as cs
import pytest
def test_plan(DRAM, SRAM):
subgraph = cs.TESubgraph([], None)
part = cs.InlinePart(
subgraph,
[
cs.Propagator(
[[1, 0, 0], [0, 1, 0], [0, 0, 1]],
[0, 0],
),
],
)
tensor_1 = cs.Tensor([10, 10], "uint8")
tensor_2 = cs.Tensor([10, 10], "uint8")
part.set_input(0, tensor_1)
part.set_output(tensor_2)
tensor_1.add_consumer(part)
tensor_2.add_producer(part)
output_stripe_config = cs.StripeConfig(
shape=[5, 5],
extent=[10, 10],
strides=[5, 5],
order=[1, 2],
stripes=[2, 2],
offset=[0, 0],
)
tensor_config_out = cs.TensorConfig(
tensor=tensor_2,
home_region=DRAM,
state=cs.TensorConfigState.BOUNDARY,
buffer_mode=cs.BufferMode.RECOMPUTE,
stripe_configs=[output_stripe_config],
copy_tensor=False,
)
input_stripe_config = part.calculate_input_stripe_configs(output_stripe_config)[0]
tensor_config_in = cs.TensorConfig(
tensor=tensor_1,
home_region=DRAM,
state=cs.TensorConfigState.INTERIOR,
buffer_mode=cs.BufferMode.ROLLING,
stripe_configs=[input_stripe_config],
copy_tensor=False,
)
tensor_configs = {tensor_1: tensor_config_in, tensor_2: tensor_config_out}
open_configs = frozenset([tensor_config_in])
part_group = frozenset([part])
interior_region = SRAM
memory_usage = 100
cycles = 20
plan = cs.Plan(
tensor_configs=tensor_configs,
open_configs=open_configs,
output_config=tensor_config_out,
part_group=part_group,
interior_region=interior_region,
memory_usage=memory_usage,
cycles=cycles,
)
assert plan.tensor_configs == tensor_configs
assert plan.open_configs == open_configs
assert plan.output_config == tensor_config_out
assert plan.part_group == part_group
assert plan.interior_region == interior_region
assert plan.memory_usage == memory_usage
assert plan.cycles == cycles
def test_plan_merge(DRAM, SRAM):
subgraph = cs.TESubgraph([], None)
part_1 = cs.InlinePart(
subgraph,
[
cs.Propagator(
[[2, 0, 0], [0, 2, 0], [0, 0, 1]],
[0, 0],
),
],
)
part_2 = cs.InlinePart(
subgraph,
[
cs.Propagator(
[[1, 0, 0], [0, 1, 0], [0, 0, 1]],
[0, 0],
),
cs.Propagator(
[[0, 0, 6], [0, 0, 6], [0, 0, 1]],
[0, 0],
),
cs.Propagator(
[[1, 0], [0, 1]],
[0],
),
],
)
tensor_1 = cs.Tensor([20, 20], "uint8")
tensor_2 = cs.Tensor([10, 10], "uint8")
tensor_3 = cs.Tensor([6, 6], "uint8")
tensor_4 = cs.Tensor([10], "uint8")
tensor_5 = cs.Tensor([10, 10], "uint8")
part_1.set_input(0, tensor_1)
part_1.set_output(tensor_2)
tensor_1.add_consumer(part_1)
tensor_2.add_producer(part_1)
part_2.set_input(0, tensor_2)
part_2.set_input(1, tensor_3)
part_2.set_input(2, tensor_4)
part_2.set_output(tensor_5)
tensor_2.add_consumer(part_2)
tensor_3.add_consumer(part_2)
tensor_4.add_consumer(part_2)
tensor_5.add_producer(part_2)
output_stripe_config = cs.StripeConfig(
shape=[5, 5],
extent=[10, 10],
strides=[5, 5],
order=[1, 2],
stripes=[2, 2],
offset=[0, 0],
)
tensor_config_5 = cs.TensorConfig(
tensor=tensor_5,
home_region=DRAM,
state=cs.TensorConfigState.BOUNDARY,
buffer_mode=cs.BufferMode.RECOMPUTE,
stripe_configs=[output_stripe_config],
copy_tensor=False,
)
input_stripe_configs = part_2.calculate_input_stripe_configs(output_stripe_config)
tensor_config_4 = cs.TensorConfig(
tensor=tensor_4,
home_region=DRAM,
state=cs.TensorConfigState.BOUNDARY,
buffer_mode=cs.BufferMode.RECOMPUTE,
stripe_configs=[input_stripe_configs[2]],
copy_tensor=False,
)
tensor_config_3 = cs.TensorConfig(
tensor=tensor_3,
home_region=SRAM,
state=cs.TensorConfigState.INTERIOR,
buffer_mode=cs.BufferMode.RECOMPUTE,
stripe_configs=[input_stripe_configs[1]],
copy_tensor=False,
)
tensor_config_2 = cs.TensorConfig(
tensor=tensor_2,
home_region=SRAM,
state=cs.TensorConfigState.INTERIOR,
buffer_mode=cs.BufferMode.ROLLING,
stripe_configs=[input_stripe_configs[0]],
copy_tensor=False,
)
input_stripe_config = part_1.calculate_input_stripe_configs(input_stripe_configs[0])[0]
tensor_config_1 = cs.TensorConfig(
tensor=tensor_1,
home_region=DRAM,
state=cs.TensorConfigState.BOUNDARY,
buffer_mode=cs.BufferMode.ROLLING,
stripe_configs=[input_stripe_config],
copy_tensor=False,
)
tensor_configs = {tensor_1: tensor_config_1, tensor_2: tensor_config_2}
open_configs = frozenset([tensor_config_2])
part_group = frozenset([part_1])
interior_region = SRAM
memory_usage = 100
cycles = 20
plan_1 = cs.Plan(
tensor_configs=tensor_configs,
open_configs=open_configs,
output_config=tensor_config_2,
part_group=part_group,
interior_region=interior_region,
memory_usage=memory_usage,
cycles=cycles,
)
tensor_configs = {
tensor_2: tensor_config_2,
tensor_3: tensor_config_3,
tensor_4: tensor_config_4,
tensor_5: tensor_config_5,
}
open_configs = frozenset([tensor_config_2, tensor_config_3])
part_group = frozenset([part_2])
interior_region = SRAM
memory_usage = 200
cycles = 30
plan_2 = cs.Plan(
tensor_configs=tensor_configs,
open_configs=open_configs,
output_config=tensor_config_5,
part_group=part_group,
interior_region=interior_region,
memory_usage=memory_usage,
cycles=cycles,
)
merged_plan = plan_1.merge(plan_2)
assert merged_plan.tensor_configs == {
tensor_1: tensor_config_1,
tensor_2: tensor_config_2,
tensor_3: tensor_config_3,
tensor_4: tensor_config_4,
tensor_5: tensor_config_5,
}
assert merged_plan.open_configs == frozenset([tensor_config_3])
assert merged_plan.output_config == tensor_config_5
assert merged_plan.part_group == frozenset([part_1, part_2])
assert merged_plan.interior_region == interior_region
assert merged_plan.memory_usage == plan_1.memory_usage + plan_2.memory_usage
assert merged_plan.cycles == plan_1.cycles + plan_2.cycles
if __name__ == "__main__":
pytest.main([__file__])
| [
"[email protected]"
]
| |
b57aa04bb1157d20423de65671bee218d8715f6d | 730b92e439dbb013950b8bbf417cfde1bb40f8b9 | /Python/Add-Binary.py | 8b8be13ae529418ef8672901ffeb760e078c1eb4 | []
| no_license | yuede/Lintcode | fdbca5984c2860c8b532b5f4d99bce400b0b26d0 | d40b7ca1c03af7005cc78b26b877a769ca0ab723 | refs/heads/master | 2021-01-13T04:14:32.754210 | 2015-08-22T13:15:54 | 2015-08-22T13:15:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 833 | py | class Solution:
# @param {string} a a number
# @param {string} b a number
# @return {string} the result
def addBinary(self, a, b):
# Write your code here
pa = len(a) - 1
pb = len(b) - 1
s = ""
d = 0
while pa >= 0 and pb >= 0:
cur = d + int(a[pa]) + int(b[pb])
pa -= 1
pb -= 1
s += str(cur % 2)
d = cur / 2
while pa >= 0:
cur = d + int(a[pa])
pa -= 1
s += str(cur % 2)
d = cur / 2
while pb >= 0:
cur = d + int(b[pb])
pb -= 1
s += str(cur % 2)
d = cur / 2
if d > 0:
s += str(d)
rs = ""
for i in range(len(s)):
rs += s[len(s) - 1 - i]
return rs | [
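# Illustrative check (a hypothetical driver, not part of the Lintcode harness):
#   Solution().addBinary("11", "1") -> "100", i.e. 3 + 1 = 4 in binary.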
"[email protected]"
]
| |
97a5797d6b970d29dbea2c4c90e09131f13ca91c | e5efada3529d94875455c4230c8dabe27fb72a89 | /apps/search/migrations/0015_advancedsearchpage_simplesearchpage.py | 74a14dceeeef2ab60fb56655bb00ed68b2a72af6 | []
| no_license | alexmon1989/uma | d8c321fb0ec9b1a9039b1c83aeaaff774f657416 | 5dea579d634eeb1c8103c21157299b33ca5590f0 | refs/heads/master | 2023-08-03T04:31:13.598577 | 2023-07-22T18:17:13 | 2023-07-22T18:17:13 | 154,835,498 | 0 | 0 | null | 2023-03-02T11:20:54 | 2018-10-26T13:02:12 | Nunjucks | UTF-8 | Python | false | false | 1,712 | py | # Generated by Django 2.1.3 on 2019-10-10 13:38
import ckeditor_uploader.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('search', '0014_auto_20190719_1155'),
]
operations = [
migrations.CreateModel(
name='AdvancedSearchPage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description_uk', ckeditor_uploader.fields.RichTextUploadingField(verbose_name='Опис сторінки (укр.)')),
('description_en', ckeditor_uploader.fields.RichTextUploadingField(verbose_name='Опис сторінки (англ.)')),
],
options={
'verbose_name': 'Сторінка розширенного пошуку',
'verbose_name_plural': 'Сторінка розширенного пошуку',
},
),
migrations.CreateModel(
name='SimpleSearchPage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description_uk', ckeditor_uploader.fields.RichTextUploadingField(verbose_name='Опис сторінки (укр.)')),
('description_en', ckeditor_uploader.fields.RichTextUploadingField(verbose_name='Опис сторінки (англ.)')),
],
options={
'verbose_name': 'Сторінка простого пошуку',
'verbose_name_plural': 'Сторінка простого пошуку',
},
),
]
| [
"[email protected]"
]
| |
52980438ee437a5977680307d4b13bd673f3b1a3 | 6d7a67be5c2aa1bcebdcfd5bec855c0172c8f01f | /convert_weight.py | 55566963e7439f9fb4e9649bdd289f5114337916 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause",
"Apache-2.0"
]
| permissive | JWHennessey/stylegan2-pytorch-1 | 19184e1713b9bcfce6404fb6d19478f1dbcc56ec | 88852e3695d3ffd9281787690c3f8796dc1e225a | refs/heads/master | 2020-12-11T17:17:04.082956 | 2020-01-14T18:44:39 | 2020-01-14T18:44:39 | 233,909,977 | 0 | 0 | NOASSERTION | 2020-01-14T18:37:33 | 2020-01-14T18:37:32 | null | UTF-8 | Python | false | false | 6,849 | py | import argparse
import os
import sys
import pickle
import math
import torch
import numpy as np
from torchvision import utils
from model import Generator, Discriminator
def convert_modconv(vars, source_name, target_name, flip=False):
weight = vars[source_name + '/weight'].value().eval()
mod_weight = vars[source_name + '/mod_weight'].value().eval()
mod_bias = vars[source_name + '/mod_bias'].value().eval()
noise = vars[source_name + '/noise_strength'].value().eval()
bias = vars[source_name + '/bias'].value().eval()
dic = {
'conv.weight': np.expand_dims(weight.transpose((3, 2, 0, 1)), 0),
'conv.modulation.weight': mod_weight.transpose((1, 0)),
'conv.modulation.bias': mod_bias + 1,
'noise.weight': np.array([noise]),
'activate.bias': bias,
}
dic_torch = {}
for k, v in dic.items():
dic_torch[target_name + '.' + k] = torch.from_numpy(v)
if flip:
dic_torch[target_name + '.conv.weight'] = torch.flip(
dic_torch[target_name + '.conv.weight'], [3, 4]
)
return dic_torch
def convert_conv(vars, source_name, target_name, bias=True, start=0):
weight = vars[source_name + '/weight'].value().eval()
dic = {'weight': weight.transpose((3, 2, 0, 1))}
if bias:
dic['bias'] = vars[source_name + '/bias'].value().eval()
dic_torch = {}
dic_torch[target_name + f'.{start}.weight'] = torch.from_numpy(dic['weight'])
if bias:
dic_torch[target_name + f'.{start + 1}.bias'] = torch.from_numpy(dic['bias'])
return dic_torch
def convert_torgb(vars, source_name, target_name):
weight = vars[source_name + '/weight'].value().eval()
mod_weight = vars[source_name + '/mod_weight'].value().eval()
mod_bias = vars[source_name + '/mod_bias'].value().eval()
bias = vars[source_name + '/bias'].value().eval()
dic = {
'conv.weight': np.expand_dims(weight.transpose((3, 2, 0, 1)), 0),
'conv.modulation.weight': mod_weight.transpose((1, 0)),
'conv.modulation.bias': mod_bias + 1,
'bias': bias.reshape((1, 3, 1, 1)),
}
dic_torch = {}
for k, v in dic.items():
dic_torch[target_name + '.' + k] = torch.from_numpy(v)
return dic_torch
def convert_dense(vars, source_name, target_name):
weight = vars[source_name + '/weight'].value().eval()
bias = vars[source_name + '/bias'].value().eval()
dic = {'weight': weight.transpose((1, 0)), 'bias': bias}
dic_torch = {}
for k, v in dic.items():
dic_torch[target_name + '.' + k] = torch.from_numpy(v)
return dic_torch
def update(state_dict, new):
for k, v in new.items():
if k not in state_dict:
raise KeyError(k + ' is not found')
if v.shape != state_dict[k].shape:
raise ValueError(f'Shape mismatch: {v.shape} vs {state_dict[k].shape}')
state_dict[k] = v
def discriminator_fill_statedict(statedict, vars, size):
log_size = int(math.log(size, 2))
update(statedict, convert_conv(vars, f'{size}x{size}/FromRGB', 'convs.0'))
conv_i = 1
for i in range(log_size - 2, 0, -1):
reso = 4 * 2 ** i
update(statedict, convert_conv(vars, f'{reso}x{reso}/Conv0', f'convs.{conv_i}.conv1'))
update(statedict, convert_conv(vars, f'{reso}x{reso}/Conv1_down', f'convs.{conv_i}.conv2', start=1))
update(statedict, convert_conv(vars, f'{reso}x{reso}/Skip', f'convs.{conv_i}.skip', start=1, bias=False))
conv_i += 1
update(statedict, convert_conv(vars, f'4x4/Conv', 'final_conv'))
update(statedict, convert_dense(vars, f'4x4/Dense0', 'final_linear.0'))
update(statedict, convert_dense(vars, f'Output', 'final_linear.1'))
return statedict
def fill_statedict(state_dict, vars, size):
log_size = int(math.log(size, 2))
for i in range(8):
update(state_dict, convert_dense(vars, f'G_mapping/Dense{i}', f'style.{i + 1}'))
update(
state_dict,
{
'input.input': torch.from_numpy(
vars['G_synthesis/4x4/Const/const'].value().eval()
)
},
)
update(state_dict, convert_torgb(vars, 'G_synthesis/4x4/ToRGB', 'to_rgb1'))
for i in range(log_size - 2):
reso = 4 * 2 ** (i + 1)
update(
state_dict,
convert_torgb(vars, f'G_synthesis/{reso}x{reso}/ToRGB', f'to_rgbs.{i}'),
)
update(state_dict, convert_modconv(vars, 'G_synthesis/4x4/Conv', 'conv1'))
conv_i = 0
for i in range(log_size - 2):
reso = 4 * 2 ** (i + 1)
update(
state_dict,
convert_modconv(
vars,
f'G_synthesis/{reso}x{reso}/Conv0_up',
f'convs.{conv_i}',
flip=True,
),
)
update(
state_dict,
convert_modconv(
vars, f'G_synthesis/{reso}x{reso}/Conv1', f'convs.{conv_i + 1}'
),
)
conv_i += 2
return state_dict
if __name__ == '__main__':
device = 'cuda'
parser = argparse.ArgumentParser()
parser.add_argument('--repo', type=str, required=True)
parser.add_argument('--gen', action='store_true')
parser.add_argument('--disc', action='store_true')
parser.add_argument('path', metavar='PATH')
args = parser.parse_args()
sys.path.append(args.repo)
from dnnlib import tflib
tflib.init_tf()
with open(args.path, 'rb') as f:
generator, discriminator, g_ema = pickle.load(f)
size = g_ema.output_shape[2]
g = Generator(size, 512, 8)
state_dict = g.state_dict()
state_dict = fill_statedict(state_dict, g_ema.vars, size)
g.load_state_dict(state_dict)
latent_avg = torch.from_numpy(g_ema.vars['dlatent_avg'].value().eval())
ckpt = {'g_ema': state_dict, 'latent_avg': latent_avg}
if args.gen:
g_train = Generator(size, 512, 8)
g_train_state = g_train.state_dict()
g_train_state = fill_statedict(g_train_state, generator.vars, size)
ckpt['g'] = g_train_state
if args.disc:
disc = Discriminator(size)
d_state = disc.state_dict()
d_state = discriminator_fill_statedict(d_state, discriminator.vars, size)
ckpt['d'] = d_state
name = os.path.splitext(os.path.basename(args.path))[0]
torch.save(ckpt, name + '.pt')
batch_size = {256: 16, 512: 9, 1024: 4}
n_sample = batch_size.get(size, 25)
g = g.to(device)
x = torch.randn(n_sample, 512).to(device)
with torch.no_grad():
img, _ = g([x], truncation=0.5, truncation_latent=latent_avg.to(device))
utils.save_image(
img, name + '.png', nrow=int(n_sample ** 0.5), normalize=True, range=(-1, 1)
)
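# Example invocation (illustrative; the repo path and checkpoint name are
# assumptions):
#   python convert_weight.py --repo /path/to/stylegan2 stylegan2-ffhq-config-f.pkl
# This writes stylegan2-ffhq-config-f.pt plus a sample grid PNG of the same
# name into the working directory.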
| [
"[email protected]"
]
| |
7659459672a025e4b61ead51cea0f2b290a37634 | 796d2e8c1b801b745e628876b20f03399b343b39 | /learning/admin.py | 4505b532c666c0cb54ee8ef0d215b472516390d7 | []
| no_license | educatecomtr/stocks | 25b675996350d8496bf163ffef2d539ebcb9c6ec | 16fa674a694981ce77be4e915440883f3d08e65e | refs/heads/master | 2021-06-25T22:18:29.239692 | 2020-12-26T13:17:14 | 2020-12-26T13:17:14 | 180,432,643 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 100 | py | from django.contrib import admin
from learning.models import Product
admin.site.register(Product)
| [
"[email protected]"
]
| |
782a5e2a11fe39696a75f0f5a033a5af024cc786 | f8ffac4fa0dbe27316fa443a16df8a3f1f5cff05 | /Python/Counting_Valleys.py | db3c7a3eda8be589ae74a986fadb83c8e44b2c00 | []
| no_license | ankitniranjan/HackerrankSolutions | e27073f9837787a8af7a0157d95612028c07c974 | e110c72d3b137cf4c5cef6e91f58a17452c54c08 | refs/heads/master | 2023-03-16T19:06:17.805307 | 2021-03-09T16:28:39 | 2021-03-09T16:28:39 | 292,994,949 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | import math
import os
import random
import re
import sys
# Complete the countingValleys function below.
def countingValleys(n, s):
level=valley=0
for i in range(n):
if(s[i]=='U'):
level+=1
if(level==0):
valley+=1
else:
level-=1
return valley
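# Illustrative trace (an assumption for clarity): countingValleys(8, 'UDDDUDUU')
# returns 1 -- the level dips below 0 at the second step, and the single
# valley closes at the final 'U', when level returns to 0.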
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
n = int(input())
s = input()
result = countingValleys(n, s)
fptr.write(str(result) + '\n')
fptr.close()
| [
"[email protected]"
]
| |
ab55393ddc0e46a0f229ce84b50466d0ac1cb266 | 65701888f7e09716b83ddbb965a50b7c62b0f287 | /blocks/google/common_block.py | fb2ba923f68bc8aedfef5cc46a894ff664e758b9 | []
| no_license | ColinKennedy/auto_docstring | 6a4a27c16434cb6d94db435226758a09627d9252 | dbca838630faf410a277069aedbecb82cfeedae9 | refs/heads/master | 2021-04-12T12:36:31.825008 | 2018-11-05T01:49:49 | 2018-11-05T01:49:49 | 89,107,892 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,343 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# TODO : Just had an idea. Why not change the gross "if X.is_valid(obj): return X(obj)
# into a single classmethod? That'd look way better and potentially be
# easier to loop over
#
'''The classes and functions needed to parse the types of all astroid nodes.
This module does most of the heavy-lifting for args return-types. It can
parse functions within functions, infer an object's type, and even recursively
traverse imported modules to get an object's type.
'''
# IMPORT STANDARD LIBRARIES
import abc
import os
# IMPORT THIRD-PARTY LIBRARIES
import six
# IMPORT LOCAL LIBRARIES
from ...config import environment
from ...parsing import visit
from ...core import check
from . import common_type
@six.add_metaclass(abc.ABCMeta)
class CommonBlock(object):
'''An abstract class used to implement a Google-style block.
Attributes:
label (str): The block display text.
'''
label = 'Header label'
@staticmethod
@abc.abstractmethod
def draw(info):
'''Create the docstring lines to represent the given `info`.
Args:
info (dict[str]):
The parsed AST node whose type needs to be found and then
converted into a string.
Returns:
list[str]: The lines to create.
'''
return []
@abc.abstractproperty
def name(self):
'''str: A unique name to use to identify this block-type.'''
return '_unique_id'
@classmethod
def get_starting_lines(cls):
'''list[str]: Get the label used for the top of this block.'''
return ['{}:'.format(cls.label)]
@staticmethod
def get_spacing():
'''int: Get the number of newlines to separate each docstring block.'''
try:
return int(os.getenv('AUTO_DOCSTRING_BLOCK_SPACING', '1'))
except TypeError:
return 1
@staticmethod
def _expand_types(obj, include_type=False):
r'''Wrap the given `obj` with a specific docstring-class wrapper.
Args:
obj (`astroid.NodeNG`):
Some node to wrap.
include_type (bool, optional):
If True and `obj` is a container of some kind, for example
a list of strs, then `obj` will be printed like "list[str]".
If False, `obj` would be printed as just "str".
This parameter is used primarily mainly for keeping return-types
from accidentally printing its container-type twice when
the container is nested.
Default is False.
Returns:
`SpecialType` or `ComprehensionContainerType` or `ContainerType` \
            or `IterableType` or `Type`:
The wrapped type.
'''
if common_type.SpecialType.is_valid(obj):
return common_type.SpecialType(obj)
obj = visit.get_value(obj)
if common_type.DictComprehensionContainerType.is_valid(obj):
return common_type.DictComprehensionContainerType(obj)
if common_type.ComprehensionContainerType.is_valid(obj):
return common_type.ComprehensionContainerType(obj)
if common_type.ContainerType.is_valid(obj):
return common_type.ContainerType(obj)
if check.is_itertype(obj):
return common_type.IterableType(obj, include_type=include_type)
return common_type.Type(obj)
@staticmethod
def _change_type_to_str(*objs):
'''Create the full string of all return-types for the given `objs`.
Args:
*objs (list[:class:`auto_docstring.blocks.google.common_block.Type`]):
The types to change into strings.
Returns:
str: The final set of return types for the given objects. This string
will be added to the auto-generated docstrings, directly.
'''
items = []
for item in [obj.as_str() for obj in objs]:
if item not in items:
items.append(item)
return common_type.make_items_text(items)
@six.add_metaclass(abc.ABCMeta)
class MultiTypeBlock(CommonBlock):
'''The base-class used to create "Returns" and "Yields" blocks.'''
_info_key = '_some_key'
name = 'multitype_base_block'
@classmethod
def _process_args(cls, info):
expected_object = info.get(cls._info_key)
if not expected_object:
return []
indent = ''
# Check if I need this if-statement
if info.get('lines'):
indent = environment.get_default_indent()
info['indent'] = indent
obj_types = cls._expand_types(expected_object)
type_info_as_str = cls._change_type_to_str(*obj_types)
return [type_info_as_str]
@classmethod
def _build_indented_docstring_lines(cls, lines, indent='', multiline=False):
return [cls._make_line(line, indent=indent, multiline=multiline)
for line in lines]
@classmethod
def draw(cls, info):
        '''Create the docstring lines to represent the given `info`.
        Note:
            If no data is found for cls._info_key, this method will return
            an empty list.
        Args:
            info (dict[str, list[`astroid.NodeNG`]]):
                The parsed AST node whose type needs to be found and then
                converted into a string.
        Returns:
            list[str]: The lines to create.
        '''
lines = cls._process_args(info)
if not lines:
return []
starting_lines = []
all_lines = info.get('lines', [])
if all_lines:
starting_lines = cls.get_starting_lines()
multiline = is_multiline(all_lines)
docstring_lines = cls._build_indented_docstring_lines(
lines,
info.get('indent', ''),
            multiline=multiline,
)
return starting_lines + docstring_lines
@staticmethod
def _make_line(obj_type, indent, multiline=False):
'''Create the docstring line for the given input.
Args:
            obj_type (str):
                The type of the object. Example: "tuple[str]", "bool".
            indent (str):
                The amount of space to add to the docstring block.
            multiline (bool, optional):
If True, get the user's preferred separator and place it between
the return type and the return description.
If False, force the separator to just be " " so that the return
statement will stay on a single line.
Default is False.
Returns:
str: The created docstring line.
'''
if obj_type:
# This ":" is needed for parsing by auto_docstring
obj_type = ':' + obj_type
if not multiline:
sep = ' '
else:
sep = environment.get_description_separator()
return '{indent}{{{obj_type}!f}}:{sep}{{!f}}.'.format(
indent=indent,
obj_type=obj_type,
sep=sep,
)
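# Illustrative output of `_make_line` (an assumption, shown for clarity):
# _make_line('str', '    ') returns '    {:str!f}: {!f}.' when `multiline` is
# False; the ':' inside '{:str!f}' is the marker auto_docstring later parses.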
def is_multiline(lines):
return len(lines) > 1
| [
"[email protected]"
]
| |
9b5f678ee01f74948e3abe78205622ca733d1def | f6d96e9505103428402ea9772fdd0b48c4dff7e9 | /tests/test_models/test_place.py | 4bd8e6e2e665353886e8de7c111a98acd68c7add | []
| no_license | KarenCampo777/AirBnB_clone | 8271a2a7f75c01ea875b9232a939f1f58f484705 | 95051e3c7c05837b89966caae55bb54eef81c95f | refs/heads/master | 2023-03-14T03:41:18.367359 | 2021-02-24T22:32:17 | 2021-02-24T22:32:17 | 276,201,869 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,527 | py | #!/usr/bin/python3
"""
Test module for place module
"""
from models.place import Place
import models
import unittest
import os
import datetime
class TestPlace(unittest.TestCase):
""" Testing an Place instance """
def setUp(self):
"""
Setting up the test instance
"""
self.my_base1 = Place()
self.my_base2 = Place()
    def tearDown(self):
        """
        Tearing down the test instance
        """
del self.my_base1
del self.my_base2
def test_create(self):
"""
Testing creation of an Place instance
"""
self.assertIsInstance(self.my_base1, Place)
def test_permissions(self):
"""
Testing file permissions to be executable
"""
self.assertTrue(os.access("models/place.py", os.X_OK))
def test_id(self):
"""
Testing if attribute id is as unique as a string type
"""
self.assertIsInstance(self.my_base1.id, str)
self.assertNotEqual(self.my_base1.id, self.my_base2.id)
def test_dates(self):
"""
Testing created_at and updated_at of instances
"""
self.assertIsInstance(self.my_base1.created_at, datetime.datetime)
self.assertIsInstance(self.my_base1.updated_at, datetime.datetime)
prev_date = self.my_base1.updated_at
self.my_base1.save()
self.assertNotEqual(prev_date, self.my_base1.updated_at)
def test_str_format(self):
"""
Testing the function __str__ to have the correct format
[<class name>] (<self.id>) <self.__dict__>
"""
o = self.my_base1
msg1 = o.__str__()
msg2 = "[{}] ({}) {}".format(o.__class__.__name__, o.id, o.__dict__)
self.assertEqual(msg1, msg2)
def test_to_dict(self):
"""
Testing to_dict function to return correct format
"""
ins = self.my_base1
obj = ins.to_dict()
self.assertIsInstance(obj, dict)
self.assertTrue('__class__' in obj)
self.assertEqual(obj['__class__'], 'Place')
self.assertIsInstance(obj['created_at'], str)
self.assertIsInstance(obj['updated_at'], str)
self.assertEqual(obj['created_at'], ins.created_at.isoformat())
self.assertEqual(obj['updated_at'], ins.updated_at.isoformat())
def test_docstring(self):
"""
Testing documentation on place
"""
self.assertIsNotNone(models.place.__doc__)
self.assertIsNotNone(Place.__doc__)
| [
"[email protected]"
]
| |
ada10adc0bef6aee3f66cc6505c04af63ade6437 | ca2818572d17285210792694ba1f07c99e11d9ad | /setup.py | 209a4bd93203208084c183cf32cece8f76ddf3bd | [
"Apache-2.0"
]
| permissive | tomzhang/codesnap | cc335e8a63af70ed0121b222eb4fc2e35841b0b0 | 04e11176888243052c46a6a04a1ba63a8f80d684 | refs/heads/master | 2022-11-29T16:23:05.625385 | 2020-08-09T07:11:58 | 2020-08-09T07:11:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,134 | py | import setuptools
from distutils.core import Extension
with open("README.md") as f:
long_description = f.read()
setuptools.setup(
name="codesnap",
version="0.0.4",
author="Tian Gao",
author_email="[email protected]",
description="A profiling tool that can visualize python code in flame graph",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/gaogaotiantian/codesnap",
packages=setuptools.find_packages("src"),
package_dir={"":"src"},
package_data={
"codesnap": [
"html/*.js",
"html/*.css",
"html/*.html"
]
},
ext_modules=[
Extension(
"codesnap.snaptrace",
sources = [
"src/codesnap/modules/snaptrace.c",
]
)
],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Software Development :: Quality Assurance",
],
python_requires=">=3.5",
)
| [
"[email protected]"
]
| |
33bada0a6ebc9c86ad48aa12cb5fff42acd3588a | 3b43cf4cfc666798ebe85ed1db8858034b13d45c | /tests/universal_functions_tests/power_tests/normal.py | dab58c185239f89bab51ee55c80dbe61e5d4326a | [
"Apache-2.0"
]
| permissive | Pandinosaurus/legate.numpy | 5428b80a0a53ab882cd74b5dbf5fd86c7ee82199 | 896f4fd9b32db445da6cdabf7b78d523fca96936 | refs/heads/master | 2023-06-27T04:33:52.982601 | 2021-07-01T21:39:52 | 2021-07-01T21:39:52 | 358,820,941 | 0 | 0 | Apache-2.0 | 2021-08-01T02:57:57 | 2021-04-17T08:06:05 | C++ | UTF-8 | Python | false | false | 1,000 | py | # Copyright 2021 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
import legate.numpy as lg
def test():
bases_np = np.random.randn(4, 5)
    # use integer exponents: negative bases with fractional exponents would yield NaNs
exponents_np = np.random.randint(10, size=(4, 5)).astype(np.float64)
bases = lg.array(bases_np)
exponents = lg.array(exponents_np)
assert lg.allclose(
lg.power(bases, exponents), np.power(bases_np, exponents_np)
)
if __name__ == "__main__":
test()
| [
"[email protected]"
]
| |
153121106ebc24e5c336526de3225d92751f09bb | 2efa07bd7d8864950fb2f377386d74a2ee992d3a | /project.py | c044ceba15827ad76acdba4c58a966ae8a6a9a4b | []
| no_license | Sbk3824/RestaurantWebsite | 9e318c30dd65fe46fe8adbc3aa4ccde0a4924f74 | a7930a6d487700d2d6fa54201f87eda48d16528f | refs/heads/master | 2020-03-20T19:19:48.387025 | 2018-07-06T01:52:47 | 2018-07-06T01:52:47 | 137,632,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,252 | py | from flask import Flask, render_template, request, redirect, jsonify, url_for, flash
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import sessionmaker
from database_setup import Base, Restaurant, MenuItem, User
from flask import session as login_session
import random
import string
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import httplib2
import json
from flask import make_response
import requests
app = Flask(__name__)
CLIENT_ID = json.loads(
open('client_secrets.json', 'r').read())['web']['client_id']
APPLICATION_NAME = "Restaurant Menu Application"
# Connect to Database and create database session
engine = create_engine('sqlite:///restaurantmenuwithusers.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
# Create anti-forgery state token
@app.route('/login')
def showLogin():
state = ''.join(random.choice(string.ascii_uppercase + string.digits)
for x in xrange(32))
login_session['state'] = state
# return "The current session state is %s" % login_session['state']
return render_template('login.html', STATE=state)
@app.route('/fbconnect', methods=['POST'])
def fbconnect():
if request.args.get('state') != login_session['state']:
response = make_response(json.dumps('Invalid state parameter.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
access_token = request.data
print "access token received %s " % access_token
app_id = json.loads(open('fb_client_secrets.json', 'r').read())[
'web']['app_id']
app_secret = json.loads(
open('fb_client_secrets.json', 'r').read())['web']['app_secret']
url = 'https://graph.facebook.com/oauth/access_token?grant_type=fb_exchange_token&client_id=%s&client_secret=%s&fb_exchange_token=%s' % (
app_id, app_secret, access_token)
h = httplib2.Http()
result = h.request(url, 'GET')[1]
# Use token to get user info from API
userinfo_url = "https://graph.facebook.com/v2.8/me"
    '''
    The token exchange response is formatted as comma-separated key:value
    pairs, so split on commas and take the first entry (the access-token
    pair), then split on the colon to pull out the token value and strip the
    remaining quotes so it can be used directly in the Graph API calls.
    '''
token = result.split(',')[0].split(':')[1].replace('"', '')
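    # Illustrative walk-through of the line above (the token value is made
    # up, not a real Facebook response):
    #   result == '"access_token":"EAAB123","token_type":"bearer",...'
    #   result.split(',')[0]  -> '"access_token":"EAAB123"'
    #   .split(':')[1]        -> '"EAAB123"'
    #   .replace('"', '')     -> 'EAAB123'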
url = 'https://graph.facebook.com/v2.8/me?access_token=%s&fields=name,id,email' % token
h = httplib2.Http()
result = h.request(url, 'GET')[1]
# print "url sent for API access:%s"% url
# print "API JSON result: %s" % result
data = json.loads(result)
login_session['provider'] = 'facebook'
login_session['username'] = data["name"]
login_session['email'] = data["email"]
login_session['facebook_id'] = data["id"]
# The token must be stored in the login_session in order to properly logout
login_session['access_token'] = token
# Get user picture
url = 'https://graph.facebook.com/v2.8/me/picture?access_token=%s&redirect=0&height=200&width=200' % token
h = httplib2.Http()
result = h.request(url, 'GET')[1]
data = json.loads(result)
login_session['picture'] = data["data"]["url"]
# see if user exists
user_id = getUserID(login_session['email'])
if not user_id:
user_id = createUser(login_session)
login_session['user_id'] = user_id
output = ''
output += '<h1>Welcome, '
output += login_session['username']
output += '!</h1>'
output += '<img src="'
output += login_session['picture']
output += ' " style = "width: 300px; height: 300px;border-radius: 150px;-webkit-border-radius: 150px;-moz-border-radius: 150px;"> '
flash("Now logged in as %s" % login_session['username'])
return output
@app.route('/fbdisconnect')
def fbdisconnect():
facebook_id = login_session['facebook_id']
    # The access token must be included to successfully logout
access_token = login_session['access_token']
url = 'https://graph.facebook.com/%s/permissions?access_token=%s' % (facebook_id,access_token)
h = httplib2.Http()
result = h.request(url, 'DELETE')[1]
return "you have been logged out"
@app.route('/gconnect', methods=['POST'])
def gconnect():
# Validate state token
if request.args.get('state') != login_session['state']:
response = make_response(json.dumps('Invalid state parameter.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
# Obtain authorization code
code = request.data
try:
# Upgrade the authorization code into a credentials object
oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
oauth_flow.redirect_uri = 'postmessage'
credentials = oauth_flow.step2_exchange(code)
except FlowExchangeError:
response = make_response(
json.dumps('Failed to upgrade the authorization code.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
# Check that the access token is valid.
access_token = credentials.access_token
url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
% access_token)
h = httplib2.Http()
result = json.loads(h.request(url, 'GET')[1])
# If there was an error in the access token info, abort.
if result.get('error') is not None:
response = make_response(json.dumps(result.get('error')), 500)
response.headers['Content-Type'] = 'application/json'
return response
# Verify that the access token is used for the intended user.
gplus_id = credentials.id_token['sub']
if result['user_id'] != gplus_id:
response = make_response(
json.dumps("Token's user ID doesn't match given user ID."), 401)
response.headers['Content-Type'] = 'application/json'
return response
# Verify that the access token is valid for this app.
if result['issued_to'] != CLIENT_ID:
response = make_response(
json.dumps("Token's client ID does not match app's."), 401)
print "Token's client ID does not match app's."
response.headers['Content-Type'] = 'application/json'
return response
stored_access_token = login_session.get('access_token')
stored_gplus_id = login_session.get('gplus_id')
if stored_access_token is not None and gplus_id == stored_gplus_id:
response = make_response(json.dumps('Current user is already connected.'),
200)
response.headers['Content-Type'] = 'application/json'
return response
# Store the access token in the session for later use.
login_session['access_token'] = credentials.access_token
login_session['gplus_id'] = gplus_id
# Get user info
userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
params = {'access_token': credentials.access_token, 'alt': 'json'}
answer = requests.get(userinfo_url, params=params)
data = answer.json()
login_session['username'] = data['name']
login_session['picture'] = data['picture']
login_session['email'] = data['email']
# ADD PROVIDER TO LOGIN SESSION
login_session['provider'] = 'google'
# see if user exists, if it doesn't make a new one
user_id = getUserID(data["email"])
if not user_id:
user_id = createUser(login_session)
login_session['user_id'] = user_id
output = ''
output += '<h1>Welcome, '
output += login_session['username']
output += '!</h1>'
output += '<img src="'
output += login_session['picture']
output += ' " style = "width: 300px; height: 300px;border-radius: 150px;-webkit-border-radius: 150px;-moz-border-radius: 150px;"> '
flash("you are now logged in as %s" % login_session['username'])
print "done!"
return output
# User Helper Functions
def createUser(login_session):
newUser = User(name=login_session['username'], email=login_session[
'email'], picture=login_session['picture'])
session.add(newUser)
session.commit()
user = session.query(User).filter_by(email=login_session['email']).one()
return user.id
def getUserInfo(user_id):
user = session.query(User).filter_by(id=user_id).one()
return user
def getUserID(email):
try:
user = session.query(User).filter_by(email=email).one()
return user.id
except:
return None
# DISCONNECT - Revoke a current user's token and reset their login_session
@app.route('/gdisconnect')
def gdisconnect():
# Only disconnect a connected user.
access_token = login_session.get('access_token')
if access_token is None:
response = make_response(
json.dumps('Current user not connected.'), 401)
response.headers['Content-Type'] = 'application/json'
return response
url = 'https://accounts.google.com/o/oauth2/revoke?token=%s' % access_token
h = httplib2.Http()
result = h.request(url, 'GET')[0]
if result['status'] == '200':
response = make_response(json.dumps('Successfully disconnected.'), 200)
response.headers['Content-Type'] = 'application/json'
return response
else:
        response = make_response(json.dumps('Failed to revoke token for given user.'), 400)
response.headers['Content-Type'] = 'application/json'
return response
# JSON APIs to view Restaurant Information
@app.route('/restaurant/<int:restaurant_id>/menu/JSON')
def restaurantMenuJSON(restaurant_id):
restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
items = session.query(MenuItem).filter_by(
restaurant_id=restaurant_id).all()
return jsonify(MenuItems=[i.serialize for i in items])
@app.route('/restaurant/<int:restaurant_id>/menu/<int:menu_id>/JSON')
def menuItemJSON(restaurant_id, menu_id):
Menu_Item = session.query(MenuItem).filter_by(id=menu_id).one()
return jsonify(Menu_Item=Menu_Item.serialize)
@app.route('/restaurant/JSON')
def restaurantsJSON():
restaurants = session.query(Restaurant).all()
return jsonify(restaurants=[r.serialize for r in restaurants])
# Show all restaurants
@app.route('/')
@app.route('/restaurant/')
def showRestaurants():
restaurants = session.query(Restaurant).order_by(asc(Restaurant.name))
if 'username' not in login_session:
return render_template('publicrestaurants.html', restaurants=restaurants)
else:
return render_template('restaurants.html', restaurants=restaurants)
# Create a new restaurant
@app.route('/restaurant/new/', methods=['GET', 'POST'])
def newRestaurant():
if 'username' not in login_session:
return redirect('/login')
if request.method == 'POST':
newRestaurant = Restaurant(
name=request.form['name'], user_id=login_session['user_id'])
session.add(newRestaurant)
flash('New Restaurant %s Successfully Created' % newRestaurant.name)
session.commit()
return redirect(url_for('showRestaurants'))
else:
return render_template('newRestaurant.html')
# Edit a restaurant
@app.route('/restaurant/<int:restaurant_id>/edit/', methods=['GET', 'POST'])
def editRestaurant(restaurant_id):
editedRestaurant = session.query(
Restaurant).filter_by(id=restaurant_id).one()
if 'username' not in login_session:
return redirect('/login')
if editedRestaurant.user_id != login_session['user_id']:
return "<script>function myFunction() {alert('You are not authorized to edit this restaurant. Please create your own restaurant in order to edit.');}</script><body onload='myFunction()'>"
if request.method == 'POST':
if request.form['name']:
editedRestaurant.name = request.form['name']
flash('Restaurant Successfully Edited %s' % editedRestaurant.name)
return redirect(url_for('showRestaurants'))
else:
return render_template('editRestaurant.html', restaurant=editedRestaurant)
# Delete a restaurant
@app.route('/restaurant/<int:restaurant_id>/delete/', methods=['GET', 'POST'])
def deleteRestaurant(restaurant_id):
restaurantToDelete = session.query(
Restaurant).filter_by(id=restaurant_id).one()
if 'username' not in login_session:
return redirect('/login')
if restaurantToDelete.user_id != login_session['user_id']:
return "<script>function myFunction() {alert('You are not authorized to delete this restaurant. Please create your own restaurant in order to delete.');}</script><body onload='myFunction()'>"
if request.method == 'POST':
session.delete(restaurantToDelete)
flash('%s Successfully Deleted' % restaurantToDelete.name)
session.commit()
return redirect(url_for('showRestaurants', restaurant_id=restaurant_id))
else:
return render_template('deleteRestaurant.html', restaurant=restaurantToDelete)
# Show a restaurant menu
@app.route('/restaurant/<int:restaurant_id>/')
@app.route('/restaurant/<int:restaurant_id>/menu/')
def showMenu(restaurant_id):
restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
creator = getUserInfo(restaurant.user_id)
items = session.query(MenuItem).filter_by(
restaurant_id=restaurant_id).all()
if 'username' not in login_session or creator.id != login_session['user_id']:
return render_template('publicmenu.html', items=items, restaurant=restaurant, creator=creator)
else:
return render_template('menu.html', items=items, restaurant=restaurant, creator=creator)
# Create a new menu item
@app.route('/restaurant/<int:restaurant_id>/menu/new/', methods=['GET', 'POST'])
def newMenuItem(restaurant_id):
if 'username' not in login_session:
return redirect('/login')
restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
if login_session['user_id'] != restaurant.user_id:
return "<script>function myFunction() {alert('You are not authorized to add menu items to this restaurant. Please create your own restaurant in order to add items.');}</script><body onload='myFunction()'>"
if request.method == 'POST':
newItem = MenuItem(name=request.form['name'], description=request.form['description'], price=request.form[
'price'], course=request.form['course'], restaurant_id=restaurant_id, user_id=restaurant.user_id)
session.add(newItem)
session.commit()
flash('New Menu %s Item Successfully Created' % (newItem.name))
return redirect(url_for('showMenu', restaurant_id=restaurant_id))
else:
return render_template('newmenuitem.html', restaurant_id=restaurant_id)
# Edit a menu item
@app.route('/restaurant/<int:restaurant_id>/menu/<int:menu_id>/edit', methods=['GET', 'POST'])
def editMenuItem(restaurant_id, menu_id):
if 'username' not in login_session:
return redirect('/login')
editedItem = session.query(MenuItem).filter_by(id=menu_id).one()
restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
if login_session['user_id'] != restaurant.user_id:
return "<script>function myFunction() {alert('You are not authorized to edit menu items to this restaurant. Please create your own restaurant in order to edit items.');}</script><body onload='myFunction()'>"
if request.method == 'POST':
if request.form['name']:
editedItem.name = request.form['name']
if request.form['description']:
editedItem.description = request.form['description']
if request.form['price']:
editedItem.price = request.form['price']
if request.form['course']:
editedItem.course = request.form['course']
session.add(editedItem)
session.commit()
flash('Menu Item Successfully Edited')
return redirect(url_for('showMenu', restaurant_id=restaurant_id))
else:
return render_template('editmenuitem.html', restaurant_id=restaurant_id, menu_id=menu_id, item=editedItem)
# Delete a menu item
@app.route('/restaurant/<int:restaurant_id>/menu/<int:menu_id>/delete', methods=['GET', 'POST'])
def deleteMenuItem(restaurant_id, menu_id):
if 'username' not in login_session:
return redirect('/login')
restaurant = session.query(Restaurant).filter_by(id=restaurant_id).one()
itemToDelete = session.query(MenuItem).filter_by(id=menu_id).one()
if login_session['user_id'] != restaurant.user_id:
return "<script>function myFunction() {alert('You are not authorized to delete menu items to this restaurant. Please create your own restaurant in order to delete items.');}</script><body onload='myFunction()'>"
if request.method == 'POST':
session.delete(itemToDelete)
session.commit()
flash('Menu Item Successfully Deleted')
return redirect(url_for('showMenu', restaurant_id=restaurant_id))
else:
return render_template('deleteMenuItem.html', item=itemToDelete)
# Disconnect based on provider
@app.route('/disconnect')
def disconnect():
if 'provider' in login_session:
if login_session['provider'] == 'google':
gdisconnect()
del login_session['gplus_id']
del login_session['access_token']
if login_session['provider'] == 'facebook':
fbdisconnect()
del login_session['facebook_id']
del login_session['username']
del login_session['email']
del login_session['picture']
del login_session['user_id']
del login_session['provider']
flash("You have successfully been logged out.")
return redirect(url_for('showRestaurants'))
else:
flash("You were not logged in")
return redirect(url_for('showRestaurants'))
if __name__ == '__main__':
app.secret_key = 'super_secret_key'
app.debug = True
app.run(host='0.0.0.0', port=5555) | [
"[email protected]"
]
| |
8a4209560e01a9bb2625b02445afa69dcf3b28fc | e7ff2f9e21a94f2956b8c79f268dc6d45b41237b | /Frontend/node_modules/watchpack-chokidar2/node_modules/fsevents/build/config.gypi | b5962c025c83982c05fecf7c1819e71e4893c18a | [
"MIT"
]
| permissive | vipul-07/MERN-Project | fcb4af686557b99b802404e8622905781e89bbc3 | c0bdd3b5dfc73b2657b8563d069360e11466714a | refs/heads/master | 2023-02-14T15:42:38.653627 | 2021-01-10T05:35:02 | 2021-01-10T05:35:02 | 317,460,195 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,709 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"build_v8_with_gn": "false",
"coverage": "false",
"dcheck_always_on": 0,
"debug_nghttp2": "false",
"debug_node": "false",
"enable_lto": "false",
"enable_pgo_generate": "false",
"enable_pgo_use": "false",
"error_on_warn": "false",
"force_dynamic_crt": 0,
"host_arch": "x64",
"icu_data_in": "../../deps/icu-tmp/icudt67l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_path": "deps/icu-small",
"icu_small": "false",
"icu_ver_major": "67",
"is_debug": 0,
"llvm_version": "11.0",
"napi_build_version": "6",
"node_byteorder": "little",
"node_debug_lib": "false",
"node_enable_d8": "false",
"node_install_npm": "true",
"node_module_version": 83,
"node_no_browser_globals": "false",
"node_prefix": "/usr/local",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared": "false",
"node_shared_brotli": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_nghttp2": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_target_type": "executable",
"node_use_bundled_v8": "true",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_node_code_cache": "true",
"node_use_node_snapshot": "true",
"node_use_openssl": "true",
"node_use_v8_platform": "true",
"node_with_ltcg": "false",
"node_without_node_options": "false",
"openssl_fips": "",
"openssl_is_fips": "false",
"ossfuzz": "false",
"shlib_suffix": "83.dylib",
"target_arch": "x64",
"v8_enable_31bit_smis_on_64bit_arch": 0,
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_enable_inspector": 1,
"v8_enable_lite_mode": 0,
"v8_enable_object_print": 1,
"v8_enable_pointer_compression": 0,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 1,
"v8_promise_internal_field_count": 1,
"v8_random_seed": 0,
"v8_trace_maps": 0,
"v8_use_siphash": 1,
"want_separate_host_toolset": 0,
"xcode_version": "11.0",
"nodedir": "/Users/apple/Library/Caches/node-gyp/14.10.1",
"standalone_static_library": 1,
"dry_run": "",
"legacy_bundling": "",
"save_dev": "",
"browser": "",
"commit_hooks": "true",
"only": "",
"viewer": "man",
"also": "",
"rollback": "true",
"sign_git_commit": "",
"audit": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"init_author_url": "",
"maxsockets": "50",
"shell": "/bin/zsh",
"metrics_registry": "https://registry.npmjs.org/",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"timing": "",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"preid": "",
"fetch_retries": "2",
"registry": "https://registry.npmjs.org/",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"logs_max": "10",
"prefer_online": "",
"cache_lock_retries": "10",
"global_style": "",
"update_notifier": "true",
"audit_level": "low",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"offline": "",
"read_only": "",
"searchlimit": "20",
"access": "",
"json": "",
"allow_same_version": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/apple/.npm-init.js",
"userconfig": "/Users/apple/.npmrc",
"cidr": "",
"node_version": "14.10.1",
"user": "",
"auth_type": "legacy",
"editor": "vi",
"ignore_prepublish": "",
"save": "true",
"script_shell": "",
"tag": "latest",
"before": "",
"global": "",
"progress": "true",
"ham_it_up": "",
"optional": "true",
"searchstaleness": "900",
"bin_links": "true",
"force": "",
"save_prod": "",
"searchopts": "",
"depth": "Infinity",
"node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js",
"rebuild_bundle": "true",
"sso_poll_frequency": "500",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"scripts_prepend_node_path": "warn-only",
"sso_type": "oauth",
"strict_ssl": "true",
"tag_version_prefix": "v",
"dev": "",
"fetch_retry_factor": "10",
"group": "20",
"save_exact": "",
"cache_lock_stale": "60000",
"prefer_offline": "",
"version": "",
"cache_min": "10",
"otp": "",
"cache": "/Users/apple/.npm",
"searchexclude": "",
"color": "true",
"package_lock": "true",
"fund": "true",
"package_lock_only": "",
"save_optional": "",
"user_agent": "npm/6.14.8 node/v14.10.1 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"send_metrics": "",
"init_version": "1.0.0",
"node_options": "",
"umask": "0022",
"scope": "",
"git": "git",
"init_author_name": "",
"onload_script": "",
"tmp": "/var/folders/0w/px0kn_6s561dhjplhgbypnj80000gn/T",
"unsafe_perm": "true",
"format_package_lock": "true",
"link": "",
"prefix": "/usr/local"
}
}
| [
"[email protected]"
]
| |
f32cc09e9b5e4191dae2fb825a128f8ca6aa38c6 | 2e2a02ec8323982975ace3d249b22a42d8b97a1f | /skipper.py | 11171dc6ca97629b3d735b09f2921f679e80ed68 | []
| no_license | datagovua/os-budget-ukraine | 4e8c6d0373aead42890349befbd69bf8e8fef0a1 | 3a45f89c3872c9b9b45fb1206da445989b37b335 | refs/heads/master | 2021-01-13T02:49:03.608617 | 2016-12-22T20:59:14 | 2016-12-23T01:14:22 | 77,156,721 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | import logging
from datapackage_pipelines.wrapper import ingest, spew
_, datapackage, resource_iterator = ingest()
def intTryParse(value):
try:
int(value)
return True
except ValueError:
return False
def process(res):
for row in res:
if intTryParse(row['1.0']):
yield row
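# Illustrative behaviour (sample values are assumptions, the '1.0' column
# name comes from the code above): a row like {'1.0': '2016'} is yielded,
# while {'1.0': 'Total'} or {'1.0': ''} is filtered out.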
spew(datapackage, (process(res) for res in resource_iterator))
| [
"[email protected]"
]
| |
b0258289543572c3d2fd2b3d83991eb4e2d9f4dc | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/orchs/svcsencap.py | 83b1a538fc7c72069845b02465a56b59e320b8da | []
| no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 8,108 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class SvcsEncap(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = ClassMeta("cobra.model.orchs.SvcsEncap")
meta.moClassName = "orchsSvcsEncap"
meta.rnFormat = "encap-%(name)s"
meta.category = MoCategory.REGULAR
meta.label = "IP Pool Resource Instance"
meta.writeAccessMask = 0x2001
meta.readAccessMask = 0x2001
meta.isDomainable = False
meta.isReadOnly = False
meta.isConfigurable = True
meta.isDeletable = True
meta.isContextRoot = False
meta.childClasses.add("cobra.model.tag.Tag")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.health.Inst")
meta.childClasses.add("cobra.model.aaa.RbacAnnotation")
meta.childClasses.add("cobra.model.orchs.RsSvcsEncapToSvcAlloc")
meta.childClasses.add("cobra.model.tag.Annotation")
meta.childNamesAndRnPrefix.append(("cobra.model.orchs.RsSvcsEncapToSvcAlloc", "rssvcsEncapToSvcAlloc-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.Annotation", "annotationKey-"))
meta.childNamesAndRnPrefix.append(("cobra.model.aaa.RbacAnnotation", "rbacDom-"))
meta.childNamesAndRnPrefix.append(("cobra.model.tag.Tag", "tagKey-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.parentClasses.add("cobra.model.orchs.Config")
meta.superClasses.add("cobra.model.orchs.Entity")
meta.superClasses.add("cobra.model.orchs.Element")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.naming.NamedIdentifiedObject")
meta.rnPrefixes = [
('encap-', True),
]
prop = PropMeta("str", "annotation", "annotation", 38579, PropCategory.REGULAR)
prop.label = "Annotation. Suggested format orchestrator:value"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("annotation", prop)
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "descr", "descr", 28290, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "encap", "encap", 28246, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("encap", prop)
prop = PropMeta("str", "encapNsName", "encapNsName", 28248, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("encapNsName", prop)
prop = PropMeta("str", "extMngdBy", "extMngdBy", 40718, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "undefined"
prop._addConstant("msc", "msc", 1)
prop._addConstant("undefined", "undefined", 0)
meta.props.add("extMngdBy", prop)
prop = PropMeta("str", "guid", "guid", 28255, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.regex = ['[[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}]{0,1}']
meta.props.add("guid", prop)
prop = PropMeta("str", "id", "id", 28253, PropCategory.REGULAR)
prop.label = "Id"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("id", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "legLoc", "legLoc", 28245, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("legLoc", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "monPolDn", "monPolDn", 28679, PropCategory.REGULAR)
prop.label = "Monitoring policy attached to this observable object"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("monPolDn", prop)
prop = PropMeta("str", "name", "name", 28294, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.range = [(1, 128)]
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "rtrId", "rtrId", 28247, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
meta.props.add("rtrId", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "uid", "uid", 8, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("uid", prop)
meta.namingProps.append(getattr(meta.props, "name"))
# Deployment Meta
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("CtxToNwIf", "Private Network to Interface", "cobra.model.nw.If"))
def __init__(self, parentMoOrDn, name, markDirty=True, **creationProps):
namingVals = [name]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
]
| |
3157fe6fdd447bb57057687c3cf1dc6b2b15380c | 7eadcb17f2555a80b1d7a065a9f9cefbe3c127e7 | /frappe/translate.py | d3d629a6fbd8419a6d004f9b68b66dfb0ab778d2 | [
"MIT"
]
| permissive | sivaranjanipalanivel/frappesample | a97f7c636d5f5869e3410a57bc3ac82d32884d0e | e37ff70ac92c16d1fb0bce5eb11dad62c9ff7564 | refs/heads/master | 2020-07-24T17:00:35.139985 | 2019-09-12T07:21:50 | 2019-09-12T07:21:50 | 207,989,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,483 | py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
from six import iteritems, text_type, string_types
"""
frappe.translate
~~~~~~~~~~~~~~~~
Translation tools for frappe
"""
import frappe, os, re, codecs, json
from frappe.model.utils import render_include, InvalidIncludePath
from frappe.utils import strip
from jinja2 import TemplateError
import itertools, operator
def guess_language(lang_list=None):
"""Set `frappe.local.lang` from HTTP headers at beginning of request"""
lang_codes = frappe.request.accept_languages.values()
if not lang_codes:
return frappe.local.lang
guess = None
if not lang_list:
lang_list = get_all_languages() or []
for l in lang_codes:
code = l.strip()
if not isinstance(code, text_type):
code = text_type(code, 'utf-8')
if code in lang_list or code == "en":
guess = code
break
# check if parent language (pt) is setup, if variant (pt-BR)
if "-" in code:
code = code.split("-")[0]
if code in lang_list:
guess = code
break
return guess or frappe.local.lang
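# Example (illustrative, not part of the original module): with an
# Accept-Language header of "pt-BR,fr;q=0.8" and an installed language list
# of ["pt", "en"], "pt-BR" itself misses the list, but its parent code
# "pt-BR".split("-")[0] == "pt" matches, so guess_language() returns "pt".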
def get_user_lang(user=None):
"""Set frappe.local.lang from user preferences on session beginning or resumption"""
if not user:
user = frappe.session.user
# via cache
lang = frappe.cache().hget("lang", user)
if not lang:
# if defined in user profile
lang = frappe.db.get_value("User", user, "language")
if not lang:
lang = frappe.db.get_default("lang")
if not lang:
lang = frappe.local.lang or 'en'
frappe.cache().hset("lang", user, lang)
return lang
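# Lookup order sketched from the code above (no new behaviour): cached value
# per user -> User.language -> system default "lang" -> frappe.local.lang ->
# "en"; the resolved value is then cached under the "lang" hash for the user.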
def get_lang_code(lang):
return frappe.db.get_value('Language', {'language_name': lang}) or lang
def set_default_language(lang):
"""Set Global default language"""
frappe.db.set_default("lang", lang)
frappe.local.lang = lang
def get_all_languages():
"""Returns all language codes ar, ch etc"""
def _get():
if not frappe.db:
frappe.connect()
return frappe.db.sql_list('select name from tabLanguage')
return frappe.cache().get_value('languages', _get)
def get_lang_dict():
"""Returns all languages in dict format, full name is the key e.g. `{"english":"en"}`"""
return dict(frappe.db.sql('select language_name, name from tabLanguage'))
def get_dict(fortype, name=None):
"""Returns translation dict for a type of object.
:param fortype: must be one of `doctype`, `page`, `report`, `include`, `jsfile`, `boot`
:param name: name of the document for which assets are to be returned.
"""
fortype = fortype.lower()
cache = frappe.cache()
asset_key = fortype + ":" + (name or "-")
translation_assets = cache.hget("translation_assets", frappe.local.lang, shared=True) or {}
	if asset_key not in translation_assets:
if fortype=="doctype":
messages = get_messages_from_doctype(name)
elif fortype=="page":
messages = get_messages_from_page(name)
elif fortype=="report":
messages = get_messages_from_report(name)
elif fortype=="include":
messages = get_messages_from_include_files()
elif fortype=="jsfile":
messages = get_messages_from_file(name)
elif fortype=="boot":
messages = get_messages_from_include_files()
messages += frappe.db.sql("select 'Print Format:', name from `tabPrint Format`")
messages += frappe.db.sql("select 'DocType:', name from tabDocType")
messages += frappe.db.sql("select 'Role:', name from tabRole")
messages += frappe.db.sql("select 'Module:', name from `tabModule Def`")
messages += frappe.db.sql("select 'Module:', label from `tabDesktop Icon` where standard=1 or owner=%s",
frappe.session.user)
message_dict = make_dict_from_messages(messages)
message_dict.update(get_dict_from_hooks(fortype, name))
# remove untranslated
message_dict = {k:v for k, v in iteritems(message_dict) if k!=v}
translation_assets[asset_key] = message_dict
cache.hset("translation_assets", frappe.local.lang, translation_assets, shared=True)
return translation_assets[asset_key]
def get_dict_from_hooks(fortype, name):
translated_dict = {}
hooks = frappe.get_hooks("get_translated_dict")
for (hook_fortype, fortype_name) in hooks:
if hook_fortype == fortype and fortype_name == name:
for method in hooks[(hook_fortype, fortype_name)]:
translated_dict.update(frappe.get_attr(method)())
return translated_dict
def add_lang_dict(code):
"""Extracts messages and returns Javascript code snippet to be appened at the end
of the given script
:param code: Javascript code snippet to which translations needs to be appended."""
messages = extract_messages_from_code(code)
messages = [message for pos, message in messages]
code += "\n\n$.extend(frappe._messages, %s)" % json.dumps(make_dict_from_messages(messages))
return code
def make_dict_from_messages(messages, full_dict=None):
"""Returns translated messages as a dict in Language specified in `frappe.local.lang`
:param messages: List of untranslated messages
"""
out = {}
	if full_dict is None:
full_dict = get_full_dict(frappe.local.lang)
for m in messages:
if m[1] in full_dict:
out[m[1]] = full_dict[m[1]]
return out
def get_lang_js(fortype, name):
"""Returns code snippet to be appended at the end of a JS script.
:param fortype: Type of object, e.g. `DocType`
:param name: Document name
"""
return "\n\n$.extend(frappe._messages, %s)" % json.dumps(get_dict(fortype, name))
def get_full_dict(lang):
"""Load and return the entire translations dictionary for a language from :meth:`frape.cache`
:param lang: Language Code, e.g. `hi`
"""
if not lang:
return {}
# found in local, return!
if getattr(frappe.local, 'lang_full_dict', None) and frappe.local.lang_full_dict.get(lang, None):
return frappe.local.lang_full_dict
frappe.local.lang_full_dict = load_lang(lang)
try:
		# get user-specific translation data
user_translations = get_user_translations(lang)
except Exception:
user_translations = None
if user_translations:
frappe.local.lang_full_dict.update(user_translations)
return frappe.local.lang_full_dict
def load_lang(lang, apps=None):
"""Combine all translations from `.csv` files in all `apps`.
For derivative languages (es-GT), take translations from the
base language (es) and then update translations from the child (es-GT)"""
if lang=='en':
return {}
out = frappe.cache().hget("lang_full_dict", lang, shared=True)
if not out:
out = {}
for app in (apps or frappe.get_all_apps(True)):
path = os.path.join(frappe.get_pymodule_path(app), "translations", lang + ".csv")
out.update(get_translation_dict_from_file(path, lang, app) or {})
if '-' in lang:
parent = lang.split('-')[0]
parent_out = load_lang(parent)
parent_out.update(out)
out = parent_out
frappe.cache().hset("lang_full_dict", lang, out, shared=True)
return out or {}
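# Example of the derivative-language merge above (file contents invented):
# if es.csv maps "Save" -> "Guardar" and es-GT.csv maps "Save" ->
# "Guardar cambios", then load_lang("es-GT") starts from the parent "es"
# dict and overlays the child, ending with {"Save": "Guardar cambios", ...}.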
def get_translation_dict_from_file(path, lang, app):
"""load translation dict from given path"""
cleaned = {}
if os.path.exists(path):
csv_content = read_csv_file(path)
for item in csv_content:
if len(item)==3:
# with file and line numbers
cleaned[item[1]] = strip(item[2])
elif len(item)==2:
cleaned[item[0]] = strip(item[1])
elif item:
raise Exception("Bad translation in '{app}' for language '{lang}': {values}".format(
app=app, lang=lang, values=repr(item).encode("utf-8")
))
return cleaned
def get_user_translations(lang):
out = frappe.cache().hget('lang_user_translations', lang)
if out is None:
out = {}
for fields in frappe.get_all('Translation',
fields= ["source_name", "target_name"], filters={'language': lang}):
out.update({fields.source_name: fields.target_name})
frappe.cache().hset('lang_user_translations', lang, out)
return out
def clear_cache():
"""Clear all translation assets from :meth:`frappe.cache`"""
cache = frappe.cache()
cache.delete_key("langinfo")
# clear translations saved in boot cache
cache.delete_key("bootinfo")
cache.delete_key("lang_full_dict", shared=True)
cache.delete_key("translation_assets", shared=True)
cache.delete_key("lang_user_translations")
def get_messages_for_app(app):
"""Returns all messages (list) for a specified `app`"""
messages = []
modules = ", ".join(['"{}"'.format(m.title().replace("_", " ")) \
for m in frappe.local.app_modules[app]])
# doctypes
if modules:
for name in frappe.db.sql_list("""select name from tabDocType
where module in ({})""".format(modules)):
messages.extend(get_messages_from_doctype(name))
# pages
for name, title in frappe.db.sql("""select name, title from tabPage
where module in ({})""".format(modules)):
messages.append((None, title or name))
messages.extend(get_messages_from_page(name))
# reports
for name in frappe.db.sql_list("""select tabReport.name from tabDocType, tabReport
where tabReport.ref_doctype = tabDocType.name
and tabDocType.module in ({})""".format(modules)):
messages.append((None, name))
messages.extend(get_messages_from_report(name))
for i in messages:
if not isinstance(i, tuple):
raise Exception
# workflow based on app.hooks.fixtures
messages.extend(get_messages_from_workflow(app_name=app))
# custom fields based on app.hooks.fixtures
messages.extend(get_messages_from_custom_fields(app_name=app))
# app_include_files
messages.extend(get_all_messages_from_js_files(app))
# server_messages
messages.extend(get_server_messages(app))
return deduplicate_messages(messages)
def get_messages_from_doctype(name):
"""Extract all translatable messages for a doctype. Includes labels, Python code,
Javascript code, html templates"""
messages = []
meta = frappe.get_meta(name)
messages = [meta.name, meta.module]
if meta.description:
messages.append(meta.description)
# translations of field labels, description and options
for d in meta.get("fields"):
messages.extend([d.label, d.description])
if d.fieldtype=='Select' and d.options:
options = d.options.split('\n')
if not "icon" in options[0]:
messages.extend(options)
# translations of roles
for d in meta.get("permissions"):
if d.role:
messages.append(d.role)
messages = [message for message in messages if message]
messages = [('DocType: ' + name, message) for message in messages if is_translatable(message)]
# extract from js, py files
doctype_file_path = frappe.get_module_path(meta.module, "doctype", meta.name, meta.name)
messages.extend(get_messages_from_file(doctype_file_path + ".js"))
messages.extend(get_messages_from_file(doctype_file_path + "_list.js"))
messages.extend(get_messages_from_file(doctype_file_path + "_list.html"))
messages.extend(get_messages_from_file(doctype_file_path + "_calendar.js"))
# workflow based on doctype
messages.extend(get_messages_from_workflow(doctype=name))
return messages
def get_messages_from_workflow(doctype=None, app_name=None):
assert doctype or app_name, 'doctype or app_name should be provided'
# translations for Workflows
workflows = []
if doctype:
workflows = frappe.get_all('Workflow', filters={'document_type': doctype})
else:
fixtures = frappe.get_hooks('fixtures', app_name=app_name) or []
for fixture in fixtures:
			if isinstance(fixture, string_types) and fixture == 'Workflow':
workflows = frappe.get_all('Workflow')
break
elif isinstance(fixture, dict) and fixture.get('dt', fixture.get('doctype')) == 'Workflow':
workflows.extend(frappe.get_all('Workflow', filters=fixture.get('filters')))
messages = []
for w in workflows:
states = frappe.db.sql(
'select distinct state from `tabWorkflow Document State` where parent=%s',
(w['name'],), as_dict=True)
messages.extend([('Workflow: ' + w['name'], state['state']) for state in states if is_translatable(state['state'])])
states = frappe.db.sql(
'select distinct message from `tabWorkflow Document State` where parent=%s and message is not null',
(w['name'],), as_dict=True)
messages.extend([("Workflow: " + w['name'], state['message'])
for state in states if is_translatable(state['message'])])
actions = frappe.db.sql(
'select distinct action from `tabWorkflow Transition` where parent=%s',
(w['name'],), as_dict=True)
messages.extend([("Workflow: " + w['name'], action['action']) \
for action in actions if is_translatable(action['action'])])
return messages
def get_messages_from_custom_fields(app_name):
fixtures = frappe.get_hooks('fixtures', app_name=app_name) or []
custom_fields = []
for fixture in fixtures:
if isinstance(fixture, string_types) and fixture == 'Custom Field':
custom_fields = frappe.get_all('Custom Field', fields=['name','label', 'description', 'fieldtype', 'options'])
break
elif isinstance(fixture, dict) and fixture.get('dt', fixture.get('doctype')) == 'Custom Field':
custom_fields.extend(frappe.get_all('Custom Field', filters=fixture.get('filters'),
fields=['name','label', 'description', 'fieldtype', 'options']))
messages = []
for cf in custom_fields:
for prop in ('label', 'description'):
if not cf.get(prop) or not is_translatable(cf[prop]):
continue
messages.append(('Custom Field - {}: {}'.format(prop, cf['name']), cf[prop]))
		if cf['fieldtype'] == 'Select' and cf.get('options'):
for option in cf['options'].split('\n'):
if option and 'icon' not in option and is_translatable(option):
messages.append(('Custom Field - Description: ' + cf['name'], option))
return messages
def get_messages_from_page(name):
"""Returns all translatable strings from a :class:`frappe.core.doctype.Page`"""
return _get_messages_from_page_or_report("Page", name)
def get_messages_from_report(name):
"""Returns all translatable strings from a :class:`frappe.core.doctype.Report`"""
report = frappe.get_doc("Report", name)
messages = _get_messages_from_page_or_report("Report", name,
frappe.db.get_value("DocType", report.ref_doctype, "module"))
# TODO position here!
if report.query:
messages.extend([(None, message) for message in re.findall('"([^:,^"]*):', report.query) if is_translatable(message)])
messages.append((None,report.report_name))
return messages
def _get_messages_from_page_or_report(doctype, name, module=None):
if not module:
module = frappe.db.get_value(doctype, name, "module")
doc_path = frappe.get_module_path(module, doctype, name)
messages = get_messages_from_file(os.path.join(doc_path, frappe.scrub(name) +".py"))
if os.path.exists(doc_path):
for filename in os.listdir(doc_path):
if filename.endswith(".js") or filename.endswith(".html"):
messages += get_messages_from_file(os.path.join(doc_path, filename))
return messages
def get_server_messages(app):
"""Extracts all translatable strings (tagged with :func:`frappe._`) from Python modules
inside an app"""
messages = []
for basepath, folders, files in os.walk(frappe.get_pymodule_path(app)):
for dontwalk in (".git", "public", "locale"):
if dontwalk in folders: folders.remove(dontwalk)
for f in files:
f = frappe.as_unicode(f)
if f.endswith(".py") or f.endswith(".html") or f.endswith(".js"):
messages.extend(get_messages_from_file(os.path.join(basepath, f)))
return messages
def get_messages_from_include_files(app_name=None):
"""Returns messages from js files included at time of boot like desk.min.js for desk and web"""
messages = []
for file in (frappe.get_hooks("app_include_js", app_name=app_name) or []) + (frappe.get_hooks("web_include_js", app_name=app_name) or []):
messages.extend(get_messages_from_file(os.path.join(frappe.local.sites_path, file)))
return messages
def get_all_messages_from_js_files(app_name=None):
"""Extracts all translatable strings from app `.js` files"""
messages = []
for app in ([app_name] if app_name else frappe.get_installed_apps()):
if os.path.exists(frappe.get_app_path(app, "public")):
for basepath, folders, files in os.walk(frappe.get_app_path(app, "public")):
if "frappe/public/js/lib" in basepath:
continue
for fname in files:
if fname.endswith(".js") or fname.endswith(".html"):
messages.extend(get_messages_from_file(os.path.join(basepath, fname)))
return messages
def get_messages_from_file(path):
"""Returns a list of transatable strings from a code file
:param path: path of the code file
"""
apps_path = get_bench_dir()
if os.path.exists(path):
with open(path, 'r') as sourcefile:
return [(os.path.relpath(" +".join([path, str(pos)]), apps_path),
message) for pos, message in extract_messages_from_code(sourcefile.read(), path.endswith(".py"))]
else:
# print "Translate: {0} missing".format(os.path.abspath(path))
return []
def extract_messages_from_code(code, is_py=False):
"""Extracts translatable srings from a code file
:param code: code from which translatable files are to be extracted
:param is_py: include messages in triple quotes e.g. `_('''message''')`"""
try:
code = render_include(code)
except (TemplateError, ImportError, InvalidIncludePath):
# Exception will occur when it encounters John Resig's microtemplating code
pass
messages = []
messages += [(m.start(), m.groups()[0]) for m in re.compile('_\("([^"]*)"').finditer(code)]
messages += [(m.start(), m.groups()[0]) for m in re.compile("_\('([^']*)'").finditer(code)]
if is_py:
messages += [(m.start(), m.groups()[0]) for m in re.compile('_\("{3}([^"]*)"{3}.*\)').finditer(code)]
messages = [(pos, message) for pos, message in messages if is_translatable(message)]
return pos_to_line_no(messages, code)
def is_translatable(m):
if re.search("[a-zA-Z]", m) and not m.startswith("fa fa-") and not m.endswith("px") and not m.startswith("eval:"):
return True
return False
def pos_to_line_no(messages, code):
ret = []
messages = sorted(messages, key=lambda x: x[0])
newlines = [m.start() for m in re.compile('\\n').finditer(code)]
line = 1
newline_i = 0
for pos, message in messages:
while newline_i < len(newlines) and pos > newlines[newline_i]:
line+=1
newline_i+= 1
ret.append((line, message))
return ret
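# Worked example (illustrative input, not taken from the codebase):
# extract_messages_from_code('a = _("Hello")\nb = _(\'World\')') matches the
# _("...") and _('...') patterns, filters them with is_translatable, and
# pos_to_line_no turns character offsets into line numbers, giving
# [(1, 'Hello'), (2, 'World')].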
def read_csv_file(path):
"""Read CSV file and return as list of list
:param path: File path"""
from csv import reader
with codecs.open(path, 'r', 'utf-8') as msgfile:
data = msgfile.read()
		# strip ASCII file/group separator control characters that show up in some Japanese CSV exports
data = data.replace(chr(28), "").replace(chr(29), "")
data = reader([r.encode('utf-8') for r in data.splitlines()])
newdata = [[text_type(val, 'utf-8') for val in row] for row in data]
return newdata
def write_csv_file(path, app_messages, lang_dict):
"""Write translation CSV file.
:param path: File path, usually `[app]/translations`.
:param app_messages: Translatable strings for this app.
:param lang_dict: Full translated dict.
"""
app_messages.sort(key = lambda x: x[1])
from csv import writer
with open(path, 'wb') as msgfile:
w = writer(msgfile, lineterminator='\n')
for p, m in app_messages:
t = lang_dict.get(m, '')
# strip whitespaces
t = re.sub('{\s?([0-9]+)\s?}', "{\g<1>}", t)
w.writerow([p.encode('utf-8') if p else '', m.encode('utf-8'), t.encode('utf-8')])
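# Resulting CSV layout (the path and translation below are invented
# examples): one row per source string of "position","message","translation"
#   apps/frappe/frappe/core/doctype/user/user.py +42,Enabled,Activé
# with {0}-style placeholders normalised by the re.sub above.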
def get_untranslated(lang, untranslated_file, get_all=False):
"""Returns all untranslated strings for a language and writes in a file
:param lang: Language code.
:param untranslated_file: Output file path.
:param get_all: Return all strings, translated or not."""
clear_cache()
apps = frappe.get_all_apps(True)
messages = []
untranslated = []
for app in apps:
messages.extend(get_messages_for_app(app))
messages = deduplicate_messages(messages)
def escape_newlines(s):
return (s.replace("\\\n", "|||||")
.replace("\\n", "||||")
.replace("\n", "|||"))
if get_all:
print(str(len(messages)) + " messages")
with open(untranslated_file, "w") as f:
for m in messages:
# replace \n with ||| so that internal linebreaks don't get split
f.write((escape_newlines(m[1]) + os.linesep).encode("utf-8"))
else:
full_dict = get_full_dict(lang)
for m in messages:
if not full_dict.get(m[1]):
untranslated.append(m[1])
if untranslated:
print(str(len(untranslated)) + " missing translations of " + str(len(messages)))
with open(untranslated_file, "w") as f:
for m in untranslated:
# replace \n with ||| so that internal linebreaks don't get split
f.write((escape_newlines(m) + os.linesep).encode("utf-8"))
else:
print("all translated!")
def update_translations(lang, untranslated_file, translated_file):
"""Update translations from a source and target file for a given language.
:param lang: Language code (e.g. `en`).
:param untranslated_file: File path with the messages in English.
:param translated_file: File path with messages in language to be updated."""
clear_cache()
full_dict = get_full_dict(lang)
def restore_newlines(s):
return (s.replace("|||||", "\\\n")
.replace("| | | | |", "\\\n")
.replace("||||", "\\n")
.replace("| | | |", "\\n")
.replace("|||", "\n")
.replace("| | |", "\n"))
translation_dict = {}
for key, value in zip(frappe.get_file_items(untranslated_file, ignore_empty_lines=False),
frappe.get_file_items(translated_file, ignore_empty_lines=False)):
# undo hack in get_untranslated
translation_dict[restore_newlines(key)] = restore_newlines(value)
full_dict.update(translation_dict)
for app in frappe.get_all_apps(True):
write_translations_file(app, lang, full_dict)
def import_translations(lang, path):
"""Import translations from file in standard format"""
clear_cache()
full_dict = get_full_dict(lang)
full_dict.update(get_translation_dict_from_file(path, lang, 'import'))
for app in frappe.get_all_apps(True):
write_translations_file(app, lang, full_dict)
def rebuild_all_translation_files():
"""Rebuild all translation files: `[app]/translations/[lang].csv`."""
for lang in get_all_languages():
for app in frappe.get_all_apps():
write_translations_file(app, lang)
def write_translations_file(app, lang, full_dict=None, app_messages=None):
"""Write a translation file for a given language.
:param app: `app` for which translations are to be written.
:param lang: Language code.
:param full_dict: Full translated language dict (optional).
:param app_messages: Source strings (optional).
"""
if not app_messages:
app_messages = get_messages_for_app(app)
if not app_messages:
return
tpath = frappe.get_pymodule_path(app, "translations")
frappe.create_folder(tpath)
write_csv_file(os.path.join(tpath, lang + ".csv"),
app_messages, full_dict or get_full_dict(lang))
def send_translations(translation_dict):
"""Append translated dict in `frappe.local.response`"""
if "__messages" not in frappe.local.response:
frappe.local.response["__messages"] = {}
frappe.local.response["__messages"].update(translation_dict)
def deduplicate_messages(messages):
ret = []
op = operator.itemgetter(1)
messages = sorted(messages, key=op)
for k, g in itertools.groupby(messages, op):
ret.append(next(g))
return ret
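# Example (illustrative): deduplicate_messages([(None, 'Save'),
# ('file.py +10', 'Save'), (None, 'Close')]) sorts by the message text,
# groups equal messages and keeps the first tuple of each group, returning
# [(None, 'Close'), (None, 'Save')].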
def get_bench_dir():
return os.path.join(frappe.__file__, '..', '..', '..', '..')
def rename_language(old_name, new_name):
if not frappe.db.exists('Language', new_name):
return
language_in_system_settings = frappe.db.get_single_value("System Settings", "language")
if language_in_system_settings == old_name:
frappe.db.set_value("System Settings", "System Settings", "language", new_name)
frappe.db.sql("""update `tabUser` set language=%(new_name)s where language=%(old_name)s""",
{ "old_name": old_name, "new_name": new_name })
| [
"[email protected]"
]
| |
80a7585e86a4e8633b65ccb5495c63da103934b7 | 8bbeb7b5721a9dbf40caa47a96e6961ceabb0128 | /python3/216.Combination Sum III(组合总和 III).py | 4c2a30d0cd8dad2c5c465ba3a4dfdb989f691e11 | [
"MIT"
]
| permissive | lishulongVI/leetcode | bb5b75642f69dfaec0c2ee3e06369c715125b1ba | 6731e128be0fd3c0bdfe885c1a409ac54b929597 | refs/heads/master | 2020-03-23T22:17:40.335970 | 2018-07-23T14:46:06 | 2018-07-23T14:46:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,388 | py | """
Find all possible combinations of k numbers that add up to a number n,
given that only numbers from 1 to 9 can be used and each combination
should be a unique set of numbers.

Note:
    * All numbers will be positive integers.
    * The solution set must not contain duplicate combinations.

Example 1:
    Input: k = 3, n = 7
    Output: [[1,2,4]]

Example 2:
    Input: k = 3, n = 9
    Output: [[1,2,6], [1,3,5], [2,3,4]]
"""
from itertools import combinations
class Solution:
    def combinationSum3(self, k, n):
        """
        :type k: int
        :type n: int
        :rtype: List[List[int]]
        """
        # The original body was empty; a straightforward completion: enumerate
        # every k-element subset of 1..9 and keep those that sum to n.
        # combinations() emits each subset exactly once, so no duplicates arise.
        return [list(c) for c in combinations(range(1, 10), k) if sum(c) == n]
| [
"[email protected]"
]
| |
c5daf96e1ec9ac90dc1db252619f073fb6d4df6d | 179a0f995f5a3eb7a6005f8e96498ef21b2bf166 | /docs/conf.py | 45ccdf6d81b9baf63f859bf4fc96836c47707904 | [
"MIT"
]
| permissive | VB6Hobbyst7/pycatia | 845052a4584318bf0cf0861512203ddd337a7bca | cff309fe2b4802ff2b2c5c984f8064747f81065d | refs/heads/master | 2023-04-14T20:28:51.427101 | 2021-04-27T11:03:42 | 2021-04-27T11:03:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,936 | py | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
from unittest.mock import MagicMock
sys.path.insert(0, os.path.abspath('..'))
# Mock out Windows-only dependencies so Sphinx autodoc can import the package
# when the docs are built on non-Windows machines.
class Mock(MagicMock):
    @classmethod
    def __getattr__(cls, name):
        return MagicMock()
MOCK_MODULES = ['pywin32', 'win32com.client', 'pywintypes']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# -- Project information -----------------------------------------------------
project = 'pycatia'
copyright = '2020, Paul Bourne'
author = 'Paul Bourne'
# The short X.Y version
version = '0.5.0'
# The full version, including alpha/beta/rc tags
release = version
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.intersphinx',
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.todo',
'sphinx_togglebutton',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'github_user': 'evereux',
'github_repo': 'pycatia',
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_theme_path = []
html_css_files = [
'css/pycatia.css',
]
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'pycatiadoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'pycatia.tex', 'pycatia Documentation',
'Paul Bourne', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pycatia', 'pycatia Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'pycatia', 'pycatia Documentation',
author, 'pycatia', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
| [
"[email protected]"
]
| |
5b32271db5c92ee02645aa87c392e218c743cf69 | 4e8ac215b672b333f19da87787c0d8768fee439e | /MIDI Remote Scripts/ableton/v2/control_surface/control/control.py | b050616c5b7e2facc9c92208f7c4ed9e683e3dbf | [
"MIT"
]
| permissive | aarkwright/ableton_devices | 593f47293c673aa56f6e0347ca6444b7fce2812a | fe5df3bbd64ccbc136bba722ba1e131a02969798 | refs/heads/master | 2020-07-02T08:11:21.137438 | 2019-08-09T13:48:06 | 2019-08-09T13:48:06 | 201,467,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,384 | py | # uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\ableton\v2\control_surface\control\control.py
# Compiled at: 2019-05-15 03:17:58
from __future__ import absolute_import, print_function, unicode_literals
from functools import partial
from ...base import lazy_attribute, mixin, nop, task, Disconnectable, EventObject, NamedTuple
__all__ = ('Control', 'InputControl', 'ControlManager', 'control_event', 'control_color',
'Connectable')
class ControlManager(EventObject):
"""
Base class needed to define Controls. The Control Manager stores the state of the
Controls.
"""
def __init__(self, *a, **k):
super(ControlManager, self).__init__(*a, **k)
self._control_states = dict()
def add_control(self, name, control):
"""
Dynamically adds a Control to the object. The Control will be added to the object
as an attribute with the given `name`.
"""
if hasattr(self, name):
raise AttributeError(b'Control would overwrite an existing property')
control_state = control._get_state(self)
setattr(self, name, control_state)
return control_state
@lazy_attribute
def _tasks(self):
"""
Task Group for Controls for time-based events and feedback.
"""
return task.TaskGroup()
def control_notifications_enabled(self):
"""
Override to enable/disable triggering events for all Controls in this
Control Manager.
"""
return True
def update(self):
"""
Sends the current feedback to all Control Elements that are connected to Controls
of this Control Manager.
"""
for control_state in self._control_states.values():
control_state.update()
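def _add_control_usage_sketch():
    # Sketch (assumption: not part of the decompiled module) of the dynamic
    # registration documented in `add_control`; `shift_button` is a
    # hypothetical name and InputControl is defined further below in this
    # module, resolving when this function is called.
    manager = ControlManager()
    state = manager.add_control(b'shift_button', InputControl())
    return manager.shift_button is state  # the state is exposed as an attribute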
def control_event(event_name):
"""
Defines an event of a Control. The event can be used in two ways:
* As a function-decorator on a class level
* By assigning a callable to the event
Only one listener can be connected with an event.
Events need to be defined on a Control class-level.
"""
def event_decorator(self):
def event_listener_decorator(event_listener):
assert event_listener not in self._event_listeners
self._event_listeners[event_name] = event_listener
return self
return event_listener_decorator
def event_setter(self, event_listener):
self._event_listeners[event_name] = event_listener
return property(event_decorator, event_setter)
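def _control_event_usage_sketch():
    # Sketch (assumption: not part of the decompiled module) of the two
    # connection styles described in the `control_event` docstring above.
    # `ExampleComponent` is a hypothetical name; InputControl is defined
    # further below and resolves when this function is called.
    class ExampleComponent(ControlManager):
        button = InputControl()
        fader = InputControl()
        @button.value  # style 1: decorator; it returns the control itself,
        def button(self, value, control):  # so rebinding the name is safe
            pass
    # style 2: assign a callable to the event
    ExampleComponent.fader.value = lambda self, value, control: None
    return ExampleComponent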
class control_color(object):
"""
Defines a color of a Control. The color is created with a default color and
updates the Control every time a new color is set.
Colors need to be defined on Control-state level.
"""
def __init__(self, default_color, *a, **k):
super(control_color, self).__init__(*a, **k)
self.default_color = default_color
def __get__(self, obj, owner):
if obj is None or self not in obj._colors:
return self.default_color
else:
return obj._colors[self]
def __set__(self, obj, val):
obj._colors[self] = val
obj._send_current_color()
class Control(object):
"""
Base class for all Controls. Controls are used to define a high level interface for
low level Control Elements. They add a useful set of functionality to it:
* Well defined and descriptive events that compensate for inconsistencies of
the received MIDI.
* Logic and state common in other UI frameworks, like an enabled state to deactivate
the Control under certain circumstances.
* Feedback to represents different states of the Control.
Controls are a virtual representation of a relation between a hardware control and
a piece of logic. A Control needs to be connected with a Control Element to be
functional. The Control Element is connected and disconnected by using
:meth:`Control.State.set_control_element`. The user of a Control does not need to
care whether a Control Element is currently connected, which makes working with Controls
much less error-prone than working with Control Elements directly.
Controls are a Descriptor on a class level, so listeners can be easily defined using
decorators. Events are defined using :func:`control_event`. Classes using Controls
need to inherit from :class:`ControlManager`.
The Control needs an actual stateful representation, that instantiated for each
instance of the class implementing it. This is defined in the inner State-class.
"""
class State(EventObject):
"""
State-full representation of the Control.
"""
enabled = True
def __init__(self, control=None, manager=None, *a, **k):
super(Control.State, self).__init__(*a, **k)
assert control is not None
assert manager is not None
self._colors = dict()
self._manager = manager
self._event_listeners = control._event_listeners
self._control_element = None
self._has_tasks = False
manager.register_disconnectable(self)
return
def disconnect(self):
super(Control.State, self).disconnect()
if self._has_tasks:
self.tasks.kill()
self.tasks.clear()
@lazy_attribute
def tasks(self):
"""
Returns a Task Group for this Control. The Task Group is created the first
time the property is accessed.
"""
self._has_tasks = True
return self._manager._tasks.add(task.TaskGroup())
def set_control_element(self, control_element):
"""
Connect a Control with a Control Element or disconnect the Control if
None is passed. When connecting, the Control Element is reset and the
Control's current color is sent. When disconnecting, the Control Element
needs to be updated by its new owner.
"""
self._control_element = control_element
if self._control_element:
self._control_element.reset_state()
def _call_listener(self, listener_name, *args):
listener = self._event_listeners.get(listener_name, None)
if listener is not None and self._notifications_enabled():
args = args + (self,)
listener(self._manager, *args)
return
def _has_listener(self, listener_name):
return listener_name in self._event_listeners
def _event_listener_required(self):
return len(self._event_listeners) > 0
def _notifications_enabled(self):
return self.enabled and self._manager.control_notifications_enabled()
def update(self):
pass
def _send_current_color(self):
pass
_extra_kws = {}
_extra_args = []
def __init__(self, extra_args=None, extra_kws=None, *a, **k):
super(Control, self).__init__(*a, **k)
self._event_listeners = {}
if extra_args is not None:
self._extra_args = extra_args
if extra_kws is not None:
self._extra_kws = extra_kws
return
def __get__(self, manager, owner):
if manager is not None:
return self._get_state(manager)
else:
return self
def __set__(self, manager, owner):
raise RuntimeError(b'Cannot change control.')
def _make_control_state(self, manager):
return self.State(control=self, manager=manager, *self._extra_args, **self._extra_kws)
def _get_state(self, manager, state_factory=None):
if self not in manager._control_states:
if state_factory is None:
state_factory = self._make_control_state
manager._control_states[self] = None
manager._control_states[self] = state_factory(manager)
if manager._control_states[self] is None:
raise RuntimeError(b'Cannot fetch state during construction of controls.')
return manager._control_states[self]
def _clear_state(self, manager):
if self in manager._control_states:
del manager._control_states[self]
class InputControl(Control):
"""
Base Class for Controls that react to a MIDI value event.
"""
value = control_event(b'value')
class State(Control.State):
"""
State-full representation of the Control.
"""
def __init__(self, control=None, channel=None, identifier=None, *a, **k):
super(InputControl.State, self).__init__(control=control, *a, **k)
self._value_slot = None
self._channel = channel
self._identifier = identifier
self._register_value_slot(self._manager, control)
self._manager.register_disconnectable(self)
return
def set_control_element(self, control_element):
"""
Connects the Control to the value-event of the Control Element and sets the
defined :attr:`channel` and :attr:`identifier`.
"""
super(InputControl.State, self).set_control_element(control_element)
if self._value_slot:
self._value_slot.subject = control_element
if self._control_element:
if self._channel is not None:
self._control_element.set_channel(self._channel)
if self._identifier is not None:
self._control_element.set_identifier(self._identifier)
return
def _register_value_slot(self, manager, control):
if self._event_listener_required():
self._value_slot = self.register_slot(None, self._on_value, b'value')
return
def _on_value(self, value, *a, **k):
self._call_listener(b'value', value)
@property
def channel(self):
"""
Translates the channel of the received MIDI when sent to Live.
"""
return self._channel
@channel.setter
def channel(self, channel):
self._channel = channel
if self._control_element:
self._control_element.set_channel(self._channel)
@property
def identifier(self):
"""
Translates the identifier of the received MIDI when sent to Live.
"""
return self._identifier
@identifier.setter
def identifier(self, value):
self._identifier = value
if self._control_element:
self._control_element.set_identifier(self._identifier)
class ProxyControl(object):
"""
Control that has its own event listeners, but forwards everything else from the
proxied control. This way, a derived class can forward the control of its base class.
"""
def __init__(self, control=None, *a, **k):
super(ProxyControl, self).__init__(*a, **k)
self._control = control
assert not self._control._event_listeners, b'Cannot forward control that already has events.'
def _make_control_state(self, manager):
"""
Pass the proxy control to the state, as this one includes the event handlers
"""
return self._control.State(control=self, manager=manager, *self._control._extra_args, **self._control._extra_kws)
def _get_state(self, manager, state_factory=None):
return self._control._get_state(manager, self._make_control_state)
def _clear_state(self, manager):
self._control._clear_state(manager)
def forward_control(control):
return mixin(ProxyControl, control.__class__)(control)
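# Typical use of forward_control (assumption: illustration with hypothetical
# names, not from the original module): a subclass re-exposes a base class
# control so it can attach its own event listeners:
#
#     class Derived(Base):
#         my_button = forward_control(Base.my_button)
#
#         @my_button.value
#         def my_button(self, value, control):
#             pass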
class NullSlot(Disconnectable):
pass
class Connectable(EventObject):
"""
Mixin for connecting a property with a control.
"""
requires_listenable_connected_property = False
def __init__(self, *a, **k):
super(Connectable, self).__init__(*a, **k)
self._connection = self._make_empty_connection()
def connect_property(self, subject, property_name, transform=nop):
"""
Create a bidirectional connection between a property and a Control.
The `subject` is the host of the property with the given name.
The connected property needs to be listenable in case
:attr:`requires_listenable_connected_property` is set to True.
If a Control is a Connectable, it has certain expectations on the connected
property.
The transform argument can be used to transform the Controls value to the
expected value of the property.
Only one property can be connected at a time.
"""
assert subject is not None
self.disconnect_property()
self._connection = NamedTuple(slot=self._register_property_slot(subject, property_name), getter=partial(getattr, subject, property_name), setter=partial(setattr, subject, property_name), transform=transform)
return
def disconnect_property(self):
"""
Disconnects a property that has been connected with :meth:`connect_property`.
"""
self._connection.slot.disconnect()
self._connection = self._make_empty_connection()
def _make_empty_connection(self):
return NamedTuple(slot=NullSlot(), getter=nop, setter=nop, transform=nop)
def _register_property_slot(self, subject, property_name):
if self.requires_listenable_connected_property:
return self.register_slot(subject, self.on_connected_property_changed, property_name)
else:
return NullSlot()
@property
def connected_property_value(self):
"""
Get/set the property connected with :meth:`connect_property`
"""
return self._connection.getter()
@connected_property_value.setter
def connected_property_value(self, value):
self._connection.setter(self._connection.transform(value))
def on_connected_property_changed(self, value):
"""
Called if the connected property changes.
Has no effect if :attr:`requires_listenable_connected_property` is set to False.
"""
pass
class SendValueMixin(object):
def __init__(self, *a, **k):
super(SendValueMixin, self).__init__(*a, **k)
self._value = 0
@property
def value(self):
return self._value
@value.setter
def value(self, value):
if self._value != value:
self._value = value
self._send_current_value()
def set_control_element(self, control_element):
super(SendValueMixin, self).set_control_element(control_element)
self._send_current_value()
def update(self):
super(SendValueMixin, self).update()
self._send_current_value()
def _send_current_value(self):
if self._control_element:
self._control_element.send_value(self._value)
class SendValueControl(Control):
class State(SendValueMixin, Control.State):
        pass
| [
"[email protected]"
]
| |
8c1605776199c122465a2aa10d3dade49beec409 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02748/s229745856.py | 07e6ff9b4f694f31ed7e0dfb26750d3f2b624a60 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | _, _, M = map(int, input().split())
A = list(map(int, input().split()))
B = list(map(int, input().split()))
xyc = [tuple(map(int, input().split())) for i in range(M)]
# Cheapest total: either the cheapest A plus the cheapest B bought separately,
# or some discount-ticket pair (x, y) at price A[x-1] + B[y-1] - c.
print(min([min(A) + min(B)] + [A[x - 1] + B[y - 1] - c for x, y, c in xyc]))
| [
"[email protected]"
]
| |
572938151b792f0f6e8e2bb10d5c6bd6a452af48 | e5504d8c4880993b82d5583a11c5cc4623e0eac2 | /Arrays/loopInCircularArray__IMP.py | 768b8bdc06fdc4ba7a36f6287179bf3b4b92d756 | []
| no_license | noorulameenkm/DataStructuresAlgorithms | e5f87f426fc444d18f830e48569d2a7a50f5d7e0 | 7c3bb89326d2898f9e98590ceb8ee5fd7b3196f0 | refs/heads/master | 2023-06-08T19:29:42.507761 | 2023-05-28T16:20:19 | 2023-05-28T16:20:19 | 219,270,731 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,536 | py | def circular_array_loop_exists(arr):
# TODO: Write your code here
for i in range(len(arr)):
slow = fast = i
is_forward = arr[i] >= 0
# if slow or fast becomes '-1' this means we can't find cycle for this number
while True:
# move one step for slow pointer
slow = get_next_index(arr, is_forward, slow)
# move one step for fast pointer
fast = get_next_index(arr, is_forward, fast)
if fast != -1:
# move another step for fast pointer
fast = get_next_index(arr, is_forward, fast)
if slow == -1 or fast == -1 or slow == fast:
break
if slow != -1 and slow == fast:
return True
return False
def get_next_index(arr, is_forward, current_index):
direction = arr[current_index] >= 0
if is_forward != direction:
return -1 # change in direction, return -1
next_index = (arr[current_index] + current_index) % len(arr)
# one element cycle, return -1
if next_index == current_index:
next_index = -1
return next_index
def main():
print(circular_array_loop_exists([1, 2, -1, 2, 2]))
print(circular_array_loop_exists([2, 2, -1, 2]))
print(circular_array_loop_exists([2, 1, -1, -2]))
main()
""""
We are given an array containing positive and negative numbers. Suppose the array contains a number ‘M’ at a particular index. Now,
if ‘M’ is positive we will move forward ‘M’ indices and if ‘M’ is negative move backwards ‘M’ indices.
You should assume that the array is circular which means two things:
If, while moving forward, we reach the end of the array, we will jump to the first element to continue the movement.
If, while moving backward, we reach the beginning of the array, we will jump to the last element to continue the movement.
Write a method to determine if the array has a cycle. The cycle should have more than one element and should follow one direction
which means the cycle should not contain both forward and backward movements.
"""
""""
Alternate Method
In our algorithm, we don’t keep a record of all the numbers that have been evaluated for cycles.
Once a number has been evaluated and found not to produce a cycle, it cannot produce a cycle when
reached from any other starting index either.
If we remember all the numbers that have been visited, the algorithm improves to O(N), since each
number is then evaluated for cycles only once. We can keep track of this with a separate
array; however, the space complexity then increases to O(N). A sketch of this variant follows below.
"""
| [
"[email protected]"
]
| |
3aefb338c74473c31e9b8b9f5b57d93c9d53d0e5 | 5f957add3e3f7a1885d4f1b106de72e93c8fcb1a | /ExerciciosPython/ex072.py | a2215342cdcfa208efd442734fd5f94405993530 | [
"MIT"
]
| permissive | mpatrickaires/curso-python | 6e32cf785a3bc0076bb3ea24cd6d896604f4e774 | aba023648527d53bfe18833b91210a7e528a84d7 | refs/heads/main | 2022-12-27T00:57:07.467940 | 2020-10-14T00:48:09 | 2020-10-14T00:48:09 | 302,203,176 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 698 | py | extenso = ('zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'ten', 'eleven',
           'twelve', 'thirteen', 'fourteen', 'fifteen', 'sixteen', 'seventeen', 'eighteen', 'nineteen', 'twenty')
while True:
    numero = int(input('Type a number between 0 and 20: '))
    while numero < 0 or numero > 20:
        numero = int(input('Try again. Type a number between 0 and 20: '))
    print(f'You typed the number {extenso[numero]}')
    continuar = str(input('Do you want to continue? [Y/N] ')).strip().upper()
    while continuar != 'Y' and continuar != 'N':
        continuar = str(input('Do you want to continue? [Y/N] ')).strip().upper()
    if continuar == 'N':
        break
| [
"[email protected]"
]
| |
bb474b239a91648af44dd841343b2853899bbb38 | da38287bf935ce68321f63f17c24433384312786 | /generate&inference no texons/inference_demo_v1.2.py | 51c6887b269101830e3f04d67e3685f1a59fe4bb | []
| no_license | ningtangla/sceneParsing | 08eb5e58bceba5171e2b60a63e3b30661428e2c3 | 5b425e4203d725ac628c6b43bf1d5fa889eae7e0 | refs/heads/master | 2022-12-25T09:21:53.192613 | 2019-06-18T00:48:19 | 2019-06-18T00:48:19 | 192,429,153 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,323 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 24 13:52:00 2017
@author: Edward Coen
"""
from __future__ import division
from scipy.misc import comb
import math
import scipy.stats
import numpy as np
import networkx as nx
from networkx.algorithms.traversal.depth_first_search import dfs_successors
import itertools as it
import operator as op
import pandas as pd
import cv2
import sys
import itertools
sys.setrecursionlimit(100000000)
"""
enumerate_tree parameter
"""
TREE_NUM = 3
"""
ncrp parameters
"""
GAMMA = 1
ALL_GUEST_NUM = 6
"""
image parameters
"""
IMAGE_WIDTH = 1024
IMAGE_HEIGHT = 768
COLOR_SPACE = [[128,128,128],[0,0,255],[0,255,0],[255,0,0],[0,255,255],[255,0,255],[255,255,0],[255,255,255]]
GRAPH_WIDTH = 500
"""
Dirichlet parameters
"""
ALPHA_BASE = [20]
"""
code parameters
"""
"""
global arguments
"""
CODE_BASE = 10 #decimal
class decoder():
def __init__(self, code_base):
self.code_base = code_base
def decode_princeple(self, all_guest_num):
curr_table_guest_num = self.code_tree[0]
self.code_tree = self.code_tree[1:]
self.curr_guest_num = self.curr_guest_num + int(curr_table_guest_num)
self.table_partion_list[len(self.table_partion_list) - 1].append(int(curr_table_guest_num))
if int(curr_table_guest_num) != 1:
self.all_guest_num_list.append(int(curr_table_guest_num))
if self.curr_guest_num == all_guest_num:
self.table_partion_list.append([])
self.curr_guest_num = 0
return
else:
return self.decode_princeple(all_guest_num)
def make_decode_list(self, code_tree):
self.code_tree = code_tree
self.table_partion_list = [[]]
self.all_guest_num_list = [ALL_GUEST_NUM]
self.curr_guest_num = 0
map(self.decode_princeple, self.all_guest_num_list)
del self.table_partion_list[-1]
self.all_table_partion_list.append(self.table_partion_list)
def __call__(self):
self.code_tree_list = list(pd.read_csv('E:/ncrp_generate/tree_kind_num_' + str(ALL_GUEST_NUM) + '.csv')['tree_code'])
self.code_tree_list = map(str, self.code_tree_list)
self.all_table_partion_list = []
map(self.make_decode_list, self.code_tree_list)
return self.all_table_partion_list
class prior():
def __init__(self, all_table_partion_list):
self.all_table_partion_list = all_table_partion_list
def cal_renomalize_parameter(self, table_partion):
return 1/(1 - 1/np.array(table_partion).sum())
def cal_probability_table_partion(self, table_partion):
return reduce(op.mul, map(math.factorial, (np.array(table_partion) - 1)))/math.factorial(np.array(table_partion).sum())
def cal_permutation_table_partion(self, table_partion):
return list(set(list(itertools.permutations(table_partion))))
def cal_all_combination_guest(self, permutation_table_partion):
return reduce(op.add, map(self.cal_permutation_combination_guest, permutation_table_partion))
def cal_permutation_combination_guest(self, table_partion):
self.guest_left = np.array(table_partion).sum()
return reduce(op.mul, map(self.cal_combination_guest, table_partion))
def cal_combination_guest(self, table_guest_num):
combination_num = round(comb(self.guest_left - 1, table_guest_num - 1))
self.guest_left = self.guest_left - table_guest_num
return combination_num
def cal_prior_probability(self, table_partion_list):
probability_table_partion = map(self.cal_probability_table_partion, table_partion_list[1:])
permutation_table_partion = map(self.cal_permutation_table_partion, table_partion_list[1:])
all_combination_guest = map(self.cal_all_combination_guest, permutation_table_partion)
renomalize_parameter = map(self.cal_renomalize_parameter, table_partion_list[1:])
# print reduce(op.mul, np.array(probability_table_partion)*np.array(all_combination_guest)*np.array(renomalize_parameter))
return reduce(op.mul, np.array(probability_table_partion)*np.array(all_combination_guest)*np.array(renomalize_parameter))
def __call__(self):
return map(self.cal_prior_probability, self.all_table_partion_list)
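def _crp_partition_probability_example():
    # Worked example (assumption: this helper is not part of the original
    # script): under the CRP, seating 6 guests at tables of sizes [3, 2, 1]
    # has probability (3-1)! * (2-1)! * (1-1)! / 6! = 2 / 720, matching
    # prior.cal_probability_table_partion above.
    table_partion = [3, 2, 1]
    numerator = reduce(op.mul, map(math.factorial, np.array(table_partion) - 1))
    return numerator / math.factorial(np.array(table_partion).sum())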
class likelihood():
def __init__(self, all_table_partion_list, color_space, alpha_base):
self.all_table_partion_list = all_table_partion_list
self.alpha_base = alpha_base
self.color_space = color_space
def find_all_vertex(self, color_space):
self.all_vertex_list = []
map(self.find_vertex, color_space)
def find_vertex(self, color):
lower = np.array(color, dtype = "uint8")
upper = np.array(color, dtype = "uint8")
mask = cv2.inRange(self.img, lower, upper)
index = np.argwhere(mask == 255)
if len(index) != 0:
self.all_vertex_list.extend([index.min(axis = 0), index.max(axis = 0)])
def detect_cut_point_list(self, vertex):
cut_point_list = []
cut_propotion_list = []
new_vertex_list = []
if len(vertex) != 0:
min_x, min_y = np.array(vertex).min(axis = 0)
max_x, max_y = np.array(vertex).max(axis = 0)
all_vertex_array = np.array(self.all_vertex_list)
x_vertex_array = all_vertex_array[:, 1]
y_vertex_array = all_vertex_array[:, 0]
inner_vertex_array = all_vertex_array[(np.where((x_vertex_array >= min_x) & (x_vertex_array <= max_x))) or (np.where((y_vertex_array >= min_y) & (y_vertex_array <= max_y)))]
inner_vertex_list = map(tuple, inner_vertex_array)
inner_vertex_array = np.array(list(set(inner_vertex_list).difference(set([(min_y, min_x), (max_y, max_x)]))))
if len(inner_vertex_array) == 0:
vertx_array_y = []
vertx_array_x = []
else:
inner_x_vertex_array = inner_vertex_array[:, 1]
inner_y_vertex_array = inner_vertex_array[:, 0]
# print inner_x_vertex_array, inner_y_vertex_array
x_min_vertex_array_y = inner_vertex_array[np.where(np.abs(inner_x_vertex_array - min_x) < 3)][:, 0]
x_max_vertex_array_y = inner_vertex_array[np.where(np.abs(inner_x_vertex_array - max_x) < 3)][:, 0] + 1
y_min_vertex_array_x = inner_vertex_array[np.where(np.abs(inner_y_vertex_array - min_y) < 3)][:, 1]
y_max_vertex_array_x = inner_vertex_array[np.where(np.abs(inner_y_vertex_array - max_y) < 3)][:, 1] + 1
# print '&&&'
# print x_min_vertex_array_y, x_max_vertex_array_y, y_min_vertex_array_x, y_max_vertex_array_x,
vertx_array_y = np.intersect1d(x_min_vertex_array_y, x_max_vertex_array_y)
vertx_array_x = np.intersect1d(y_min_vertex_array_x, y_max_vertex_array_x)
# print(vertx_array_y)
# print(vertx_array_x)
if (len(vertx_array_y) !=0) or (len(vertx_array_x) !=0):
if len(vertx_array_y) == 0 :
min_vertex = min_x
max_vertex = max_x
cut_point_list = list(vertx_array_x)
cut_point_list.sort()
# print '!!!'
# print cut_point_list
new_vertex_x = map(lambda x: [cut_point_list[x], cut_point_list[x + 1] - 1], list(range(len(cut_point_list) - 1)))
new_vertex_x.insert(0, [min_x, cut_point_list[0] - 1])
new_vertex_x.append([cut_point_list[-1], max_x])
new_vertex_y = [[min_y, max_y]] * len(new_vertex_x)
else:
min_vertex = min_y
max_vertex = max_y
cut_point_list = list(vertx_array_y)
# print '!!!'
# print cut_point_list
cut_point_list.sort()
new_vertex_y = map(lambda x: [cut_point_list[x], cut_point_list[x + 1] - 1], list(range(len(cut_point_list) - 1)))
new_vertex_y.insert(0, [min_y, cut_point_list[0] -1])
new_vertex_y.append([cut_point_list[-1], max_y])
new_vertex_x = [[min_x, max_x]] * len(new_vertex_y)
new_vertex_list = map(zip, new_vertex_x, new_vertex_y)
propotion_list = list((np.array(cut_point_list)-min_vertex)/((max_vertex - min_vertex)*1.0))
cut_propotion_list = map(lambda x: propotion_list[x+1] - propotion_list[x], range(len(propotion_list) - 1))
cut_propotion_list.insert(0, propotion_list[0] - 0)
cut_propotion_list.append(1 - propotion_list[-1])
# else:
# cut_point_list = []
# cut_propotion_list = []
# new_vertex_list = []
# print 'ttt', cut_point_list, cut_propotion_list, new_vertex_list
return cut_point_list, cut_propotion_list, new_vertex_list
def cal_p_dirichlet(self, cut_propotion):
alpha = self.alpha_base * len(cut_propotion)
return scipy.stats.dirichlet.pdf(cut_propotion, alpha)
def cal_p_table_partion(self, curr_depth_table_partion):
self.curr_depth_table_partion = curr_depth_table_partion
self.flattend_curr_depth_table_partion = list(np.array(self.curr_depth_table_partion).flatten())
if self.flattend_curr_depth_table_partion.count(1) != len(self.flattend_curr_depth_table_partion):
self.next_depth_table_partion = map(lambda x: self.table_partion_list[x], np.array(range(len(self.flattend_curr_depth_table_partion) - self.flattend_curr_depth_table_partion.count(1))) + self.x + 1)
self.x = self.x + len(self.flattend_curr_depth_table_partion) - self.flattend_curr_depth_table_partion.count(1)
self.flattend_next_depth_table_partion = list(np.array(self.next_depth_table_partion).flatten())
print self.next_depth_table_partion
print self.permutation_table_partion_index
self.next_depth_index = map(lambda x: map(lambda y: x.index(y), np.array(range(len(self.next_depth_table_partion))) + self.flattend_curr_depth_table_partion.count(1)), self.permutation_table_partion_index)
print self.next_depth_index
self.next_depth_table_partion_index = map(lambda x: map(lambda y: np.array(x).argsort()[y], range(len(self.next_depth_table_partion))), self.next_depth_index)
self.next_depth_index_num = [0]
self.next_depth_index_num.extend(map(len, self.next_depth_table_partion))
self.next_depth_permutation_index_base = map(lambda x: map(lambda y: np.array(range(len(self.next_depth_table_partion[x[y]]))) + self.next_depth_index_num[x[y]], range(len(self.next_depth_table_partion))), self.next_depth_table_partion_index)
self.next_depth_permutation_table_partion_base = map(lambda x: map(lambda y: self.next_depth_table_partion[x[y]], range(len(self.next_depth_table_partion))), self.next_depth_table_partion_index)
self.next_depth_permutation_index_before_product = map(lambda x: map(list,(map(itertools.permutations, x))), self.next_depth_permutation_index_base)
self.next_depth_permutation_index_after_product = map(lambda x: list(itertools.product(*x)), self.next_depth_permutation_index_before_product)
self.next_depth_permutation_table_partion_before_product = map(lambda x: map(list,(map(itertools.permutations, x))), self.next_depth_permutation_table_partion_base)
self.next_depth_permutation_table_partion_after_product = map(lambda x: list(itertools.product(*x)), self.next_depth_permutation_table_partion_before_product)
# print '###'
# print self.next_depth_index, self.next_depth_table_partion, self.next_depth_table_partion_index, self.next_depth_permutation_index_base
# print '***'
# print self.next_depth_permutation_index_before_product, self.next_depth_permutation_index_after_product
# print self.next_depth_permutation_table_partion_before_product, self.next_depth_permutation_table_partion_after_product
# print self.next_depth_all_vertex, self.next_depth_all_vertex[0], self.next_depth_index
self.next_depth_vertex = map(lambda x: map(lambda y: map(lambda z: self.next_depth_all_vertex[x][y][z], self.next_depth_index[x]), range(len(self.next_depth_all_vertex[0]))), range(len(self.next_depth_all_vertex)))
self.next_depth_cut = map(lambda x: map(lambda y: map(lambda z: map(self.detect_cut_point_list, self.next_depth_vertex[x][y][z]), range(len(self.next_depth_vertex[0][0]))), range(len(self.next_depth_vertex[0]))), range(len(self.next_depth_vertex)))
self.next_depth_cut_point_list = np.array(self.next_depth_cut)[:,:,:,:,0]
self.next_depth_cut_propotion_list = np.array(self.next_depth_cut)[:,:,:,:,1]
self.next_depth_new_vertex_list = np.array(self.next_depth_cut)[:,:,:,:,2]
# self.next_depth_permutation_index_base = map(lambda x: map(lambda y: np.array(x).argsort()[y], range(len(self.next_depth_table_partion))), self.next_depth_table_partion_index)
# self.seperate_permutation_index =
print '!!!', self.next_depth_vertex, len(self.next_depth_cut_point_list[0][0][0][0])
self.next_result_list = map(lambda (x, y, z): self.cal_cut_point_and_corresponding_table_partion2(x, y, z), list(itertools.product(range(len(self.next_depth_permutation_table_partion_after_product)), range(len(self.next_depth_cut_point_list[0])), range(len(self.next_depth_permutation_table_partion_after_product[0])))))
print '***'
print self.next_result_list
self.p_list = np.array(self.next_result_list)[:,:,:,0]
# print '###', self.p_list
self.next_depth_all_vertex = np.array(self.next_result_list)[:,:,:,1]
print '###', self.next_depth_all_vertex
z = map(lambda x: map(lambda y: list(np.array(x[y]).flatten()), range(len(x))), self.next_depth_permutation_index_after_product)[0]
print len(z)
self.permutation_table_partion_index = z
self.cal_p_table_partion(self.next_depth_table_partion)
return
else:
return self.p_list
# def get_part(self, target_from_index, target_at_index):
# target_list = self.p_list[target_from_index]
# return map(lambda c: target_list[c][target_at_index], range(len(target_list)))
#
# def filter_p_positive(self, target):
# return filter(lambda x: x>0, target)
def cal_cut_point_and_corresponding_table_partion2(self, x, y, z):
print x,y,z
table_partion = list(self.next_depth_permutation_table_partion_after_product[x][z])
cut_point_list = self.next_depth_cut_point_list[x][y]
cut_propotion_list = self.next_depth_cut_propotion_list[x][y]
new_vertex_list = self.next_depth_new_vertex_list[x][y]
print table_partion, cut_point_list, cut_propotion_list, new_vertex_list
print self.next_depth_index
self.t = map(lambda x: map(lambda y: self.cal_cut_point_and_corresponding_table_partion3(table_partion[x], cut_point_list[x][y], cut_propotion_list[x][y], new_vertex_list[x][y]), range(len(self.next_depth_index))), range(len(table_partion)))
# self.p = map(lambda x, y )
tt = map(lambda x: self.flatten(x), range(len(self.t)))
print tt
return tt
def flatten(self, t_index):
if t_index == 0:
self.p = map(lambda x: self.t[t_index][x][0], range(len(self.t[t_index])))
print self.p
self.ne_vertex = map(lambda x: self.t[t_index][x][1], range(len(self.t[t_index])))
else:
self.pp = [[0]] * len(self.p) * len(self.t[t_index])
self.nne_vertex = [[0]] * len(self.p) * len(self.t[t_index])
map(lambda (x, y) : self.new_assign_value(x, y, t_index), list(itertools.product(range(len(self.t[t_index])), range(len(self.p)))))
# self.ne_vertex = map(lambda x: map(lambda y: self.ne_vertex[y].append(self.t[t_index][x][1]), range(len(self.ne_vertex))), range(len(self.t[t_index])))
self.p = self.pp
self.ne_vertex = self.nne_vertex
self.result = map(lambda x: [self.p[x], self.nne_vertex[x]], range(len(self.p)))
return self.result
def new_assign_value(self, x, y, t_index):
self.pp[x * len(self.p) + y] = [self.p[y][0] * self.t[t_index][x][0][0]]
if self.pp[x * len(self.p) + y][0] == 0:
self.nne_vertex[x * len(self.p) + y] = [[]]
else:
self.nne_vertex[x * len(self.p) + y] = [self.ne_vertex[y], self.t[t_index][x][1]]
def cal_cut_point_and_corresponding_table_partion3(self, table_partion, cut_point_list, cut_propotion_list, new_vertex_list):
print table_partion, cut_point_list, new_vertex_list
self.table_partion = table_partion
self.cut_point_list = cut_point_list
self.cut_propotion_list = cut_propotion_list
self.new_vertex_list = new_vertex_list
if (len(self.table_partion) - 1) > len(self.cut_point_list):
self.result = [[[0], []]]
else:
self.combination_index = list(itertools.combinations(range(len(self.cut_point_list)), (len(self.table_partion) - 1)))
# print self.cut_point_list, self.new_vertex_list
# print table_partion
# print self.combination_index
# self.combination_index_list = [self.combination_index] * len(self.permutation_table_partion)
# print self.combination_index_list
self.result = map(self.cal_p_cut_propotion, self.combination_index)
return self.result
def cal_cut_point_and_corresponding_table_partion(self, table_partion):
if (len(table_partion) - 1) > len(self.cut_point_list):
self.result = [[[0], []]]
else:
self.combination_index = list(itertools.combinations(range(len(self.cut_point_list)), (len(self.table_partion) - 1)))
# print self.cut_point_list, self.new_vertex_list
# print table_partion
# print self.combination_index
# self.combination_index_list = [self.combination_index] * len(self.permutation_table_partion)
# print self.combination_index_list
self.result = map(self.cal_p_cut_propotion, self.combination_index)
return self.result
# def cal_next_level_table_partion(self, table_partion_index)
def cal_p_cut_propotion(self, index):
# cut_point = map(lambda x:self.cut_point_list[x], list(index))
# print index
new_vertex = self.cal_new_vertex(index)
# print new_vertex
cut_propotion = self.cal_cut_propotion(index)
# print cut_propotion
# next_cut = np.array(map(self.detect_cut_point_list, new_vertex))
# next_cut_point_num = map(len, list(next_cut[:, 0]))
# print self.table_partion, next_cut_point_num
# diff = map(lambda(x, y): x - y, zip(self.table_partion, next_cut_point_num))
# if len(filter(lambda x: x <= 0, diff)) > 0:
# return [[0], [[]]]
# else:
return [[self.cal_p_dirichlet(cut_propotion)], new_vertex]
# def cal_combination_cut_point(self, cut_point_index):
# return map(lambda x:self.cut_point_list[x], cut_point_index)
#
def cal_permutation_table_partion(self, table_partion_index):
return map(lambda x:self.table_partion[x], table_partion_index)
def cal_cut_propotion(self, propotion_index):
if len(propotion_index) == (len(self.cut_propotion_list) - 1):
cut_propotion = self.cut_propotion_list
else:
cut_propotion = map(lambda x: np.array(self.cut_propotion_list)[propotion_index[x] + 1:propotion_index[x + 1] + 1].sum(), range(len(propotion_index) - 1))
cut_propotion.insert(0, np.array(self.cut_propotion_list)[0:propotion_index[0] + 1].sum())
cut_propotion.append(1 - np.array(self.cut_propotion_list)[0:propotion_index[-1] + 1].sum())
return cut_propotion
def cal_new_vertex(self, vertex_index):
print vertex_index
if len(vertex_index) == (len(self.new_vertex_list) - 1):
new_vertex = self.new_vertex_list
else:
new_vertex = map(lambda x: [self.new_vertex_list[vertex_index[x] + 1][0], self.new_vertex_list[vertex_index[x + 1]][1]], range(len(vertex_index) - 1))
new_vertex.insert(0, [self.new_vertex_list[0][0], self.new_vertex_list[vertex_index[0]][1]])
new_vertex.append([self.new_vertex_list[vertex_index[-1] + 1][0], self.new_vertex_list[-1][1]])
return new_vertex
def cal_p_one_table_partion_list(self, table_partion_list):
self.c = self.c + 1
print '***'
print self.c
self.table_partion_list = table_partion_list
self.vertex = [[(0, 0), (1023, 767)]]
self.p_list = [[[[1]]]]
self.next_depth_all_vertex = [[[[[(0, 0), (1023, 767)]]]]]
self.permutation_table_partion_index = [[[[0]]]]
self.x = 0
self.all_vertex_list = []
map(self.find_vertex, self.color_space)
p = self.cal_p_table_partion(self.table_partion_list[self.x])
# print p
return p
def cal_likelihood(self):
self.c = 0
map(self.cal_p_one_table_partion_list, self.all_table_partion_list[4:6])
def __call__(self, img_num):
self.img = cv2.imread('E:/ncrp/'+str(img_num)+'.png')
self.cal_likelihood()
if img_num == 3:
return
else:
return self.__call__(img_num - 1)
def main():
Decode_list = decoder(code_base = CODE_BASE)
All_Table_Partion_List = Decode_list()
Prior = prior(All_Table_Partion_List)
Prior_Probability = Prior()
Likelihood = likelihood(All_Table_Partion_List, COLOR_SPACE, ALPHA_BASE)
Likelihood_Probability = Likelihood(img_num = TREE_NUM )
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
1114a68d8b2e5c4fd05992b6c8ee4ca498cc92af | 755e4e6e966433fe887f0f28f14916696b1588d7 | /code/exceptions/exceptions.py | 7e62995995ecbc57b12ce62c9ad0de8d73a94b9e | []
| no_license | phildue/FingerspellingRecognition | f18518a6e2e29b769d131e5b54846f00213f3ff1 | 1b5236142734d7b50f0f4161ecc533b7d10347b8 | refs/heads/master | 2021-03-24T10:40:24.507766 | 2017-07-03T09:33:09 | 2017-07-03T09:33:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | class NotTrained(Exception):
pass
class NoRoiFound(Exception):
pass
class NoContoursFound(Exception):
pass
class DescriptorFailed(Exception):
pass | [
"[email protected]"
]
| |
d1684f57fb28491ecde85c741f45fcd4e4659cf8 | ed9e4027cbd76fbac19598163b9673628cb07eea | /anjia/asgi.py | 372aac7872de8b88fd3e438e294b71fb8dafce32 | [
"BSD-2-Clause"
]
| permissive | ankiwoong/python_kindergarten | 3a1f9a486a32866b5f37ba4673dfc2135a85eec0 | 43b1e15969f0d35073e2f7fb1286d8c094fd80a8 | refs/heads/master | 2022-09-01T08:11:27.374802 | 2020-05-27T08:45:14 | 2020-05-27T08:45:14 | 258,760,014 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | """
ASGI config for anjia project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'anjia.settings')
application = get_asgi_application()
| [
"[email protected]"
]
| |
6a50f6dc840ad5ee463050db663639df9a8ea7dd | e8b12e314782bf68347838599c8168e4a8019373 | /CompareAlternatives.py | 0d80231eb7ed1c3ac5094ee2f446c2fa5eed2155 | []
| no_license | HPM573/Lab_ParallelProcessing | 0ce7e4b615afe9e2e2a281f79684e9067003aa1b | f2e6401f4a5dc057a150914653079c0284c92b4b | refs/heads/main | 2023-05-12T06:03:15.275404 | 2023-05-02T13:58:18 | 2023-05-02T13:58:18 | 180,822,018 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,908 | py | import EconEvalInputData as D
import ProbabilisticSupport as Support
import ProbilisticParamClasses as P
from ParallelClasses import ParallelMultiCohort
N_COHORTS = 200 # number of cohorts
if __name__ == '__main__': # this line is needed to avoid errors that occur on Windows computers
# create a multi-cohort to simulate under mono therapy
multiCohortMono = ParallelMultiCohort(
ids=range(N_COHORTS),
pop_size=D.POP_SIZE,
therapy=P.Therapies.MONO
)
multiCohortMono.simulate(sim_length=D.SIM_LENGTH)
# create a multi-cohort to simulate under combi therapy
multiCohortCombo = ParallelMultiCohort(
ids=range(N_COHORTS),
pop_size=D.POP_SIZE,
therapy=P.Therapies.COMBO
)
multiCohortCombo.simulate(sim_length=D.SIM_LENGTH)
# print the estimates for the mean survival time and mean time to AIDS
Support.print_outcomes(multi_cohort_outcomes=multiCohortMono.multiCohortOutcomes,
therapy_name=P.Therapies.MONO)
Support.print_outcomes(multi_cohort_outcomes=multiCohortCombo.multiCohortOutcomes,
therapy_name=P.Therapies.COMBO)
# draw survival curves and histograms
Support.plot_survival_curves_and_histograms(multi_cohort_outcomes_mono=multiCohortMono.multiCohortOutcomes,
multi_cohort_outcomes_combo=multiCohortCombo.multiCohortOutcomes)
# print comparative outcomes
Support.print_comparative_outcomes(multi_cohort_outcomes_mono=multiCohortMono.multiCohortOutcomes,
multi_cohort_outcomes_combo=multiCohortCombo.multiCohortOutcomes)
# report the CEA results
Support.report_CEA_CBA(multi_cohort_outcomes_mono=multiCohortMono.multiCohortOutcomes,
                           multi_cohort_outcomes_combo=multiCohortCombo.multiCohortOutcomes)
| [
"[email protected]"
]
| |
48b211d3ffc2fe351f125460bfa2de347c5ad89c | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/frenetic-lang_pyretic/pyretic-master/pyretic/tests/test_mac_learner.py | 19c0e4482c157a77029346d963681440c374e52d | []
| no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 2,387 | py | #!/usr/bin/python
from mininet.net import Mininet
from mininet.node import RemoteController
import os, shlex, subprocess, utils, time
from utils import init
### Module Parameters
def get_controller():
return 'pyretic.modules.mac_learner'
def run_mininet():
# mn = Mininet()
# s1 = mn.addSwitch('s1')
# s2 = mn.addSwitch('s2')
# s3 = mn.addSwitch('s3')
# h1 = mn.addHost('h1')
# h2 = mn.addHost('h2')
# h3 = mn.addHost('h3')
# mn.addLink(s1, s2)
# mn.addLink(s1, s3)
# mn.addLink(s2, s3)
# mn.addLink(h1, s1)
# mn.addLink(h2, s2)
# mn.addLink(h3, s3)
# mn.addController('c0', RemoteController)
# time.sleep(1)
# mn.run(mn.pingAll)
# Alternately, run mininet via the command line. Note that we need to use
# absolute path names because sudo mucks with the env.
mn = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../mininet.sh'))
cmd = '%s --topo cycle,3,4 --mac --test=pingall' % mn
subprocess.call(shlex.split(cmd))
def process_controller_output(oldf, newf):
lines = oldf.readlines()
lines.sort()
keywords = ['TEST', 'ERROR', 'error']
## filter out lines that do not contain one of the keywords
for line in lines:
for kw in keywords:
if line.find(kw) >= 0:
newf.write(line)
def process_mininet_output(oldf, newf):
lines = oldf.readlines()
lines.sort()
keywords = ['TEST', 'ERROR', 'error', 'received']
## filter out lines that do not contain one of the keywords
for line in lines:
for kw in keywords:
if line.find(kw) >= 0:
newf.write(line)
### Tests
test_mac_learner = utils.TestModule( __name__, __file__, get_controller, run_mininet, process_controller_output, process_mininet_output)
def test_mac_learner_i(init):
utils.run_test(test_mac_learner, init.test_dir, init.benchmark_dir, '-m i')
def test_mac_learner_r0(init):
utils.run_test(test_mac_learner, init.test_dir, init.benchmark_dir, '-m r0')
def test_mac_learner_p0(init):
utils.run_test(test_mac_learner, init.test_dir, init.benchmark_dir, '-m p0')
# def test_mac_learner_p0_nx(init):
# utils.run_test(test_mac_learner, init.test_dir, init.benchmark_dir, '-m p0 --nx')
### Executing this file starts the mininet instance for this test.
if __name__ == "__main__":
run_mininet()
| [
"[email protected]"
]
| |
dc4ee8e84412fbe9e26fa41aea2ba61f0a80d687 | 3b11dc40c7d772fffeb4d8683e5c9791c41f6454 | /custom/clients/ecobank/ecobank_inventory/models/inventory_account.py | 54cf03c640a891acbae5ed78bf433efd0cd027f2 | []
| no_license | Jacky-odoo/Ecobank | b986352abac9416ab00008a4abaec2b1f1a1f262 | 5c501bd03a22421f47c76380004bf3d62292f79d | refs/heads/main | 2023-03-09T18:10:45.058530 | 2021-02-25T14:11:12 | 2021-02-25T14:11:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,139 | py | from odoo import api, fields, models
from odoo.exceptions import ValidationError
class InventoryUser(models.Model):
_name = 'inventory.account'
_rec_name = 'name_and_code'
name = fields.Char(string='Name', required=True)
code = fields.Char(string='Code', required=True)
name_and_code = fields.Char(compute='compute_name_code', store=True)
@api.multi
@api.depends('name', 'code')
def compute_name_code(self):
for rec in self:
if rec.code and rec.name:
rec.name_and_code = str(rec.name + " (" + rec.code + ")")
@api.multi
def copy(self, default=None):
raise ValidationError("Sorry you are not allowed to perform this operation. Error Code BYT001")
@api.constrains('name')
def check_name(self):
all_accounts = self.search([])
for account in all_accounts:
if self.name.lower() == account.name.lower() and self.id != account.id:
raise ValidationError("Error! Account Name already exist. BYT005")
_sql_constraints = [
('unique_code', 'unique (code)', "Account Code Already Exists!"),
]
| [
"[email protected]"
]
| |
101e6d98e6ea5327b9632183ef8eb52de0c552e9 | ff5eea95bb0827cb086c32f4ec1c174b28e5b82d | /gammapy/background/tests/test_ring.py | 047cad9a887193d1551fbd48446204c72bfc2e9e | []
| no_license | pflaumenmus/gammapy | 4830cc5506a4052658f30077fa4e11d8c685ede0 | 7b5caf832c9950c886528ca107203ce9b83c7ebf | refs/heads/master | 2021-01-15T23:27:46.521337 | 2013-09-25T14:23:35 | 2013-09-25T14:23:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,176 | py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import division
import unittest
import pytest
import numpy as np
from numpy.testing import assert_almost_equal
from astropy.io import fits
from ..maps import Maps
from ..ring import RingBgMaker, outer_ring_radius
try:
import scipy
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
@pytest.mark.skipif('not HAS_SCIPY')
class TestRingBgMaker(unittest.TestCase):
def test_construction(self):
r = RingBgMaker(0.3, 0.5)
r.info()
def test_correlate(self):
image = np.zeros((10, 10))
image[5, 5] = 1
r = RingBgMaker(3, 6, 1)
image = r.correlate(image)
def test_correlate_maps(self):
n_on = np.ones((200, 200))
hdu = fits.ImageHDU(n_on, name='n_on')
maps = Maps([hdu])
maps['exclusion'].data[100:110, 100:110] = 0
r = RingBgMaker(10, 13, 1)
r.correlate_maps(maps)
class TestHelperFuntions(unittest.TestCase):
def test_compute_r_o(self):
actual = outer_ring_radius(1, 0, 1)
assert_almost_equal(actual, 1)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
77c326d8b4be8828d4ff340158b1355fd541aecb | b97edfc765baa1432fcef82596e2a2d48310cce0 | /algorithms/say.py | d6b5ed481550797d2a4d690ecaf72ebaaa5c6346 | []
| no_license | NicholasPiano/scripts | 4dadaa97bb1bb18bd51c5526b0a57c7ddc554fce | 76b610af49128f0434419fcd75be606ef5efbb37 | refs/heads/master | 2020-05-31T01:29:43.284877 | 2015-01-05T02:00:28 | 2015-01-05T02:00:28 | 27,193,847 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 63 | py | #!/usr/bin/python3
import os
os.system('say vizontlatahshrah')
| [
"[email protected]"
]
| |
ff31f03d357f8dd02d1fef1e8193bb092e608bea | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02708/s465285057.py | b9e8c08a651992f50a225309892a6784c1a1572f | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | from itertools import accumulate
N, K = map(int, input().split())
# acc[i] = 0 + 1 + ... + i, i.e. the prefix sums of 0..N.
acc = list(accumulate(range(N + 1), lambda x, y: x + y))
ans = 0
mod = 10**9 + 7
for i in range(K, N + 1):
    # Choosing i of the numbers 0..N: the smallest sum is acc[i-1] (the i
    # smallest values) and the largest is acc[N] - acc[N-i] (the i largest);
    # every sum in between is attainable, giving r - l + 1 distinct sums.
    r = acc[N] - acc[N - i]
    l = acc[i - 1]
    ans = (ans + r - l + 1) % mod
ans += 1  # i = N + 1 (taking every number) contributes exactly one more way
print(ans % mod)
| [
"[email protected]"
]
| |
38eaeac29ebaa70dc88d888b36fe8d2e3156dd76 | 083b3f5b0d23c269c6a9ff1ea413e70fb799a497 | /Leetcode Challenge/09_September_2020/Python/Week 5/2_First Missing Positive.py | 5daf39e470ef89206f9440b17c1cc1717578a4f7 | []
| no_license | HectorIGH/Competitive-Programming | b2e02dff140d9ebb06c646f7be0b53ea0afe90c9 | 467058c63e8a7e76805feebe3020bac4d20516a6 | refs/heads/master | 2022-12-31T18:32:46.824626 | 2020-10-16T20:38:33 | 2020-10-16T20:38:33 | 279,733,136 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,454 | py | #Given an unsorted integer array, find the smallest missing positive integer.
#
#Example 1:
#
#Input: [1,2,0]
#Output: 3
#Example 2:
#
#Input: [3,4,-1,1]
#Output: 2
#Example 3:
#
#Input: [7,8,9,11,12]
#Output: 1
#Follow up:
#
#Your algorithm should run in O(n) time and use constant extra space.
#
# Hint 1: Think about how you would solve the problem in non-constant space. Can you apply that logic to the existing space?
# Hint 2: We don't care about duplicates or non-positive integers.
# Hint 3: Remember that O(2n) = O(n).
from typing import List
class Solution:
    def firstMissingPositive(self, nums: List[int]) -> int:
        '''
        O(n)-time but O(n)-space alternative, kept for reference:
        nums = set(n for n in nums if n > 0)
        i = 1
        while True:
            if i not in nums:
                return i
            i += 1
        '''
        if len(nums) == 0:
            return 1
        n = len(nums)
        # The answer always lies in 1..n+1, so after checking for 1 we can
        # overwrite out-of-range entries with 1 as a harmless filler.
        containsone = False
        for i in range(n):
            if nums[i] == 1:
                containsone = True
                break
        if not containsone:
            return 1
        for i in range(n):
            if nums[i] <= 0 or nums[i] > n:
                nums[i] = 1
        # Mark value v as present by negating the entry at index v - 1.
        for i in range(n):
            val = nums[i]
            pos = abs(val) - 1
            if nums[pos] > 0:
                nums[pos] = -1 * nums[pos]
        # The first index still holding a positive value is the answer.
        for i in range(n):
            if nums[i] > 0:
                return i + 1
        return n + 1
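# Quick check against the three examples in the problem statement
# (assumption: this driver is not part of the original submission).
if __name__ == '__main__':
    s = Solution()
    print(s.firstMissingPositive([1, 2, 0]))          # 3
    print(s.firstMissingPositive([3, 4, -1, 1]))      # 2
    print(s.firstMissingPositive([7, 8, 9, 11, 12]))  # 1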
| [
"[email protected]"
]
| |
21cc1ba23778a7ba76d8b97034ae2a2236266abf | 864acf7235e330123c3d68ed14cdd8bf8eed800b | /crm/accounts/models.py | be98a7f2e0926b1e0b0ec5e7fd8a599dfe9597b2 | []
| no_license | wahid999/djangostuff | 83f0ae53df5c53d192603d7aaf7ee72f8665c240 | c102edfb13b8ba39930e44069122c5e545ef00ee | refs/heads/main | 2023-07-04T20:02:32.550831 | 2021-08-13T16:07:20 | 2021-08-13T16:07:20 | 399,344,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,705 | py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Customer(models.Model):
user = models.OneToOneField(User, null=True, blank=True, on_delete=models.CASCADE)
name = models.CharField(max_length=200, null=True)
phone = models.CharField(max_length=200, null=True)
email = models.CharField(max_length=200, null=True)
profile_pic = models.ImageField(default="IMG_3940.JPG", null=True, blank=True)
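    # Note: ImageField requires the Pillow package to be installed; the default
    # file name is interpreted relative to MEDIA_ROOT.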
date_created = models.DateTimeField(auto_now_add=True, null=True)
def __str__(self):
return self.name
class Tag(models.Model):
name = models.CharField(max_length=200, null=True)
def __str__(self):
return self.name
class Product(models.Model):
CATEGORY = (
('Indoor', 'Indoor'),
('Out Door', 'Out Door'),
)
name = models.CharField(max_length=200, null=True)
price = models.FloatField(null=True)
category = models.CharField(max_length=200, null=True, choices=CATEGORY)
description = models.CharField(max_length=200, null=True, blank=True)
date_created = models.DateTimeField(auto_now_add=True, null=True)
tags = models.ManyToManyField(Tag)
def __str__(self):
return self.name
class Order(models.Model):
STATUS = (
('Pending', 'Pending'),
('Out for delivery', 'Out for delivery'),
('Delivered', 'Delivered'),
)
customer = models.ForeignKey(Customer, null=True, on_delete= models.SET_NULL)
product = models.ForeignKey(Product, null=True, on_delete= models.SET_NULL)
date_created = models.DateTimeField(auto_now_add=True, null=True)
status = models.CharField(max_length=200, null=True, choices=STATUS)
note = models.CharField(max_length=1000, null=True)
def __str__(self):
return self.product.name | [
"[email protected]"
]
| |
6ad749b446664d9299143f36496269a2281f3e3d | 8cb7e95552d9b55282a26b39eef8223ad5a998a5 | /tests.py | 93e1e420b7f8dedbe4386bb28761d13079a3eb66 | [
"MIT"
]
| permissive | coleifer/ucache | c3f8949a710edd2e19ae18ae32eeccc1e1100837 | 1b2ac7ca8c67dd895b45d0fddcc7b5542b0b8fd9 | refs/heads/master | 2022-12-28T00:49:59.736055 | 2022-12-14T02:41:29 | 2022-12-14T02:41:29 | 165,595,140 | 72 | 8 | null | 2019-05-12T12:37:44 | 2019-01-14T04:26:22 | Python | UTF-8 | Python | false | false | 10,273 | py | #!/usr/bin/env python
import glob
import os
import sys
import time
import unittest
from ucache import *
class BaseTestCache(object):
cache_files = []
def get_cache(self, **kwargs):
raise NotImplementedError
def cleanup(self):
for filename in self.cache_files:
if os.path.exists(filename):
os.unlink(filename)
def setUp(self):
self.cache = self.get_cache()
super(BaseTestCache, self).setUp()
def tearDown(self):
self.cache.set_prefix()
self.cache.close()
self.cleanup()
super(BaseTestCache, self).tearDown()
def test_operations(self):
test_data = (
('k1', 'v1'),
('k2', 2),
('k3', None),
('k4', [0, '1', [2]]),
('k5', {'6': ['7', 8, {'9': '10', '11': 12}]}),
)
test_data_dict = dict(test_data)
for key, value in test_data:
self.cache.set(key, value, 60)
for key, value in test_data:
self.assertEqual(self.cache.get(key), value)
self.cache.delete('k1')
self.cache.delete('k3')
self.cache.delete('k5')
for key in ('k1', 'k3', 'k5'):
self.assertIsNone(self.cache.get(key))
for key in ('k2', 'k4'):
self.assertEqual(self.cache.get(key), test_data_dict[key])
self.cache.flush()
self.assertIsNone(self.cache.get('k2'))
self.assertIsNone(self.cache.get('k4'))
def test_bulk_operations(self):
test_data = {
'k1': 'v1',
'k2': 2,
'k3': [0, '1', [2]]}
# Do simple bulk-set.
self.cache.set_many(test_data, timeout=60)
# Do single-set to ensure compatible with bulk-get.
self.cache.set('k4', 'v4')
# Compare results of bulk-get.
self.assertEqual(self.cache.get_many(['k1', 'k2', 'k3', 'k4']), {
'k1': 'v1',
'k2': 2,
'k3': [0, '1', [2]],
'k4': 'v4'})
# Do individual gets to ensure methods are compatible.
self.assertEqual(self.cache.get('k1'), test_data['k1'])
self.assertEqual(self.cache.get('k3'), test_data['k3'])
# Do bulk-delete.
self.cache.delete_many(['k1', 'k3', 'kx'])
self.assertTrue(self.cache['k1'] is None)
self.assertTrue(self.cache['k2'] is not None)
self.assertTrue(self.cache['k3'] is None)
self.assertEqual(self.cache.get_many(['k1', 'k2', 'k3']), {'k2': 2})
# Do single-delete to ensure compatibility.
self.cache.delete('k2')
self.assertTrue(self.cache['k2'] is None)
def test_preload(self):
self.cache.set_many({'k1': 'v1', 'k2': 'v2', 'k3': 'v3'}, timeout=60)
self.assertEqual(self.cache.get('k1'), 'v1')
self.assertTrue(self.cache.get('kx') is None)
with self.cache.preload(['k1', 'k3']):
self.assertEqual(self.cache.get('k1'), 'v1')
self.assertEqual(self.cache.get('k3'), 'v3')
self.assertTrue(self.cache.get('kx') is None)
self.cache._preload['kx'] = 'preloaded'
self.assertEqual(self.cache.get('kx'), 'preloaded')
self.assertEqual(self.cache.get('k1'), 'v1')
self.assertEqual(self.cache.get('k2'), 'v2')
self.assertEqual(self.cache.get('k3'), 'v3')
self.assertTrue(self.cache.get('kx') is None)
def assertWrites(self, n):
self.assertEqual(self.cache.stats['writes'], n)
def assertHits(self, n):
self.assertEqual(self.cache.stats['hits'], n)
def assertPLHits(self, n):
self.assertEqual(self.cache.stats['preload_hits'], n)
def test_preload_re_set(self):
self.cache.set_many({'k1': 'v1', 'k2': 'v2', 'k3': 'v3'}, timeout=60)
self.assertWrites(3)
with self.cache.preload(['k1', 'k2']):
self.assertHits(2)
with self.cache.preload(['k3']):
self.assertHits(3)
self.assertPLHits(0)
self.assertEqual(self.cache.get('k1'), 'v1')
self.assertEqual(self.cache.get('k2'), 'v2')
self.assertEqual(self.cache.get('k3'), 'v3')
# No more actual trips to the backend - we are pulling from the
# preload cache.
self.assertHits(3)
self.assertPLHits(3)
self.cache.set('k2', 'v2-x')
self.assertWrites(4)
self.assertEqual(self.cache.get('k2'), 'v2-x')
self.assertHits(3)
self.assertPLHits(4)
# We lost the scope that k2 was set in, and get a stale value back.
self.assertEqual(self.cache.get('k2'), 'v2')
self.assertHits(3)
self.assertPLHits(5)
# Lost scope for k3, make trip to the cache.
self.assertEqual(self.cache.get('k3'), 'v3')
self.assertHits(4)
self.assertPLHits(5)
def test_decorator(self):
@self.cache.cached(10)
def fn(seed=None):
return time.time()
value = fn()
time.sleep(0.001)
self.assertEqual(fn(), value)
fn.bust()
self.assertFalse(fn() == value)
self.assertEqual(fn(), fn())
self.assertFalse(fn(1) == fn(2))
self.assertEqual(fn(2), fn(2))
def test_property(self):
class Dummy(object):
@self.cache.cached_property
def fn(self):
return time.time()
d = Dummy()
value = d.fn
time.sleep(0.001)
self.assertEqual(d.fn, value)
def test_compression(self):
self.cache.close()
self.cleanup()
cache = self.get_cache(compression=True)
data = {'k1': 'a' * 1024, 'k2': 'b' * 512, 'k3': 'c' * 200}
cache.set_many(data, timeout=60)
cache.set('k4', 'd' * 1024, timeout=60)
self.assertEqual(cache.get('k4'), 'd' * 1024)
res = cache.get_many(['k1', 'k2', 'k3'])
self.assertEqual(res, data)
cache.delete_many(['k1', 'k2', 'k3', 'k4'])
def test_read_expired(self):
self.cache.set('k1', 'v1', -1)
self.assertTrue(self.cache.get('k1') is None)
def test_clean_expired(self):
if not self.cache.manual_expire:
return
day = 86400
for i in range(1, 7):
self.cache.set('k%s' % i, 'v%s' % i, (-i * day) - 1)
self.cache.set('ka', 'va', -5)
self.cache.set('kb', 'vb', 60)
self.cache.set('kc', 'vc', day)
# k1, -1 days ... k6, -6 days.
self.assertTrue(self.cache.get('k4') is None) # k4 is also deleted.
self.assertEqual(self.cache.clean_expired(3), 3) # k3, k5, k6.
self.assertEqual(self.cache.clean_expired(3), 0)
self.assertEqual(self.cache.clean_expired(1), 2) # k1, k2.
self.assertEqual(self.cache.clean_expired(), 1) # ka.
self.assertEqual(self.cache.clean_expired(), 0)
# Cannot retrieve any of the expired data.
for i in range(1, 7):
self.assertTrue(self.cache.get('k%s' % i) is None)
# Set some new expired keys and values.
for i in range(3):
self.cache.set('k%s' % i, 'v%s' % i, -3)
self.assertTrue(self.cache.get('k1') is None)
self.assertEqual(self.cache.clean_expired(), 2)
self.assertEqual(self.cache.clean_expired(), 0)
# Set expired key to a valid time.
self.cache.set('k1', 'v1', 60)
self.assertEqual(self.cache.get('k1'), 'v1')
# Our original keys are still present.
self.assertEqual(self.cache.get('kb'), 'vb')
self.assertEqual(self.cache.get('kc'), 'vc')
def test_prefix_and_flush(self):
self.cache.set_prefix('a')
self.cache.set('k0', 'v0-1')
self.cache.set('k1', 'v1-1')
self.cache.set_prefix('b')
self.cache.set('k0', 'v0-2')
# Check that keys and values are isolated properly by prefix.
self.cache.set_prefix('a')
self.assertEqual(self.cache.get('k0'), 'v0-1')
self.cache.set_prefix('b')
self.assertEqual(self.cache.get('k0'), 'v0-2')
self.cache.set_prefix('a')
try:
self.cache.flush()
except NotImplementedError:
# Memcached does not support prefix match, so we skip.
return
self.assertTrue(self.cache.get('k0') is None)
self.assertTrue(self.cache.get('k1') is None)
self.cache.set_prefix('b')
self.assertEqual(self.cache.get('k0'), 'v0-2')
self.assertTrue(self.cache.get('k1') is None)
class TestKTCache(BaseTestCache, unittest.TestCase):
def cleanup(self):
self.cache.close(close_all=True)
def get_cache(self, **kwargs):
return KTCache(connection_pool=False, **kwargs)
class TestSqliteCache(BaseTestCache, unittest.TestCase):
cache_files = ['sqlite_cache.db']
def get_cache(self, **kwargs):
return SqliteCache('sqlite_cache.db', **kwargs)
class TestRedisCache(BaseTestCache, unittest.TestCase):
def get_cache(self, **kwargs):
return RedisCache(**kwargs)
def test_read_expired(self):
# Redis doesn't support setting a negative timeout.
pass
class TestKCCache(BaseTestCache, unittest.TestCase):
def get_cache(self, **kwargs):
return KCCache(filename='*', **kwargs)
class TestMemcacheCache(BaseTestCache, unittest.TestCase):
def get_cache(self, **kwargs):
return MemcacheCache(**kwargs)
class TestPyMemcacheCache(BaseTestCache, unittest.TestCase):
def get_cache(self, **kwargs):
return PyMemcacheCache(**kwargs)
class TestMemoryCache(BaseTestCache, unittest.TestCase):
def get_cache(self, **kwargs):
return MemoryCache(**kwargs)
class TestDbmCache(BaseTestCache, unittest.TestCase):
@property
def cache_files(self):
return glob.glob('dbmcache.*')
def get_cache(self, **kwargs):
return DbmCache('dbmcache.db', **kwargs)
class TestGreenDBCache(BaseTestCache, unittest.TestCase):
def get_cache(self, **kwargs):
return GreenDBCache(**kwargs)
if __name__ == '__main__':
unittest.main(argv=sys.argv)
| [
"[email protected]"
]
| |
7e06dd17c6c8f3382921b07d5a29bfd3f67c4817 | 846e642fd9b01d3b500d3efba4790761039eec24 | /code/smtp.py | 978b10738307ac891f4680f1e0a033f0d1ac1892 | []
| no_license | sachinyadav3496/Machine_Learning_Workshop | ffea23799c0f8477d9b5cc19b98e7d33a6364390 | 37f433631d1ae4e4db37c4baae6cdc3a7619423e | refs/heads/master | 2020-11-24T11:49:45.936367 | 2020-05-01T08:38:10 | 2020-05-01T08:38:10 | 228,130,385 | 5 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,080 | py |
import smtplib
import getpass
def Main():
print("\n\n*************************welcome************************\n")
print("\nWelcom to Email Service \n")
print("Enter your login details - \n")
gmail_user = input("\n\nUserName : ")
gmail_password = getpass.getpass("Password : ")
try:
server = smtplib.SMTP_SSL('smtp.gmail.com', 465)
print("\n\nConnection established ")
server.ehlo()
server.login(gmail_user, gmail_password)
print("\n\nYou have Successfully logged in your account ",gmail_user)
except Exception as e:
print("\n\nError!!! in Connection ")
print(e)
exit(0)
sent_from = gmail_user
i = int(input("\n\nEnter no. of recipients - "))
print("\n\nEnter Recipients Email Addressess - \n")
to = []
for k in range(i):
to.append(input())
print()
subject = input("\n\nPlease Type in Subject of The Mail - ")
print("\n\nType in Your Message (Type in EOF to FINISH)\n\n")
message=[]
while True:
msg = input()
if msg.upper() == 'EOF' :
break
else :
message.append(msg)
print("\n\nMessege is Ready for Delivery\n\n ")
body = '\n'.join(message)
email_text = """From:%s
To:%s
Subject:%s
%s
"""%(sent_from, ", ".join(to), subject, body)
try:
print("\n\nEmail sending is in process - \n ")
server.sendmail(sent_from, to, email_text)
server.close()
except Exception as e:
print('\nSomething went wrong...',e)
else:
print("\nMessage Delivered to - \n")
for i in to:
print(i)
print()
print("\n\n**********************Exiting********************\n\n")
print("\n\nThanks For using Mail Service \n\n")
if __name__ == "__main__":
Main()
| [
"[email protected]"
]
| |
ce3be2e0574e1ed136c469dfa1ef2ac357ed40cc | dfb8d3c365bd2ea27cef9af5cb00b7be1dae978d | /train.py | b23821e543d1b2a73205ead7e410f2b5b7bac887 | [
"MIT"
]
| permissive | Windstudent/IRM-based-Speech-Enhancement-using-DNN | dd0cedfd4150fed69c55d33a744d0a6520fdf2d5 | 27a6f73b5b7fa91a4796e093e6ea3e30508a5c15 | refs/heads/master | 2020-07-05T15:34:52.712226 | 2019-05-07T14:36:40 | 2019-05-07T14:36:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,823 | py | import argparse
import json
import os
import numpy as np
import torch
from torch.utils.data import DataLoader
from data.test_dataset import TestDataset
from data.train_dataset import TrainDataset
from trainer.trainer import Trainer
from utils.utils import initialize_config
def main(config, resume):
"""
    Entry point of the training script.
    Notes:
        1. Load the datasets
        2. Initialize the model
        3. Set up the optimizer
        4. Choose the loss function
        5. Run the training script
    Args:
        config (dict): configuration options
        resume (bool): whether to resume from the most recently saved checkpoint
"""
torch.manual_seed(config["seed"])
np.random.seed(config["seed"])
train_dataset = TrainDataset(
mixture_dataset=config["train_dataset"]["mixture"],
mask_dataset=config["train_dataset"]["clean"],
limit=config["train_dataset"]["limit"],
offset=config["train_dataset"]["offset"],
)
train_data_loader = DataLoader(
dataset=train_dataset,
batch_size=config["train_dataset"]["batch_size"],
num_workers=config["train_dataset"]["num_workers"],
shuffle=config["train_dataset"]["shuffle"]
)
valid_dataset = TestDataset(
mixture_dataset=config["valid_dataset"]["mixture"],
clean_dataset=config["valid_dataset"]["clean"],
limit=config["valid_dataset"]["limit"],
offset=config["valid_dataset"]["offset"],
)
valid_data_loader = DataLoader(
dataset=valid_dataset
)
model = initialize_config(config["model"])
optimizer = torch.optim.Adam(
params=model.parameters(),
lr=config["optimizer"]["lr"]
)
loss_function = initialize_config(config["loss_function"])
trainer = Trainer(
config=config,
resume=resume,
model=model,
loss_function=loss_function,
optim=optimizer,
train_dl=train_data_loader,
validation_dl=valid_data_loader,
)
trainer.train()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='IRM Estimation using DNN in Speech Enhancement')
parser.add_argument("-C", "--config", required=True, type=str, help="训练配置文件(*.json)")
parser.add_argument('-D', '--device', default=None, type=str, help="本次实验使用的 GPU 索引,e.g. '1,2,3'")
parser.add_argument("-R", "--resume", action="store_true", help="是否从最近的一个断点处继续训练")
args = parser.parse_args()
if args.device:
os.environ["CUDA_VISIBLE_DEVICES"] = args.device
# load config file
config = json.load(open(args.config))
config["train_config_path"] = args.config
main(config, resume=args.resume)
| [
"[email protected]"
]
| |
93e0d1af53bc2b9efd06b47d2a1c4276bdb0b0bd | 5390d79dad71ad0d9ff9d0777435dcaf4aad16b3 | /chapter_06/favorite_number.py | 124b9763eeac8593df0e93e0c0e845aa9bc3e5dd | []
| no_license | JasperMi/python_learning | 19770d79cce900d968cec76dac11e45a3df9c34c | 8111d0d12e4608484864dddb597522c6c60b54e8 | refs/heads/master | 2020-11-26T08:57:02.983869 | 2020-03-11T10:14:55 | 2020-03-11T10:14:55 | 218,935,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 497 | py | favorite_numbers = {
'bob': 2,
'sarah': 6,
'martin': 8,
'katy': 9,
'tom': 10
}
print('bob' + "'s favorite number is " + str(favorite_numbers['bob']) + ".")
print('sarah' + "'s favorite number is " + str(favorite_numbers['sarah']) + ".")
print('martin' + "'s favorite number is " + str(favorite_numbers['martin']) + ".")
print('katy' + "'s favorite number is " + str(favorite_numbers['katy']) + ".")
print('tom' + "'s favorite number is " + str(favorite_numbers['tom']) + ".")
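# The same output can be produced with a loop (dicts keep insertion order on
# Python 3.7+):
# for name, number in favorite_numbers.items():
#     print(name + "'s favorite number is " + str(number) + ".")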
| [
"[email protected]"
]
| |
ad3bb9cee62e36edbee53a574699af1e1489a8af | a06596102ed51de6b7786c1a3260f8d75bae676c | /adanet/core/eval_metrics_test.py | 8b5251e4bbd59810c4c26ccf00943c06677a0940 | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
]
| permissive | phymucs/adanet | 803b07cea49cc3821657085252c222ebe487be20 | 9fcd68cc220371d75923dcbf4eae9c1c6b9ed106 | refs/heads/master | 2020-08-11T18:56:26.858537 | 2019-10-02T16:09:17 | 2019-10-02T16:11:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,088 | py | """Tests for AdaNet eval metrics.
Copyright 2019 The AdaNet Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from adanet import tf_compat
from adanet.core.architecture import _Architecture
from adanet.core.eval_metrics import call_eval_metrics
import adanet.core.testing_utils as tu
import tensorflow as tf
# pylint: disable=g-direct-tensorflow-import
from tensorflow.python.eager import context
from tensorflow.python.framework import test_util
# pylint: enable=g-direct-tensorflow-import
class MetricsTest(tu.AdanetTestCase):
def setup_graph(self):
# We only test the multi head since this is the general case.
self._features = {"x": tf.constant([[1.], [2.]])}
heads = ("head_1", "head_2")
labels = tf.constant([0, 1])
self._labels = {head: labels for head in heads}
predictions = {(head, "predictions"): labels for head in heads}
loss = tf.constant(2.)
self._estimator_spec = tf_compat.v1.estimator.tpu.TPUEstimatorSpec(
mode=tf.estimator.ModeKeys.EVAL,
loss=loss,
predictions=predictions,
eval_metrics=(self._spec_metric_fn, {
"features": self._features,
"labels": self._labels,
"predictions": predictions,
"loss": loss
}))
def _run_metrics(self, metrics):
metric_ops = metrics
if isinstance(metric_ops, tuple):
metric_ops = call_eval_metrics(metric_ops)
self.evaluate((tf_compat.v1.global_variables_initializer(),
tf_compat.v1.local_variables_initializer()))
self.evaluate(metric_ops)
return {k: self.evaluate(metric_ops[k][0]) for k in metric_ops}
def _assert_tensors_equal(self, actual, expected):
actual, expected = self.evaluate((actual, expected))
self.assertEqual(actual, expected)
def _spec_metric_fn(self, features, labels, predictions, loss):
actual = [features, labels, predictions, loss]
expected = [
self._features, self._labels, self._estimator_spec.predictions,
self._estimator_spec.loss
]
self._assert_tensors_equal(actual, expected)
return {"metric_1": tf_compat.v1.metrics.mean(tf.constant(1.))}
def _metric_fn(self, features, predictions):
actual = [features, predictions]
expected = [self._features, self._estimator_spec.predictions]
self._assert_tensors_equal(actual, expected)
return {"metric_2": tf_compat.v1.metrics.mean(tf.constant(2.))}
@parameterized.named_parameters(
{
"testcase_name": "use_tpu",
"use_tpu": True,
},
{
# TODO: Figure out why this gives error in TF 2.0:
# ValueError: Please call update_state(...) on the "mean_1" metric.
"testcase_name": "not_use_tpu",
"use_tpu": False,
})
@test_util.run_in_graph_and_eager_modes
def test_subnetwork_metrics(self, use_tpu):
with context.graph_mode():
self.setup_graph()
spec = self._estimator_spec
if not use_tpu:
spec = spec.as_estimator_spec()
metrics = tu.create_subnetwork_metrics(
self._metric_fn,
use_tpu=use_tpu,
features=self._features,
labels=self._labels,
estimator_spec=spec)
actual = self._run_metrics(metrics.eval_metrics_tuple())
expected = {"loss": 2., "metric_1": 1., "metric_2": 2.}
self.assertEqual(actual, expected)
@test_util.run_in_graph_and_eager_modes
def test_subnetwork_metrics_user_metric_fn_overrides_metrics(self):
with context.graph_mode():
self.setup_graph()
overridden_value = 100.
def _overriding_metric_fn():
value = tf.constant(overridden_value)
return {"metric_1": tf_compat.v1.metrics.mean(value)}
metrics = tu.create_subnetwork_metrics(
_overriding_metric_fn,
features=self._features,
labels=self._labels,
estimator_spec=self._estimator_spec)
actual = self._run_metrics(metrics.eval_metrics_tuple())
expected = {"loss": 2., "metric_1": overridden_value}
self.assertEqual(actual, expected)
@test_util.run_in_graph_and_eager_modes
def test_ensemble_metrics(self):
with context.graph_mode():
self.setup_graph()
architecture = _Architecture("test_ensemble_candidate", "test_ensembler")
architecture.add_subnetwork(iteration_number=0, builder_name="b_0_0")
architecture.add_subnetwork(iteration_number=0, builder_name="b_0_1")
architecture.add_subnetwork(iteration_number=1, builder_name="b_1_0")
architecture.add_subnetwork(iteration_number=2, builder_name="b_2_0")
metrics = tu.create_ensemble_metrics(
self._metric_fn,
features=self._features,
labels=self._labels,
estimator_spec=self._estimator_spec,
architecture=architecture)
actual = self._run_metrics(metrics.eval_metrics_tuple())
serialized_arch_proto = actual["architecture/adanet/ensembles"]
expected_arch_string = b"| b_0_0 | b_0_1 | b_1_0 | b_2_0 |"
self.assertIn(expected_arch_string, serialized_arch_proto)
@parameterized.named_parameters(
{
"testcase_name": "use_tpu_evaluating",
"use_tpu": True,
"mode": tf.estimator.ModeKeys.EVAL,
}, {
"testcase_name": "use_tpu_not_evaluating",
"use_tpu": True,
"mode": tf.estimator.ModeKeys.TRAIN,
}, {
"testcase_name": "not_use_tpu_evaluating",
"use_tpu": False,
"mode": tf.estimator.ModeKeys.EVAL,
}, {
"testcase_name": "not_use_tpu_not_evaluating",
"use_tpu": False,
"mode": tf.estimator.ModeKeys.TRAIN,
})
@test_util.run_in_graph_and_eager_modes
def test_iteration_metrics(self, use_tpu, mode):
with context.graph_mode():
self.setup_graph()
best_candidate_index = 3
ensemble_metrics = []
for i in range(10):
def metric_fn(val=i):
metric = tf.keras.metrics.Mean()
metric.update_state(tf.constant(val))
return {
"ensemble_v1_metric": tf_compat.v1.metrics.mean(tf.constant(val)),
"ensemble_keras_metric": metric
}
ensemble_metrics.append(tu.create_ensemble_metrics(metric_fn))
metrics = tu.create_iteration_metrics(ensemble_metrics=ensemble_metrics)
metrics_fn = (
metrics.best_eval_metrics_tuple
if use_tpu else metrics.best_eval_metric_ops)
actual = self._run_metrics(
metrics_fn(tf.constant(best_candidate_index), mode) or {})
if mode == tf.estimator.ModeKeys.EVAL:
expected = {
"ensemble_v1_metric": best_candidate_index,
"ensemble_keras_metric": best_candidate_index,
"iteration": 1
}
# We don't actually provide an architecture, so the default will be
# inside.
del actual["architecture/adanet/ensembles"]
else:
expected = {}
self.assertEqual(actual, expected)
@test_util.run_in_graph_and_eager_modes
def test_metric_ops_not_duplicated_on_cpu(self):
with context.graph_mode():
self.setup_graph()
metric_fn = lambda: {"metric": (tf.constant(5), tf.constant(5))}
best_candidate_index = 3
mode = tf.estimator.ModeKeys.EVAL
ensemble_metrics = tu.create_ensemble_metrics(metric_fn)
subnetwork_metrics = tu.create_subnetwork_metrics(metric_fn)
iteration_metrics = tu.create_iteration_metrics(
ensemble_metrics=[ensemble_metrics],
subnetwork_metrics=[subnetwork_metrics])
ensemble_ops1 = call_eval_metrics(ensemble_metrics.eval_metrics_tuple())
ensemble_ops2 = call_eval_metrics(ensemble_metrics.eval_metrics_tuple())
subnetwork_ops1 = call_eval_metrics(
subnetwork_metrics.eval_metrics_tuple())
subnetwork_ops2 = call_eval_metrics(
subnetwork_metrics.eval_metrics_tuple())
iteration_ops1 = iteration_metrics.best_eval_metric_ops(
best_candidate_index, mode)
iteration_ops2 = iteration_metrics.best_eval_metric_ops(
best_candidate_index, mode)
self.assertEqual(subnetwork_ops1, subnetwork_ops2)
self.assertEqual(ensemble_ops1, ensemble_ops2)
self.assertEqual(iteration_ops1, iteration_ops2)
for ops in [ensemble_ops1, subnetwork_ops1, iteration_ops1]:
self.assertIsNotNone(ops)
if __name__ == "__main__":
tf.test.main()
| [
"[email protected]"
]
| |
282ae6e15473678f51124dfb45ce9e9e04bf37aa | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/birdlik.py | b5d428582689d4d0a1ff28dc6b984b99dcdccca4 | []
| no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 82 | py | ii = [('LandWPA.py', 1), ('MedwTAI.py', 1), ('MedwTAI2.py', 1), ('WordWYR.py', 1)] | [
"[email protected]"
]
| |
1ba19442b8c3c6088b2d7f91c116de4870e58ec3 | 8f8ac99fd3ed9ceb36778b404f6fdd0b6899d3f4 | /pyobjc-framework-SpriteKit/Lib/SpriteKit/_metadata.py | 133d826139cade678d14d84de54d9b4985ef7ec4 | [
"MIT"
]
| permissive | strogo/pyobjc | ac4201c7742eb75348328eeecb7eedf4e3458de3 | 2579c5eaf44b0c5af77ee195c417d2c65e72dfda | refs/heads/master | 2023-07-13T00:41:56.448005 | 2021-08-24T06:42:53 | 2021-08-24T06:42:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,549 | py | # This file is generated by objective.metadata
#
# Last update: Sun Jul 11 21:54:16 2021
#
# flake8: noqa
import objc, sys
if sys.maxsize > 2 ** 32:
def sel32or64(a, b):
return b
else:
def sel32or64(a, b):
return a
if objc.arch == "arm64":
def selAorI(a, b):
return a
else:
def selAorI(a, b):
return b
misc = {}
constants = """$$"""
enums = """$PHYSICSKIT_MINUS_GL_IMPORTS@1$SKActionTimingEaseIn@1$SKActionTimingEaseInEaseOut@3$SKActionTimingEaseOut@2$SKActionTimingLinear@0$SKAttributeTypeFloat@1$SKAttributeTypeHalfFloat@5$SKAttributeTypeNone@0$SKAttributeTypeVectorFloat2@2$SKAttributeTypeVectorFloat3@3$SKAttributeTypeVectorFloat4@4$SKAttributeTypeVectorHalfFloat2@6$SKAttributeTypeVectorHalfFloat3@7$SKAttributeTypeVectorHalfFloat4@8$SKBlendModeAdd@1$SKBlendModeAlpha@0$SKBlendModeMultiply@3$SKBlendModeMultiplyAlpha@7$SKBlendModeMultiplyX2@4$SKBlendModeReplace@6$SKBlendModeScreen@5$SKBlendModeSubtract@2$SKInterpolationModeLinear@1$SKInterpolationModeSpline@2$SKInterpolationModeStep@3$SKLabelHorizontalAlignmentModeCenter@0$SKLabelHorizontalAlignmentModeLeft@1$SKLabelHorizontalAlignmentModeRight@2$SKLabelVerticalAlignmentModeBaseline@0$SKLabelVerticalAlignmentModeBottom@3$SKLabelVerticalAlignmentModeCenter@1$SKLabelVerticalAlignmentModeTop@2$SKNodeFocusBehaviorFocusable@2$SKNodeFocusBehaviorNone@0$SKNodeFocusBehaviorOccluding@1$SKParticleRenderOrderDontCare@2$SKParticleRenderOrderOldestFirst@1$SKParticleRenderOrderOldestLast@0$SKRepeatModeClamp@1$SKRepeatModeLoop@2$SKSceneScaleModeAspectFill@1$SKSceneScaleModeAspectFit@2$SKSceneScaleModeFill@0$SKSceneScaleModeResizeFill@3$SKTextureFilteringLinear@1$SKTextureFilteringNearest@0$SKTileAdjacencyAll@255$SKTileAdjacencyDown@16$SKTileAdjacencyDownEdge@199$SKTileAdjacencyLeft@64$SKTileAdjacencyLeftEdge@31$SKTileAdjacencyLowerLeft@32$SKTileAdjacencyLowerLeftCorner@253$SKTileAdjacencyLowerLeftEdge@7$SKTileAdjacencyLowerRight@8$SKTileAdjacencyLowerRightCorner@127$SKTileAdjacencyLowerRightEdge@193$SKTileAdjacencyRight@4$SKTileAdjacencyRightEdge@241$SKTileAdjacencyUp@1$SKTileAdjacencyUpEdge@124$SKTileAdjacencyUpperLeft@128$SKTileAdjacencyUpperLeftCorner@247$SKTileAdjacencyUpperLeftEdge@28$SKTileAdjacencyUpperRight@2$SKTileAdjacencyUpperRightCorner@223$SKTileAdjacencyUpperRightEdge@112$SKTileDefinitionRotation0@0$SKTileDefinitionRotation180@2$SKTileDefinitionRotation270@3$SKTileDefinitionRotation90@1$SKTileHexFlatAdjacencyAll@63$SKTileHexFlatAdjacencyDown@8$SKTileHexFlatAdjacencyLowerLeft@16$SKTileHexFlatAdjacencyLowerRight@4$SKTileHexFlatAdjacencyUp@1$SKTileHexFlatAdjacencyUpperLeft@32$SKTileHexFlatAdjacencyUpperRight@2$SKTileHexPointyAdjacencyAdd@63$SKTileHexPointyAdjacencyLeft@32$SKTileHexPointyAdjacencyLowerLeft@16$SKTileHexPointyAdjacencyLowerRight@8$SKTileHexPointyAdjacencyRight@4$SKTileHexPointyAdjacencyUpperLeft@1$SKTileHexPointyAdjacencyUpperRight@2$SKTileSetTypeGrid@0$SKTileSetTypeHexagonalFlat@2$SKTileSetTypeHexagonalPointy@3$SKTileSetTypeIsometric@1$SKTransitionDirectionDown@1$SKTransitionDirectionLeft@3$SKTransitionDirectionRight@2$SKTransitionDirectionUp@0$SKUniformTypeFloat@1$SKUniformTypeFloatMatrix2@5$SKUniformTypeFloatMatrix3@6$SKUniformTypeFloatMatrix4@7$SKUniformTypeFloatVector2@2$SKUniformTypeFloatVector3@3$SKUniformTypeFloatVector4@4$SKUniformTypeNone@0$SKUniformTypeTexture@8$SKVIEW_AVAILABLE@1$"""
misc.update({})
aliases = {"SK_AVAILABLE": "__OSX_AVAILABLE_STARTING", "SKColor": "NSColor"}
r = objc.registerMetaDataForSelector
objc._updatingMetadata(True)
try:
r(
b"NSObject",
b"didApplyConstraintsForScene:",
{"required": False, "retval": {"type": b"v"}, "arguments": {2: {"type": b"@"}}},
)
r(
b"NSObject",
b"didBeginContact:",
{"required": False, "retval": {"type": b"v"}, "arguments": {2: {"type": b"@"}}},
)
r(
b"NSObject",
b"didEndContact:",
{"required": False, "retval": {"type": b"v"}, "arguments": {2: {"type": b"@"}}},
)
r(
b"NSObject",
b"didEvaluateActionsForScene:",
{"required": False, "retval": {"type": b"v"}, "arguments": {2: {"type": b"@"}}},
)
r(
b"NSObject",
b"didFinishUpdateForScene:",
{"required": False, "retval": {"type": b"v"}, "arguments": {2: {"type": b"@"}}},
)
r(
b"NSObject",
b"didSimulatePhysicsForScene:",
{"required": False, "retval": {"type": b"v"}, "arguments": {2: {"type": b"@"}}},
)
r(
b"NSObject",
b"setSubdivisionLevels:",
{"required": True, "retval": {"type": b"v"}, "arguments": {2: {"type": b"q"}}},
)
r(
b"NSObject",
b"setWarpGeometry:",
{"required": True, "retval": {"type": b"v"}, "arguments": {2: {"type": b"@"}}},
)
r(b"NSObject", b"subdivisionLevels", {"required": True, "retval": {"type": b"q"}})
r(
b"NSObject",
b"update:forScene:",
{
"required": False,
"retval": {"type": b"v"},
"arguments": {2: {"type": b"d"}, 3: {"type": b"@"}},
},
)
r(
b"NSObject",
b"view:shouldRenderAtTime:",
{
"required": False,
"retval": {"type": "Z"},
"arguments": {2: {"type": b"@"}, 3: {"type": "d"}},
},
)
r(b"NSObject", b"warpGeometry", {"required": True, "retval": {"type": b"@"}})
r(b"SK3DNode", b"autoenablesDefaultLighting", {"retval": {"type": b"Z"}})
r(b"SK3DNode", b"isPlaying", {"retval": {"type": b"Z"}})
r(b"SK3DNode", b"loops", {"retval": {"type": b"Z"}})
r(
b"SK3DNode",
b"projectPoint:",
{"retval": {"type": b"%"}, "arguments": {2: {"type": b"%"}}},
)
r(
b"SK3DNode",
b"setAutoenablesDefaultLighting:",
{"arguments": {2: {"type": b"Z"}}},
)
r(b"SK3DNode", b"setLoops:", {"arguments": {2: {"type": b"Z"}}})
r(b"SK3DNode", b"setPlaying:", {"arguments": {2: {"type": b"Z"}}})
r(
b"SK3DNode",
b"unprojectPoint:",
{"retval": {"type": b"%"}, "arguments": {2: {"type": b"%"}}},
)
r(
b"SKAction",
b"animateWithNormalTextures:timePerFrame:resize:restore:",
{"arguments": {4: {"type": b"Z"}, 5: {"type": b"Z"}}},
)
r(
b"SKAction",
b"animateWithTextures:timePerFrame:resize:restore:",
{"arguments": {4: {"type": b"Z"}, 5: {"type": b"Z"}}},
)
r(
b"SKAction",
b"animateWithWarps:times:restore:",
{"arguments": {4: {"type": b"Z"}}},
)
r(
b"SKAction",
b"customActionWithDuration:actionBlock:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": sel32or64(b"f", b"d")},
},
}
}
}
},
)
r(
b"SKAction",
b"followPath:asOffset:orientToPath:duration:",
{"arguments": {3: {"type": b"Z"}, 4: {"type": b"Z"}}},
)
r(
b"SKAction",
b"followPath:asOffset:orientToPath:speed:",
{"arguments": {3: {"type": b"Z"}, 4: {"type": b"Z"}}},
)
r(
b"SKAction",
b"performSelector:onTarget:",
{"arguments": {2: {"sel_of_type": b"v@:"}}},
)
r(
b"SKAction",
b"playSoundFileNamed:waitForCompletion:",
{"arguments": {3: {"type": b"Z"}}},
)
r(
b"SKAction",
b"rotateToAngle:duration:shortestUnitArc:",
{"arguments": {4: {"type": b"Z"}}},
)
r(
b"SKAction",
b"runBlock:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"@?"},
"arguments": {0: {"type": b"^v"}},
}
}
}
},
)
r(
b"SKAction",
b"runBlock:queue:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"@?"},
"arguments": {0: {"type": b"^v"}},
}
}
}
},
)
r(b"SKAction", b"setNormalTexture:resize:", {"arguments": {3: {"type": b"Z"}}})
r(b"SKAction", b"setTexture:resize:", {"arguments": {3: {"type": b"Z"}}})
r(
b"SKAction",
b"setTimingFunc:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"f"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"f"}},
}
}
}
},
)
r(
b"SKAction",
b"setTimingFunction:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"f"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"f"}},
}
}
}
},
)
r(
b"SKAction",
b"timingFunc",
{
"retval": {
"callable": {
"retval": {"type": b"f"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"f"}},
}
}
},
)
r(
b"SKAction",
b"timingFunction",
{
"retval": {
"callable": {
"retval": {"type": b"f"},
"arguments": {0: {"type": b"^v"}, 1: {"type": b"f"}},
}
}
},
)
r(b"SKAudioNode", b"autoplayLooped", {"retval": {"type": b"Z"}})
r(b"SKAudioNode", b"isPositional", {"retval": {"type": b"Z"}})
r(b"SKAudioNode", b"setAutoplayLooped:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKAudioNode", b"setPositional:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKCameraNode", b"containsNode:", {"retval": {"type": b"Z"}})
r(b"SKConstraint", b"enabled", {"retval": {"type": b"Z"}})
r(b"SKConstraint", b"setEnabled:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKEffectNode", b"setShouldCenterFilter:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKEffectNode", b"setShouldEnableEffects:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKEffectNode", b"setShouldRasterize:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKEffectNode", b"shouldCenterFilter", {"retval": {"type": b"Z"}})
r(b"SKEffectNode", b"shouldEnableEffects", {"retval": {"type": b"Z"}})
r(b"SKEffectNode", b"shouldRasterize", {"retval": {"type": b"Z"}})
r(
b"SKFieldNode",
b"customFieldWithEvaluationBlock:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"@?"},
"arguments": {0: {"type": b"^v"}},
}
}
}
},
)
r(b"SKFieldNode", b"direction", {"retval": {"type": b"%"}})
r(b"SKFieldNode", b"isEnabled", {"retval": {"type": b"Z"}})
r(b"SKFieldNode", b"isExclusive", {"retval": {"type": b"Z"}})
r(
b"SKFieldNode",
b"linearGravityFieldWithVector:",
{"arguments": {2: {"type": b"%"}}},
)
r(b"SKFieldNode", b"setDirection:", {"arguments": {2: {"type": b"%"}}})
r(b"SKFieldNode", b"setEnabled:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKFieldNode", b"setExclusive:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKFieldNode", b"velocityFieldWithVector:", {"arguments": {2: {"type": b"%"}}})
r(b"SKLightNode", b"isEnabled", {"retval": {"type": b"Z"}})
r(b"SKLightNode", b"setEnabled:", {"arguments": {2: {"type": b"Z"}}})
r(
b"SKMutableTexture",
b"modifyPixelDataWithBlock:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {
"type": b"^v",
"type_modifier": "N",
"c_array_length_in_arg": 2,
},
2: {"type": b"Q"},
},
}
}
}
},
)
r(b"SKNode", b"containsPoint:", {"retval": {"type": b"Z"}})
r(
b"SKNode",
b"enumerateChildNodesWithName:usingBlock:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"^Z", "type_modifier": "o"},
},
}
}
}
},
)
r(b"SKNode", b"hasActions", {"retval": {"type": b"Z"}})
r(b"SKNode", b"inParentHierarchy:", {"retval": {"type": b"Z"}})
r(b"SKNode", b"intersectsNode:", {"retval": {"type": b"Z"}})
r(b"SKNode", b"isAccessibilityElement", {"retval": {"type": b"Z"}})
r(b"SKNode", b"isAccessibilityEnabled", {"retval": {"type": b"Z"}})
r(b"SKNode", b"isEqualToNode:", {"retval": {"type": b"Z"}})
r(b"SKNode", b"isHidden", {"retval": {"type": b"Z"}})
r(b"SKNode", b"isPaused", {"retval": {"type": b"Z"}})
r(b"SKNode", b"isUserInteractionEnabled", {"retval": {"type": b"Z"}})
r(
b"SKNode",
b"nodeWithFileNamed:securelyWithClasses:andError:",
{"arguments": {4: {"type_modifier": b"o"}}},
)
r(
b"SKNode",
b"runAction:completion:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}},
}
}
}
},
)
r(b"SKNode", b"setAccessibilityElement:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKNode", b"setAccessibilityEnabled:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKNode", b"setHidden:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKNode", b"setPaused:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKNode", b"setUserInteractionEnabled:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKPhysicsBody", b"affectedByGravity", {"retval": {"type": b"Z"}})
r(b"SKPhysicsBody", b"allowsRotation", {"retval": {"type": b"Z"}})
r(b"SKPhysicsBody", b"isDynamic", {"retval": {"type": b"Z"}})
r(b"SKPhysicsBody", b"isResting", {"retval": {"type": b"Z"}})
r(b"SKPhysicsBody", b"pinned", {"retval": {"type": b"Z"}})
r(b"SKPhysicsBody", b"setAffectedByGravity:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKPhysicsBody", b"setAllowsRotation:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKPhysicsBody", b"setDynamic:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKPhysicsBody", b"setPinned:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKPhysicsBody", b"setResting:", {"arguments": {2: {"type": b"Z"}}})
r(
b"SKPhysicsBody",
b"setUsesPreciseCollisionDetection:",
{"arguments": {2: {"type": b"Z"}}},
)
r(b"SKPhysicsBody", b"usesPreciseCollisionDetection", {"retval": {"type": b"Z"}})
r(
b"SKPhysicsJointPin",
b"setShouldEnableLimits:",
{"arguments": {2: {"type": b"Z"}}},
)
r(b"SKPhysicsJointPin", b"shouldEnableLimits", {"retval": {"type": b"Z"}})
r(
b"SKPhysicsJointSliding",
b"setShouldEnableLimits:",
{"arguments": {2: {"type": b"Z"}}},
)
r(b"SKPhysicsJointSliding", b"shouldEnableLimits", {"retval": {"type": b"Z"}})
r(
b"SKPhysicsWorld",
b"enumerateBodiesAlongRayStart:end:usingBlock:",
{
"arguments": {
4: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"{CGPoint=dd}"},
3: {"type": b"{CGVector=dd}"},
4: {"type": b"o^Z"},
},
}
}
}
},
)
r(
b"SKPhysicsWorld",
b"enumerateBodiesAtPoint:usingBlock:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"^Z", "type_modifier": "o"},
},
}
}
}
},
)
r(
b"SKPhysicsWorld",
b"enumerateBodiesInRect:usingBlock:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"^Z", "type_modifier": "o"},
},
}
}
}
},
)
r(
b"SKPhysicsWorld",
b"sampleFieldsAt:",
{"retval": {"type": b"%"}, "arguments": {2: {"type": b"%"}}},
)
r(b"SKRegion", b"containsPoint:", {"retval": {"type": b"Z"}})
r(b"SKRenderer", b"ignoresSiblingOrder", {"retval": {"type": "Z"}})
r(b"SKRenderer", b"setIgnoresSiblingOrder:", {"arguments": {2: {"type": "Z"}}})
r(
b"SKRenderer",
b"setShouldCullNonVisibleNodes:",
{"arguments": {2: {"type": "Z"}}},
)
r(b"SKRenderer", b"setShowsDrawCount:", {"arguments": {2: {"type": "Z"}}})
r(b"SKRenderer", b"setShowsFields:", {"arguments": {2: {"type": "Z"}}})
r(b"SKRenderer", b"setShowsNodeCount:", {"arguments": {2: {"type": "Z"}}})
r(b"SKRenderer", b"setShowsPhysics:", {"arguments": {2: {"type": "Z"}}})
r(b"SKRenderer", b"setShowsQuadCount:", {"arguments": {2: {"type": "Z"}}})
r(b"SKRenderer", b"shouldCullNonVisibleNodes", {"retval": {"type": "Z"}})
r(b"SKRenderer", b"showsDrawCount", {"retval": {"type": "Z"}})
r(b"SKRenderer", b"showsFields", {"retval": {"type": "Z"}})
r(b"SKRenderer", b"showsNodeCount", {"retval": {"type": "Z"}})
r(b"SKRenderer", b"showsPhysics", {"retval": {"type": "Z"}})
r(b"SKRenderer", b"showsQuadCount", {"retval": {"type": "Z"}})
r(b"SKShapeNode", b"isAntialiased", {"retval": {"type": b"Z"}})
r(b"SKShapeNode", b"setAntialiased:", {"arguments": {2: {"type": b"Z"}}})
r(
b"SKShapeNode",
b"shapeNodeWithPath:centered:",
{"arguments": {3: {"type": b"Z"}}},
)
r(
b"SKShapeNode",
b"shapeNodeWithPoints:count:",
{"arguments": {2: {"type_modifier": b"n", "c_array_length_in_arg": 3}}},
)
r(
b"SKShapeNode",
b"shapeNodeWithSplinePoints:count:",
{"arguments": {2: {"type_modifier": b"n", "c_array_length_in_arg": 3}}},
)
r(
b"SKSpriteNode",
b"spriteNodeWithImageNamed:normalMapped:",
{"arguments": {3: {"type": b"Z"}}},
)
r(
b"SKTexture",
b"preloadTextures:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}},
}
}
}
},
)
r(
b"SKTexture",
b"preloadWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}},
}
}
}
},
)
r(b"SKTexture", b"setUsesMipmaps:", {"arguments": {2: {"type": b"Z"}}})
r(
b"SKTexture",
b"textureNoiseWithSmoothness:size:grayscale:",
{"arguments": {4: {"type": b"Z"}}},
)
r(
b"SKTexture",
b"textureWithData:size:flipped:",
{"arguments": {4: {"type": b"Z"}}},
)
r(b"SKTexture", b"usesMipmaps", {"retval": {"type": b"Z"}})
r(
b"SKTextureAtlas",
b"preloadTextureAtlases:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}},
}
}
}
},
)
r(
b"SKTextureAtlas",
b"preloadTextureAtlasesNamed:withCompletionHandler:",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": b"^v"},
1: {"type": b"@"},
2: {"type": b"@"},
},
}
}
}
},
)
r(
b"SKTextureAtlas",
b"preloadWithCompletionHandler:",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": b"^v"}},
}
}
}
},
)
r(b"SKTileDefinition", b"flipHorizontally", {"retval": {"type": "Z"}})
r(b"SKTileDefinition", b"flipVertically", {"retval": {"type": "Z"}})
r(b"SKTileDefinition", b"setFlipHorizontally:", {"arguments": {2: {"type": "Z"}}})
r(b"SKTileDefinition", b"setFlipVertically:", {"arguments": {2: {"type": "Z"}}})
r(b"SKTileMapNode", b"enableAutomapping", {"retval": {"type": b"Z"}})
r(b"SKTileMapNode", b"setEnableAutomapping:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKTransformNode", b"quaternion", {"retval": {"type": b"{_simd_quatf=}"}})
r(
b"SKTransformNode",
b"rotationMatrix",
{"retval": {"type": b"{_matrix_float3x3=?}"}},
)
r(
b"SKTransformNode",
b"setQuaternion:",
{"arguments": {2: {"type": b"{_simd_quatf=}"}}},
)
r(
b"SKTransformNode",
b"setRotationMatrix:",
{"arguments": {2: {"type": b"{_matrix_float3x3=?}"}}},
)
r(b"SKTransition", b"pausesIncomingScene", {"retval": {"type": b"Z"}})
r(b"SKTransition", b"pausesOutgoingScene", {"retval": {"type": b"Z"}})
r(b"SKTransition", b"setPausesIncomingScene:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKTransition", b"setPausesOutgoingScene:", {"arguments": {2: {"type": b"Z"}}})
r(
b"SKUniform",
b"initWithName:matrixFloat2x2:",
{"arguments": {3: {"type": b"{_matrix_float2x2=?}"}}},
)
r(
b"SKUniform",
b"initWithName:matrixFloat3x3:",
{"arguments": {3: {"type": b"{_matrix_float3x3=?}"}}},
)
r(
b"SKUniform",
b"initWithName:matrixFloat4x4:",
{"arguments": {3: {"type": b"{_matrix_float4x4=?}"}}},
)
r(
b"SKUniform",
b"matrixFloat2x2Value",
{"retval": {"type": b"{_matrix_float2x2=?}"}},
)
r(
b"SKUniform",
b"matrixFloat3x3Value",
{"retval": {"type": b"{_matrix_float3x3=?}"}},
)
r(
b"SKUniform",
b"matrixFloat4x4Value",
{"retval": {"type": b"{_matrix_float4x4=?}"}},
)
r(
b"SKUniform",
b"setMatrixFloat2x2Value:",
{"arguments": {2: {"type": b"{_matrix_float2x2=?}"}}},
)
r(
b"SKUniform",
b"setMatrixFloat3x3Value:",
{"arguments": {2: {"type": b"{_matrix_float3x3=?}"}}},
)
r(
b"SKUniform",
b"setMatrixFloat4x4Value:",
{"arguments": {2: {"type": b"{_matrix_float4x4=?}"}}},
)
r(
b"SKUniform",
b"uniformWithName:matrixFloat2x2:",
{"arguments": {3: {"type": b"{_matrix_float2x2=?}"}}},
)
r(
b"SKUniform",
b"uniformWithName:matrixFloat3x3:",
{"arguments": {3: {"type": b"{_matrix_float3x3=?}"}}},
)
r(
b"SKUniform",
b"uniformWithName:matrixFloat4x4:",
{"arguments": {3: {"type": b"{_matrix_float4x4=?}"}}},
)
r(b"SKView", b"allowsTransparency", {"retval": {"type": b"Z"}})
r(b"SKView", b"disableDepthStencilBuffer", {"retval": {"type": "Z"}})
r(b"SKView", b"ignoresSiblingOrder", {"retval": {"type": b"Z"}})
r(b"SKView", b"isAsynchronous", {"retval": {"type": b"Z"}})
r(b"SKView", b"isPaused", {"retval": {"type": b"Z"}})
r(b"SKView", b"setAllowsTransparency:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setAsynchronous:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setDisableDepthStencilBuffer:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setIgnoresSiblingOrder:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setPaused:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setShouldCullNonVisibleNodes:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setShowsDrawCount:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setShowsFPS:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setShowsFields:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setShowsNodeCount:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setShowsPhysics:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"setShowsQuadCount:", {"arguments": {2: {"type": b"Z"}}})
r(b"SKView", b"shouldCullNonVisibleNodes", {"retval": {"type": b"Z"}})
r(b"SKView", b"showsDrawCount", {"retval": {"type": b"Z"}})
r(b"SKView", b"showsFPS", {"retval": {"type": b"Z"}})
r(b"SKView", b"showsFields", {"retval": {"type": b"Z"}})
r(b"SKView", b"showsNodeCount", {"retval": {"type": b"Z"}})
r(b"SKView", b"showsPhysics", {"retval": {"type": b"Z"}})
r(b"SKView", b"showsQuadCount", {"retval": {"type": b"Z"}})
finally:
objc._updatingMetadata(False)
expressions = {
"SKTileAdjacencyRightEdge": "SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft | SKTileAdjacencyUp",
"SKTileAdjacencyUpperRightCorner": "SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft",
"SKTileAdjacencyUpperRightEdge": "SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft",
"SKTileAdjacencyLowerRightCorner": "SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft",
"SKTileAdjacencyLowerRightEdge": "SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft | SKTileAdjacencyUp",
"SKTileAdjacencyDownEdge": "SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft",
"SKTileAdjacencyLeftEdge": "SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown",
"SKTileAdjacencyUpEdge": "SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft",
"SKTileAdjacencyLowerLeftEdge": "SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight",
"SKTileAdjacencyUpperLeftCorner": "SKTileAdjacencyUp | SKTileAdjacencyUpperRight | SKTileAdjacencyRight | SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft",
"SKTileAdjacencyUpperLeftEdge": "SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown",
"SKTileAdjacencyLowerLeftCorner": "SKTileAdjacencyUp | SKTileAdjacencyRight | SKTileAdjacencyLowerRight | SKTileAdjacencyDown | SKTileAdjacencyLowerLeft | SKTileAdjacencyLeft | SKTileAdjacencyUpperLeft",
}
# END OF FILE
| [
"[email protected]"
]
| |
b1f2bc27194e8f706625493989d95c5335783f9f | fc58366ed416de97380df7040453c9990deb7faa | /daoliagent/services/arp.py | 7d9cf1622fdcd08505553150ef2cdef052d75232 | [
"Apache-2.0"
]
| permissive | foruy/openflow-multiopenstack | eb51e37b2892074234ebdd5b501b24aa1f72fb86 | 74140b041ac25ed83898ff3998e8dcbed35572bb | refs/heads/master | 2016-09-13T08:24:09.713883 | 2016-05-19T01:16:58 | 2016-05-19T01:16:58 | 58,977,485 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,005 | py | from oslo.config import cfg
from ryu.lib.packet import arp
from ryu.lib.packet import ethernet
from ryu.lib.packet import packet
from ryu.lib import addrconv
from ryu.ofproto import ether
from daoliagent.services.base import PacketBase
from daoliagent.lib import SMAX
from daoliagent.openstack.common import log as logging
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class PacketARP(PacketBase):
priority = 5
def _arp(self, msg, dp, in_port, pkt_ether, pkt_arp, address):
ofp, ofp_parser, ofp_set, ofp_out = self.ofp_get(dp)
actions = [ofp_parser.OFPActionSetField(eth_src=address),
ofp_parser.OFPActionOutput(ofp.OFPP_IN_PORT)]
match = ofp_parser.OFPMatch(
in_port=in_port, eth_type=ether.ETH_TYPE_ARP,
arp_spa=pkt_arp.src_ip, arp_tpa=pkt_arp.dst_ip)
LOG.debug("arp response %(src_mac)s-%(src_ip)s -> %(dst_mac)s-%(dst_ip)s",
{'src_mac': address, 'src_ip': pkt_arp.dst_ip,
'dst_mac': pkt_arp.src_mac, 'dst_ip': pkt_arp.src_ip})
self.add_flow(dp, match, actions)
self.packet_out(msg, dp, actions)
def _redirect(self, msg, dp, in_port, pkt_ether, pkt_arp, output):
ofp, ofp_parser, ofp_set, ofp_out = self.ofp_get(dp)
actions = [ofp_parser.OFPActionOutput(output)]
match = ofp_parser.OFPMatch(
in_port=in_port, eth_type=ether.ETH_TYPE_ARP,
arp_spa=pkt_arp.src_ip, arp_tpa=pkt_arp.dst_ip)
self.add_flow(dp, match, actions)
self.packet_out(msg, dp, actions)
def run(self, msg, pkt_ether, pkt_arp, gateway, **kwargs):
dp = msg.datapath
in_port = msg.match['in_port']
ofp, ofp_parser, ofp_set, ofp_out = self.ofp_get(dp)
src_mac = pkt_arp.src_mac
dst_ip = pkt_arp.dst_ip
LOG.debug("arp request %(src_mac)s-%(src_ip)s -> %(dst_mac)s-%(dst_ip)s",
{'src_mac': src_mac, 'src_ip': pkt_arp.src_ip,
'dst_mac': pkt_arp.dst_mac, 'dst_ip': dst_ip})
if gateway.int_dev != gateway.ext_dev:
int_port = self.port_get(dp, devname=gateway.int_dev)
tap_port = self.port_get(dp, devname=gateway.vint_dev)
if not int_port or not tap_port:
return True
if in_port == int_port.port_no:
if pkt_arp.dst_ip == gateway['int_ip']:
self._redirect(msg, dp, in_port, pkt_ether, pkt_arp, tap_port.port_no)
return True
if in_port == tap_port.port_no:
if pkt_arp.src_ip == gateway['int_ip']:
self._redirect(msg, dp, in_port, pkt_ether, pkt_arp, int_port.port_no)
return True
port = self.port_get(dp, devname=gateway['ext_dev'])
if not port:
return True
if in_port == port.port_no:
if pkt_arp.dst_ip == gateway['ext_ip']:
self._redirect(msg, dp, in_port, pkt_ether, pkt_arp, ofp.OFPP_LOCAL)
return True
if in_port == ofp.OFPP_LOCAL:
if pkt_arp.src_ip == gateway['ext_ip']:
self._redirect(msg, dp, in_port, pkt_ether, pkt_arp, port.port_no)
return True
num_ip = addrconv.ipv4._addr(dst_ip).value
if pkt_arp.opcode != arp.ARP_REQUEST:
LOG.debug("unknown arp op %s", pkt_arp.opcode)
elif (num_ip & 0x0000FFFF == SMAX - 1):
#br_port = self.port_get(dp, devname=gateway['vext_dev'])
#self._arp(dp, in_port, pkt_ether, pkt_arp, br_port.hw_addr)
self._arp(msg, dp, in_port, pkt_ether, pkt_arp, gateway['vint_mac'])
else:
servers = self.db.server_get_by_mac(src_mac, dst_ip, False)
if servers['src'] and servers['dst']:
self._arp(msg, dp, in_port, pkt_ether, pkt_arp, servers['dst'].mac_address)
else:
self._arp(msg, dp, in_port, pkt_ether, pkt_arp, gateway['vint_mac'])
| [
"[email protected]"
]
| |
5bba101ad14c50d3036fcbeb1308b66a5996c235 | 38744aa4f3ba165a8c043ac51c87b849882ea129 | /game/lib/python3.7/site-packages/Security/_metadata.py | 7cce69ed3c96048306a7eabc4a36fc086d625bc0 | []
| no_license | CleverParty/containers | 5be3c82e38e65ccbaf703fe68f35992ad9941219 | a0d45e62fda2cb7b047c7a930cf6437e71a31d80 | refs/heads/master | 2023-08-04T01:32:58.122067 | 2021-02-07T15:14:35 | 2021-02-07T15:14:35 | 266,421,416 | 2 | 0 | null | 2021-09-22T19:39:31 | 2020-05-23T21:20:17 | Python | UTF-8 | Python | false | false | 107,766 | py | # This file is generated by objective.metadata
#
# Last update: Sun Mar 22 17:27:39 2020
#
# flake8: noqa
import objc, sys
if sys.maxsize > 2 ** 32:
def sel32or64(a, b):
return b
else:
def sel32or64(a, b):
return a
misc = {}
misc.update(
{
"AuthorizationExternalForm": objc.createStructType(
"AuthorizationExternalForm",
b"{_AuthorizationExternalForm=[32C]}",
["bytes"],
),
"SecKeychainSettings": objc.createStructType(
"SecKeychainSettings",
b"{SecKeychainSettings=IZZI}",
["version", "lockOnSleep", "useLockInterval", "lockInterval"],
),
"SecItemImportExportKeyParameters": objc.createStructType(
"SecItemImportExportKeyParameters",
b"{_SecItemImportExportKeyParameters=II@@@@@@}",
[
"version",
"flags",
"passphrase",
"alertTitle",
"alertPrompt",
"accessRef",
"keyUsage",
"keyAttributes",
],
),
"SecKeychainCallbackInfo": objc.createStructType(
"SecKeychainCallbackInfo",
b"{SecKeychainCallbackInfo=I@@i}",
["version", "item", "keychain", "pid"],
),
"AuthorizationItem": objc.createStructType(
"AuthorizationItem",
b"{_AuthorizationItem=^cL^vI}",
["name", "valueLength", "value", "flags"],
),
}
)
constants = """$kCMSEncoderDigestAlgorithmSHA1$kCMSEncoderDigestAlgorithmSHA256$kSSLSessionConfig_3DES_fallback$kSSLSessionConfig_ATSv1$kSSLSessionConfig_ATSv1_noPFS$kSSLSessionConfig_RC4_fallback$kSSLSessionConfig_TLSv1_3DES_fallback$kSSLSessionConfig_TLSv1_RC4_fallback$kSSLSessionConfig_TLSv1_fallback$kSSLSessionConfig_anonymous$kSSLSessionConfig_default$kSSLSessionConfig_legacy$kSSLSessionConfig_legacy_DHE$kSSLSessionConfig_standard$kSecACLAuthorizationAny$kSecACLAuthorizationChangeACL$kSecACLAuthorizationChangeOwner$kSecACLAuthorizationDecrypt$kSecACLAuthorizationDelete$kSecACLAuthorizationDerive$kSecACLAuthorizationEncrypt$kSecACLAuthorizationExportClear$kSecACLAuthorizationExportWrapped$kSecACLAuthorizationGenKey$kSecACLAuthorizationImportClear$kSecACLAuthorizationImportWrapped$kSecACLAuthorizationIntegrity$kSecACLAuthorizationKeychainCreate$kSecACLAuthorizationKeychainDelete$kSecACLAuthorizationKeychainItemDelete$kSecACLAuthorizationKeychainItemInsert$kSecACLAuthorizationKeychainItemModify$kSecACLAuthorizationKeychainItemRead$kSecACLAuthorizationLogin$kSecACLAuthorizationMAC$kSecACLAuthorizationPartitionID$kSecACLAuthorizationSign$kSecAttrAccess$kSecAttrAccessControl$kSecAttrAccessGroup$kSecAttrAccessGroupToken$kSecAttrAccessible$kSecAttrAccessibleAfterFirstUnlock$kSecAttrAccessibleAfterFirstUnlockThisDeviceOnly$kSecAttrAccessibleAlways$kSecAttrAccessibleAlwaysThisDeviceOnly$kSecAttrAccessibleWhenPasscodeSetThisDeviceOnly$kSecAttrAccessibleWhenUnlocked$kSecAttrAccessibleWhenUnlockedThisDeviceOnly$kSecAttrAccount$kSecAttrApplicationLabel$kSecAttrApplicationTag$kSecAttrAuthenticationType$kSecAttrAuthenticationTypeDPA$kSecAttrAuthenticationTypeDefault$kSecAttrAuthenticationTypeHTMLForm$kSecAttrAuthenticationTypeHTTPBasic$kSecAttrAuthenticationTypeHTTPDigest$kSecAttrAuthenticationTypeMSN$kSecAttrAuthenticationTypeNTLM$kSecAttrAuthenticationTypeRPA$kSecAttrCanDecrypt$kSecAttrCanDerive$kSecAttrCanEncrypt$kSecAttrCanSign$kSecAttrCanUnwrap$kSecAttrCanVerify$kSecAttrCanWrap$kSecAttrCertificateEncoding$kSecAttrCertificateType$kSecAttrComment$kSecAttrCreationDate$kSecAttrCreator$kSecAttrDescription$kSecAttrEffectiveKeySize$kSecAttrGeneric$kSecAttrIsExtractable$kSecAttrIsInvisible$kSecAttrIsNegative$kSecAttrIsPermanent$kSecAttrIsSensitive$kSecAttrIssuer$kSecAttrKeyClass$kSecAttrKeyClassPrivate$kSecAttrKeyClassPublic$kSecAttrKeyClassSymmetric$kSecAttrKeySizeInBits$kSecAttrKeyType$kSecAttrKeyType3DES$kSecAttrKeyTypeAES$kSecAttrKeyTypeCAST$kSecAttrKeyTypeDES$kSecAttrKeyTypeDSA$kSecAttrKeyTypeEC$kSecAttrKeyTypeECDSA$kSecAttrKeyTypeECSECPrimeRandom$kSecAttrKeyTypeRC2$kSecAttrKeyTypeRC4$kSecAttrKeyTypeRSA$kSecAttrLabel$kSecAttrModificationDate$kSecAttrPRF$kSecAttrPRFHmacAlgSHA1$kSecAttrPRFHmacAlgSHA224$kSecAttrPRFHmacAlgSHA256$kSecAttrPRFHmacAlgSHA384$kSecAttrPRFHmacAlgSHA512$kSecAttrPath$kSecAttrPersistantReference$kSecAttrPersistentReference$kSecAttrPort$kSecAttrProtocol$kSecAttrProtocolAFP$kSecAttrProtocolAppleTalk$kSecAttrProtocolDAAP$kSecAttrProtocolEPPC$kSecAttrProtocolFTP$kSecAttrProtocolFTPAccount$kSecAttrProtocolFTPProxy$kSecAttrProtocolFTPS$kSecAttrProtocolHTTP$kSecAttrProtocolHTTPProxy$kSecAttrProtocolHTTPS$kSecAttrProtocolHTTPSProxy$kSecAttrProtocolIMAP$kSecAttrProtocolIMAPS$kSecAttrProtocolIPP$kSecAttrProtocolIRC$kSecAttrProtocolIRCS$kSecAttrProtocolLDAP$kSecAttrProtocolLDAPS$kSecAttrProtocolNNTP$kSecAttrProtocolNNTPS$kSecAttrProtocolPOP3$kSecAttrProtocolPOP3S$kSecAttrProtocolRTSP$kSecAttrProtocolRTSPProxy$kSecAttrProtocolSMB$kSecAttrProtocolSMTP$kSecAttrProtocolSOCKS$kSecAttrProtocolSSH$
kSecAttrProtocolTelnet$kSecAttrProtocolTelnetS$kSecAttrPublicKeyHash$kSecAttrRounds$kSecAttrSalt$kSecAttrSecurityDomain$kSecAttrSerialNumber$kSecAttrServer$kSecAttrService$kSecAttrSubject$kSecAttrSubjectKeyID$kSecAttrSyncViewHint$kSecAttrSynchronizable$kSecAttrSynchronizableAny$kSecAttrTokenID$kSecAttrTokenIDSecureEnclave$kSecAttrType$kSecBase32Encoding$kSecBase64Encoding$kSecCFErrorArchitecture$kSecCFErrorGuestAttributes$kSecCFErrorInfoPlist$kSecCFErrorPath$kSecCFErrorPattern$kSecCFErrorRequirementSyntax$kSecCFErrorResourceAdded$kSecCFErrorResourceAltered$kSecCFErrorResourceMissing$kSecCFErrorResourceSeal$kSecCFErrorResourceSideband$kSecClass$kSecClassCertificate$kSecClassGenericPassword$kSecClassIdentity$kSecClassInternetPassword$kSecClassKey$kSecCodeAttributeArchitecture$kSecCodeAttributeBundleVersion$kSecCodeAttributeSubarchitecture$kSecCodeAttributeUniversalFileOffset$kSecCodeInfoCMS$kSecCodeInfoCdHashes$kSecCodeInfoCertificates$kSecCodeInfoChangedFiles$kSecCodeInfoDesignatedRequirement$kSecCodeInfoDigestAlgorithm$kSecCodeInfoDigestAlgorithms$kSecCodeInfoEntitlements$kSecCodeInfoEntitlementsDict$kSecCodeInfoFlags$kSecCodeInfoFormat$kSecCodeInfoIdentifier$kSecCodeInfoImplicitDesignatedRequirement$kSecCodeInfoMainExecutable$kSecCodeInfoPList$kSecCodeInfoPlatformIdentifier$kSecCodeInfoRequirementData$kSecCodeInfoRequirements$kSecCodeInfoSource$kSecCodeInfoStatus$kSecCodeInfoTeamIdentifier$kSecCodeInfoTime$kSecCodeInfoTimestamp$kSecCodeInfoTrust$kSecCodeInfoUnique$kSecCompressionRatio$kSecDecodeTypeAttribute$kSecDigestHMACKeyAttribute$kSecDigestHMACMD5$kSecDigestHMACSHA1$kSecDigestHMACSHA2$kSecDigestLengthAttribute$kSecDigestMD2$kSecDigestMD4$kSecDigestMD5$kSecDigestSHA1$kSecDigestSHA2$kSecDigestTypeAttribute$kSecEncodeLineLengthAttribute$kSecEncodeTypeAttribute$kSecEncryptKey$kSecEncryptionMode$kSecGuestAttributeArchitecture$kSecGuestAttributeAudit$kSecGuestAttributeCanonical$kSecGuestAttributeDynamicCode$kSecGuestAttributeDynamicCodeInfoPlist$kSecGuestAttributeHash$kSecGuestAttributeMachPort$kSecGuestAttributePid$kSecGuestAttributeSubarchitecture$kSecIVKey$kSecIdentityDomainDefault$kSecIdentityDomainKerberosKDC$kSecImportExportAccess$kSecImportExportKeychain$kSecImportExportPassphrase$kSecImportItemCertChain$kSecImportItemIdentity$kSecImportItemKeyID$kSecImportItemLabel$kSecImportItemTrust$kSecInputIsAttributeName$kSecInputIsDigest$kSecInputIsPlainText$kSecInputIsRaw$kSecKeyAlgorithmECDHKeyExchangeCofactor$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA1$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA224$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA256$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA384$kSecKeyAlgorithmECDHKeyExchangeCofactorX963SHA512$kSecKeyAlgorithmECDHKeyExchangeStandard$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA1$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA224$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA256$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA384$kSecKeyAlgorithmECDHKeyExchangeStandardX963SHA512$kSecKeyAlgorithmECDSASignatureDigestX962$kSecKeyAlgorithmECDSASignatureDigestX962SHA1$kSecKeyAlgorithmECDSASignatureDigestX962SHA224$kSecKeyAlgorithmECDSASignatureDigestX962SHA256$kSecKeyAlgorithmECDSASignatureDigestX962SHA384$kSecKeyAlgorithmECDSASignatureDigestX962SHA512$kSecKeyAlgorithmECDSASignatureMessageX962SHA1$kSecKeyAlgorithmECDSASignatureMessageX962SHA224$kSecKeyAlgorithmECDSASignatureMessageX962SHA256$kSecKeyAlgorithmECDSASignatureMessageX962SHA384$kSecKeyAlgorithmECDSASignatureMessageX962SHA512$kSecKeyAlgorithmECDSASignatureRFC4754$kSecKeyAlgorithm
ECIESEncryptionCofactorVariableIVX963SHA224AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA256AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA384AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorVariableIVX963SHA512AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA1AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA224AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA256AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA384AESGCM$kSecKeyAlgorithmECIESEncryptionCofactorX963SHA512AESGCM$kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA224AESGCM$kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA256AESGCM$kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA384AESGCM$kSecKeyAlgorithmECIESEncryptionStandardVariableIVX963SHA512AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA1AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA224AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA256AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA384AESGCM$kSecKeyAlgorithmECIESEncryptionStandardX963SHA512AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA1$kSecKeyAlgorithmRSAEncryptionOAEPSHA1AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA224$kSecKeyAlgorithmRSAEncryptionOAEPSHA224AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA256$kSecKeyAlgorithmRSAEncryptionOAEPSHA256AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA384$kSecKeyAlgorithmRSAEncryptionOAEPSHA384AESGCM$kSecKeyAlgorithmRSAEncryptionOAEPSHA512$kSecKeyAlgorithmRSAEncryptionOAEPSHA512AESGCM$kSecKeyAlgorithmRSAEncryptionPKCS1$kSecKeyAlgorithmRSAEncryptionRaw$kSecKeyAlgorithmRSASignatureDigestPKCS1v15Raw$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA1$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA224$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA256$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA384$kSecKeyAlgorithmRSASignatureDigestPKCS1v15SHA512$kSecKeyAlgorithmRSASignatureDigestPSSSHA1$kSecKeyAlgorithmRSASignatureDigestPSSSHA224$kSecKeyAlgorithmRSASignatureDigestPSSSHA256$kSecKeyAlgorithmRSASignatureDigestPSSSHA384$kSecKeyAlgorithmRSASignatureDigestPSSSHA512$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA1$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA224$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA256$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA384$kSecKeyAlgorithmRSASignatureMessagePKCS1v15SHA512$kSecKeyAlgorithmRSASignatureMessagePSSSHA1$kSecKeyAlgorithmRSASignatureMessagePSSSHA224$kSecKeyAlgorithmRSASignatureMessagePSSSHA256$kSecKeyAlgorithmRSASignatureMessagePSSSHA384$kSecKeyAlgorithmRSASignatureMessagePSSSHA512$kSecKeyAlgorithmRSASignatureRaw$kSecKeyAttributeName$kSecKeyKeyExchangeParameterRequestedSize$kSecKeyKeyExchangeParameterSharedInfo$kSecLineLength64$kSecLineLength76$kSecMatchCaseInsensitive$kSecMatchDiacriticInsensitive$kSecMatchEmailAddressIfPresent$kSecMatchIssuers$kSecMatchItemList$kSecMatchLimit$kSecMatchLimitAll$kSecMatchLimitOne$kSecMatchPolicy$kSecMatchSearchList$kSecMatchSubjectContains$kSecMatchSubjectEndsWith$kSecMatchSubjectStartsWith$kSecMatchSubjectWholeString$kSecMatchTrustedOnly$kSecMatchValidOnDate$kSecMatchWidthInsensitive$kSecModeCBCKey$kSecModeCFBKey$kSecModeECBKey$kSecModeNoneKey$kSecModeOFBKey$kSecOAEPEncodingParametersAttributeName$kSecOAEPMGF1DigestAlgorithmAttributeName$kSecOAEPMessageLengthAttributeName$kSecOIDADC_CERT_POLICY$kSecOIDAPPLE_CERT_POLICY$kSecOIDAPPLE_EKU_CODE_SIGNING$kSecOIDAPPLE_EKU_CODE_SIGNING_DEV$kSecOIDAPPLE_EKU_ICHAT_ENCRYPTION$kSecOIDAPPLE_EKU_ICHAT_SIGNING$kSecOIDAPPLE_EKU_RESOURCE_SIGNING$kSecOIDAPPLE_EKU_SYSTEM_IDENTITY$kSecOIDAPPLE_EXTENSION$kSecOIDAP
PLE_EXTENSION_AAI_INTERMEDIATE$kSecOIDAPPLE_EXTENSION_ADC_APPLE_SIGNING$kSecOIDAPPLE_EXTENSION_ADC_DEV_SIGNING$kSecOIDAPPLE_EXTENSION_APPLEID_INTERMEDIATE$kSecOIDAPPLE_EXTENSION_APPLE_SIGNING$kSecOIDAPPLE_EXTENSION_CODE_SIGNING$kSecOIDAPPLE_EXTENSION_INTERMEDIATE_MARKER$kSecOIDAPPLE_EXTENSION_ITMS_INTERMEDIATE$kSecOIDAPPLE_EXTENSION_WWDR_INTERMEDIATE$kSecOIDAuthorityInfoAccess$kSecOIDAuthorityKeyIdentifier$kSecOIDBasicConstraints$kSecOIDBiometricInfo$kSecOIDCSSMKeyStruct$kSecOIDCertIssuer$kSecOIDCertificatePolicies$kSecOIDClientAuth$kSecOIDCollectiveStateProvinceName$kSecOIDCollectiveStreetAddress$kSecOIDCommonName$kSecOIDCountryName$kSecOIDCrlDistributionPoints$kSecOIDCrlNumber$kSecOIDCrlReason$kSecOIDDOTMAC_CERT_EMAIL_ENCRYPT$kSecOIDDOTMAC_CERT_EMAIL_SIGN$kSecOIDDOTMAC_CERT_EXTENSION$kSecOIDDOTMAC_CERT_IDENTITY$kSecOIDDOTMAC_CERT_POLICY$kSecOIDDeltaCrlIndicator$kSecOIDDescription$kSecOIDEKU_IPSec$kSecOIDEmailAddress$kSecOIDEmailProtection$kSecOIDExtendedKeyUsage$kSecOIDExtendedKeyUsageAny$kSecOIDExtendedUseCodeSigning$kSecOIDGivenName$kSecOIDHoldInstructionCode$kSecOIDInvalidityDate$kSecOIDIssuerAltName$kSecOIDIssuingDistributionPoint$kSecOIDIssuingDistributionPoints$kSecOIDKERBv5_PKINIT_KP_CLIENT_AUTH$kSecOIDKERBv5_PKINIT_KP_KDC$kSecOIDKeyUsage$kSecOIDLocalityName$kSecOIDMS_NTPrincipalName$kSecOIDMicrosoftSGC$kSecOIDNameConstraints$kSecOIDNetscapeCertSequence$kSecOIDNetscapeCertType$kSecOIDNetscapeSGC$kSecOIDOCSPSigning$kSecOIDOrganizationName$kSecOIDOrganizationalUnitName$kSecOIDPolicyConstraints$kSecOIDPolicyMappings$kSecOIDPrivateKeyUsagePeriod$kSecOIDQC_Statements$kSecOIDSRVName$kSecOIDSerialNumber$kSecOIDServerAuth$kSecOIDStateProvinceName$kSecOIDStreetAddress$kSecOIDSubjectAltName$kSecOIDSubjectDirectoryAttributes$kSecOIDSubjectEmailAddress$kSecOIDSubjectInfoAccess$kSecOIDSubjectKeyIdentifier$kSecOIDSubjectPicture$kSecOIDSubjectSignatureBitmap$kSecOIDSurname$kSecOIDTimeStamping$kSecOIDTitle$kSecOIDUseExemptions$kSecOIDX509V1CertificateIssuerUniqueId$kSecOIDX509V1CertificateSubjectUniqueId$kSecOIDX509V1IssuerName$kSecOIDX509V1IssuerNameCStruct$kSecOIDX509V1IssuerNameLDAP$kSecOIDX509V1IssuerNameStd$kSecOIDX509V1SerialNumber$kSecOIDX509V1Signature$kSecOIDX509V1SignatureAlgorithm$kSecOIDX509V1SignatureAlgorithmParameters$kSecOIDX509V1SignatureAlgorithmTBS$kSecOIDX509V1SignatureCStruct$kSecOIDX509V1SignatureStruct$kSecOIDX509V1SubjectName$kSecOIDX509V1SubjectNameCStruct$kSecOIDX509V1SubjectNameLDAP$kSecOIDX509V1SubjectNameStd$kSecOIDX509V1SubjectPublicKey$kSecOIDX509V1SubjectPublicKeyAlgorithm$kSecOIDX509V1SubjectPublicKeyAlgorithmParameters$kSecOIDX509V1SubjectPublicKeyCStruct$kSecOIDX509V1ValidityNotAfter$kSecOIDX509V1ValidityNotBefore$kSecOIDX509V1Version$kSecOIDX509V3Certificate$kSecOIDX509V3CertificateCStruct$kSecOIDX509V3CertificateExtensionCStruct$kSecOIDX509V3CertificateExtensionCritical$kSecOIDX509V3CertificateExtensionId$kSecOIDX509V3CertificateExtensionStruct$kSecOIDX509V3CertificateExtensionType$kSecOIDX509V3CertificateExtensionValue$kSecOIDX509V3CertificateExtensionsCStruct$kSecOIDX509V3CertificateExtensionsStruct$kSecOIDX509V3CertificateNumberOfExtensions$kSecOIDX509V3SignedCertificate$kSecOIDX509V3SignedCertificateCStruct$kSecPaddingKey$kSecPaddingNoneKey$kSecPaddingOAEPKey$kSecPaddingPKCS1Key$kSecPaddingPKCS5Key$kSecPaddingPKCS7Key$kSecPolicyAppleCodeSigning$kSecPolicyAppleEAP$kSecPolicyAppleIDValidation$kSecPolicyAppleIPsec$kSecPolicyApplePKINITClient$kSecPolicyApplePKINITServer$kSecPolicyApplePassbookSigning$kSecPolicyApplePayIssuerEncryption$kSecPolicyAppleRevocation$
kSecPolicyAppleSMIME$kSecPolicyAppleSSL$kSecPolicyAppleTimeStamping$kSecPolicyAppleX509Basic$kSecPolicyAppleiChat$kSecPolicyClient$kSecPolicyKU_CRLSign$kSecPolicyKU_DataEncipherment$kSecPolicyKU_DecipherOnly$kSecPolicyKU_DigitalSignature$kSecPolicyKU_EncipherOnly$kSecPolicyKU_KeyAgreement$kSecPolicyKU_KeyCertSign$kSecPolicyKU_KeyEncipherment$kSecPolicyKU_NonRepudiation$kSecPolicyMacAppStoreReceipt$kSecPolicyName$kSecPolicyOid$kSecPolicyRevocationFlags$kSecPolicyTeamIdentifier$kSecPrivateKeyAttrs$kSecPropertyKeyLabel$kSecPropertyKeyLocalizedLabel$kSecPropertyKeyType$kSecPropertyKeyValue$kSecPropertyTypeArray$kSecPropertyTypeData$kSecPropertyTypeDate$kSecPropertyTypeError$kSecPropertyTypeNumber$kSecPropertyTypeSection$kSecPropertyTypeString$kSecPropertyTypeSuccess$kSecPropertyTypeTitle$kSecPropertyTypeURL$kSecPropertyTypeWarning$kSecPublicKeyAttrs$kSecRandomDefault$kSecReturnAttributes$kSecReturnData$kSecReturnPersistentRef$kSecReturnRef$kSecSignatureAttributeName$kSecTransformAbortAttributeName$kSecTransformAbortOriginatorKey$kSecTransformActionAttributeNotification$kSecTransformActionAttributeValidation$kSecTransformActionCanExecute$kSecTransformActionExternalizeExtraData$kSecTransformActionFinalize$kSecTransformActionInternalizeExtraData$kSecTransformActionProcessData$kSecTransformActionStartingExecution$kSecTransformDebugAttributeName$kSecTransformErrorDomain$kSecTransformInputAttributeName$kSecTransformOutputAttributeName$kSecTransformPreviousErrorKey$kSecTransformTransformName$kSecTrustCertificateTransparency$kSecTrustCertificateTransparencyWhiteList$kSecTrustEvaluationDate$kSecTrustExtendedValidation$kSecTrustOrganizationName$kSecTrustResultValue$kSecTrustRevocationChecked$kSecTrustRevocationValidUntilDate$kSecUseAuthenticationContext$kSecUseAuthenticationUI$kSecUseAuthenticationUIAllow$kSecUseAuthenticationUIFail$kSecUseAuthenticationUISkip$kSecUseDataProtectionKeychain$kSecUseItemList$kSecUseKeychain$kSecUseNoAuthenticationUI$kSecUseOperationPrompt$kSecValueData$kSecValuePersistentRef$kSecValueRef$kSecZLibEncoding$"""
enums = """$SEC_KEYCHAIN_SETTINGS_VERS1@1$SEC_KEY_IMPORT_EXPORT_PARAMS_VERSION@0$SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA@17$SSL_DHE_DSS_WITH_3DES_EDE_CBC_SHA@19$SSL_DHE_DSS_WITH_DES_CBC_SHA@18$SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA@20$SSL_DHE_RSA_WITH_3DES_EDE_CBC_SHA@22$SSL_DHE_RSA_WITH_DES_CBC_SHA@21$SSL_DH_DSS_EXPORT_WITH_DES40_CBC_SHA@11$SSL_DH_DSS_WITH_3DES_EDE_CBC_SHA@13$SSL_DH_DSS_WITH_DES_CBC_SHA@12$SSL_DH_RSA_EXPORT_WITH_DES40_CBC_SHA@14$SSL_DH_RSA_WITH_3DES_EDE_CBC_SHA@16$SSL_DH_RSA_WITH_DES_CBC_SHA@15$SSL_DH_anon_EXPORT_WITH_DES40_CBC_SHA@25$SSL_DH_anon_EXPORT_WITH_RC4_40_MD5@23$SSL_DH_anon_WITH_3DES_EDE_CBC_SHA@27$SSL_DH_anon_WITH_DES_CBC_SHA@26$SSL_DH_anon_WITH_RC4_128_MD5@24$SSL_FORTEZZA_DMS_WITH_FORTEZZA_CBC_SHA@29$SSL_FORTEZZA_DMS_WITH_NULL_SHA@28$SSL_NO_SUCH_CIPHERSUITE@65535$SSL_NULL_WITH_NULL_NULL@0$SSL_RSA_EXPORT_WITH_DES40_CBC_SHA@8$SSL_RSA_EXPORT_WITH_RC2_CBC_40_MD5@6$SSL_RSA_EXPORT_WITH_RC4_40_MD5@3$SSL_RSA_WITH_3DES_EDE_CBC_MD5@65411$SSL_RSA_WITH_3DES_EDE_CBC_SHA@10$SSL_RSA_WITH_DES_CBC_MD5@65410$SSL_RSA_WITH_DES_CBC_SHA@9$SSL_RSA_WITH_IDEA_CBC_MD5@65409$SSL_RSA_WITH_IDEA_CBC_SHA@7$SSL_RSA_WITH_NULL_MD5@1$SSL_RSA_WITH_NULL_SHA@2$SSL_RSA_WITH_RC2_CBC_MD5@65408$SSL_RSA_WITH_RC4_128_MD5@4$SSL_RSA_WITH_RC4_128_SHA@5$TLS_AES_128_CCM_8_SHA256@4869$TLS_AES_128_CCM_SHA256@4868$TLS_AES_128_GCM_SHA256@4865$TLS_AES_256_GCM_SHA384@4866$TLS_CHACHA20_POLY1305_SHA256@4867$TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA@19$TLS_DHE_DSS_WITH_AES_128_CBC_SHA@50$TLS_DHE_DSS_WITH_AES_128_CBC_SHA256@64$TLS_DHE_DSS_WITH_AES_128_GCM_SHA256@162$TLS_DHE_DSS_WITH_AES_256_CBC_SHA@56$TLS_DHE_DSS_WITH_AES_256_CBC_SHA256@106$TLS_DHE_DSS_WITH_AES_256_GCM_SHA384@163$TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA@143$TLS_DHE_PSK_WITH_AES_128_CBC_SHA@144$TLS_DHE_PSK_WITH_AES_128_CBC_SHA256@178$TLS_DHE_PSK_WITH_AES_128_GCM_SHA256@170$TLS_DHE_PSK_WITH_AES_256_CBC_SHA@145$TLS_DHE_PSK_WITH_AES_256_CBC_SHA384@179$TLS_DHE_PSK_WITH_AES_256_GCM_SHA384@171$TLS_DHE_PSK_WITH_NULL_SHA@45$TLS_DHE_PSK_WITH_NULL_SHA256@180$TLS_DHE_PSK_WITH_NULL_SHA384@181$TLS_DHE_PSK_WITH_RC4_128_SHA@142$TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA@22$TLS_DHE_RSA_WITH_AES_128_CBC_SHA@51$TLS_DHE_RSA_WITH_AES_128_CBC_SHA256@103$TLS_DHE_RSA_WITH_AES_128_GCM_SHA256@158$TLS_DHE_RSA_WITH_AES_256_CBC_SHA@57$TLS_DHE_RSA_WITH_AES_256_CBC_SHA256@107$TLS_DHE_RSA_WITH_AES_256_GCM_SHA384@159$TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA@13$TLS_DH_DSS_WITH_AES_128_CBC_SHA@48$TLS_DH_DSS_WITH_AES_128_CBC_SHA256@62$TLS_DH_DSS_WITH_AES_128_GCM_SHA256@164$TLS_DH_DSS_WITH_AES_256_CBC_SHA@54$TLS_DH_DSS_WITH_AES_256_CBC_SHA256@104$TLS_DH_DSS_WITH_AES_256_GCM_SHA384@165$TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA@16$TLS_DH_RSA_WITH_AES_128_CBC_SHA@49$TLS_DH_RSA_WITH_AES_128_CBC_SHA256@63$TLS_DH_RSA_WITH_AES_128_GCM_SHA256@160$TLS_DH_RSA_WITH_AES_256_CBC_SHA@55$TLS_DH_RSA_WITH_AES_256_CBC_SHA256@105$TLS_DH_RSA_WITH_AES_256_GCM_SHA384@161$TLS_DH_anon_WITH_3DES_EDE_CBC_SHA@27$TLS_DH_anon_WITH_AES_128_CBC_SHA@52$TLS_DH_anon_WITH_AES_128_CBC_SHA256@108$TLS_DH_anon_WITH_AES_128_GCM_SHA256@166$TLS_DH_anon_WITH_AES_256_CBC_SHA@58$TLS_DH_anon_WITH_AES_256_CBC_SHA256@109$TLS_DH_anon_WITH_AES_256_GCM_SHA384@167$TLS_DH_anon_WITH_RC4_128_MD5@24$TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA@49160$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA@49161$TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256@49187$TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256@49195$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA@49162$TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384@49188$TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384@49196$TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256@52393$TLS_ECDHE_ECDSA_WITH_NULL_SH
A@49158$TLS_ECDHE_ECDSA_WITH_RC4_128_SHA@49159$TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA@49205$TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA@49206$TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA@49170$TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA@49171$TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256@49191$TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256@49199$TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA@49172$TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384@49192$TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384@49200$TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256@52392$TLS_ECDHE_RSA_WITH_NULL_SHA@49168$TLS_ECDHE_RSA_WITH_RC4_128_SHA@49169$TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA@49155$TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA@49156$TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256@49189$TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256@49197$TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA@49157$TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384@49190$TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384@49198$TLS_ECDH_ECDSA_WITH_NULL_SHA@49153$TLS_ECDH_ECDSA_WITH_RC4_128_SHA@49154$TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA@49165$TLS_ECDH_RSA_WITH_AES_128_CBC_SHA@49166$TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256@49193$TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256@49201$TLS_ECDH_RSA_WITH_AES_256_CBC_SHA@49167$TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384@49194$TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384@49202$TLS_ECDH_RSA_WITH_NULL_SHA@49163$TLS_ECDH_RSA_WITH_RC4_128_SHA@49164$TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA@49175$TLS_ECDH_anon_WITH_AES_128_CBC_SHA@49176$TLS_ECDH_anon_WITH_AES_256_CBC_SHA@49177$TLS_ECDH_anon_WITH_NULL_SHA@49173$TLS_ECDH_anon_WITH_RC4_128_SHA@49174$TLS_EMPTY_RENEGOTIATION_INFO_SCSV@255$TLS_NULL_WITH_NULL_NULL@0$TLS_PSK_WITH_3DES_EDE_CBC_SHA@139$TLS_PSK_WITH_AES_128_CBC_SHA@140$TLS_PSK_WITH_AES_128_CBC_SHA256@174$TLS_PSK_WITH_AES_128_GCM_SHA256@168$TLS_PSK_WITH_AES_256_CBC_SHA@141$TLS_PSK_WITH_AES_256_CBC_SHA384@175$TLS_PSK_WITH_AES_256_GCM_SHA384@169$TLS_PSK_WITH_CHACHA20_POLY1305_SHA256@52395$TLS_PSK_WITH_NULL_SHA@44$TLS_PSK_WITH_NULL_SHA256@176$TLS_PSK_WITH_NULL_SHA384@177$TLS_PSK_WITH_RC4_128_SHA@138$TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA@147$TLS_RSA_PSK_WITH_AES_128_CBC_SHA@148$TLS_RSA_PSK_WITH_AES_128_CBC_SHA256@182$TLS_RSA_PSK_WITH_AES_128_GCM_SHA256@172$TLS_RSA_PSK_WITH_AES_256_CBC_SHA@149$TLS_RSA_PSK_WITH_AES_256_CBC_SHA384@183$TLS_RSA_PSK_WITH_AES_256_GCM_SHA384@173$TLS_RSA_PSK_WITH_NULL_SHA@46$TLS_RSA_PSK_WITH_NULL_SHA256@184$TLS_RSA_PSK_WITH_NULL_SHA384@185$TLS_RSA_PSK_WITH_RC4_128_SHA@146$TLS_RSA_WITH_3DES_EDE_CBC_SHA@10$TLS_RSA_WITH_AES_128_CBC_SHA@47$TLS_RSA_WITH_AES_128_CBC_SHA256@60$TLS_RSA_WITH_AES_128_GCM_SHA256@156$TLS_RSA_WITH_AES_256_CBC_SHA@53$TLS_RSA_WITH_AES_256_CBC_SHA256@61$TLS_RSA_WITH_AES_256_GCM_SHA384@157$TLS_RSA_WITH_NULL_MD5@1$TLS_RSA_WITH_NULL_SHA@2$TLS_RSA_WITH_NULL_SHA256@59$TLS_RSA_WITH_RC4_128_MD5@4$TLS_RSA_WITH_RC4_128_SHA@5$callerSecuritySession@-1$errAuthorizationBadAddress@-60033$errAuthorizationCanceled@-60006$errAuthorizationDenied@-60005$errAuthorizationExternalizeNotAllowed@-60009$errAuthorizationInteractionNotAllowed@-60007$errAuthorizationInternal@-60008$errAuthorizationInternalizeNotAllowed@-60010$errAuthorizationInvalidFlags@-60011$errAuthorizationInvalidPointer@-60004$errAuthorizationInvalidRef@-60002$errAuthorizationInvalidSet@-60001$errAuthorizationInvalidTag@-60003$errAuthorizationSuccess@0$errAuthorizationToolEnvironmentError@-60032$errAuthorizationToolExecuteFailure@-60031$errSSLATSCertificateHashAlgorithmViolation@-9885$errSSLATSCertificateTrustViolation@-9886$errSSLATSCiphersuiteViolation@-9882$errSSLATSLeafCertificateHashAlgorithmViolation@-9884$errSSLATSMinimumKeySizeViolation@-9883$errSSLATSMinimumVersionViolation@-9881$er
rSSLATSViolation@-9880$errSSLBadCert@-9808$errSSLBadCertificateStatusResponse@-9862$errSSLBadCipherSuite@-9818$errSSLBadConfiguration@-9848$errSSLBadRecordMac@-9846$errSSLBufferOverflow@-9817$errSSLCertExpired@-9814$errSSLCertNotYetValid@-9815$errSSLCertificateRequired@-9863$errSSLClientCertRequested@-9842$errSSLClientHelloReceived@-9851$errSSLClosedAbort@-9806$errSSLClosedGraceful@-9805$errSSLClosedNoNotify@-9816$errSSLConfigurationFailed@-9854$errSSLConnectionRefused@-9844$errSSLCrypto@-9809$errSSLDecodeError@-9859$errSSLDecompressFail@-9857$errSSLDecryptionFail@-9845$errSSLFatalAlert@-9802$errSSLHandshakeFail@-9858$errSSLHostNameMismatch@-9843$errSSLIllegalParam@-9830$errSSLInappropriateFallback@-9860$errSSLInternal@-9810$errSSLMissingExtension@-9861$errSSLModuleAttach@-9811$errSSLNegotiation@-9801$errSSLNetworkTimeout@-9853$errSSLNoRootCert@-9813$errSSLPeerAccessDenied@-9832$errSSLPeerAuthCompleted@-9841$errSSLPeerBadCert@-9825$errSSLPeerBadRecordMac@-9820$errSSLPeerCertExpired@-9828$errSSLPeerCertRevoked@-9827$errSSLPeerCertUnknown@-9829$errSSLPeerDecodeError@-9833$errSSLPeerDecompressFail@-9823$errSSLPeerDecryptError@-9834$errSSLPeerDecryptionFail@-9821$errSSLPeerExportRestriction@-9835$errSSLPeerHandshakeFail@-9824$errSSLPeerInsufficientSecurity@-9837$errSSLPeerInternalError@-9838$errSSLPeerNoRenegotiation@-9840$errSSLPeerProtocolVersion@-9836$errSSLPeerRecordOverflow@-9822$errSSLPeerUnexpectedMsg@-9819$errSSLPeerUnknownCA@-9831$errSSLPeerUnsupportedCert@-9826$errSSLPeerUserCancelled@-9839$errSSLProtocol@-9800$errSSLRecordOverflow@-9847$errSSLSessionNotFound@-9804$errSSLTransportReset@-9852$errSSLUnexpectedMessage@-9856$errSSLUnexpectedRecord@-9849$errSSLUnknownPSKIdentity@-9864$errSSLUnknownRootCert@-9812$errSSLUnrecognizedName@-9865$errSSLUnsupportedExtension@-9855$errSSLWeakPeerEphemeralDHKey@-9850$errSSLWouldBlock@-9803$errSSLXCertChainInvalid@-9807$errSecACLAddFailed@-67698$errSecACLChangeFailed@-67699$errSecACLDeleteFailed@-67696$errSecACLNotSimple@-25240$errSecACLReplaceFailed@-67697$errSecAddinLoadFailed@-67711$errSecAddinUnloadFailed@-67714$errSecAlgorithmMismatch@-67730$errSecAllocate@-108$errSecAlreadyLoggedIn@-67814$errSecAppleAddAppACLSubject@-67589$errSecAppleInvalidKeyEndDate@-67593$errSecAppleInvalidKeyStartDate@-67592$errSecApplePublicKeyIncomplete@-67590$errSecAppleSSLv2Rollback@-67595$errSecAppleSignatureMismatch@-67591$errSecAttachHandleBusy@-67728$errSecAttributeNotInContext@-67720$errSecAuthFailed@-25293$errSecBadReq@-909$errSecBlockSizeMismatch@-67810$errSecBufferTooSmall@-25301$errSecCRLAlreadySigned@-67684$errSecCRLBadURI@-67617$errSecCRLExpired@-67613$errSecCRLNotFound@-67615$errSecCRLNotTrusted@-67620$errSecCRLNotValidYet@-67614$errSecCRLPolicyFailed@-67621$errSecCRLServerDown@-67616$errSecCSAmbiguousBundleFormat@-67011$errSecCSBadBundleFormat@-67028$errSecCSBadCallbackValue@-67020$errSecCSBadDictionaryFormat@-67058$errSecCSBadDiskImageFormat@-67001$errSecCSBadFrameworkVersion@-67009$errSecCSBadLVArch@-67017$errSecCSBadMainExecutable@-67010$errSecCSBadNestedCode@-67021$errSecCSBadObjectFormat@-67049$errSecCSBadResource@-67054$errSecCSBadTeamIdentifier@-66997$errSecCSCMSTooLarge@-67036$errSecCSCancelled@-67006$errSecCSDBAccess@-67032$errSecCSDBDenied@-67033$errSecCSDSStoreSymlink@-67012$errSecCSDbCorrupt@-67024$errSecCSFileHardQuarantined@-67026$errSecCSGuestInvalid@-67063$errSecCSHelperFailed@-67019$errSecCSHostProtocolContradiction@-67043$errSecCSHostProtocolDedicationError@-67042$errSecCSHostProtocolInvalidAttribute@-67031$errSecCSHostProtocolInvalidHash@
-67035$errSecCSHostProtocolNotProxy@-67041$errSecCSHostProtocolRelativePath@-67044$errSecCSHostProtocolStateError@-67040$errSecCSHostProtocolUnrelated@-67039$errSecCSHostReject@-67047$errSecCSInfoPlistFailed@-67030$errSecCSInternalError@-67048$errSecCSInvalidAssociatedFileData@-66999$errSecCSInvalidAttributeValues@-67066$errSecCSInvalidEntitlements@-66994$errSecCSInvalidFlags@-67070$errSecCSInvalidObjectRef@-67071$errSecCSInvalidPlatform@-67005$errSecCSInvalidRuntimeVersion@-66993$errSecCSInvalidSymlink@-67003$errSecCSInvalidTeamIdentifier@-66998$errSecCSMultipleGuests@-67064$errSecCSNoMainExecutable@-67029$errSecCSNoMatches@-67027$errSecCSNoSuchCode@-67065$errSecCSNotAHost@-67046$errSecCSNotAppLike@-67002$errSecCSNotSupported@-67037$errSecCSObjectRequired@-67069$errSecCSOutdated@-67025$errSecCSRegularFile@-67015$errSecCSReqFailed@-67050$errSecCSReqInvalid@-67052$errSecCSReqUnsupported@-67051$errSecCSResourceDirectoryFailed@-67023$errSecCSResourceNotSupported@-67016$errSecCSResourceRulesInvalid@-67053$errSecCSResourcesInvalid@-67055$errSecCSResourcesNotFound@-67056$errSecCSResourcesNotSealed@-67057$errSecCSRevokedNotarization@-66992$errSecCSSignatureFailed@-67061$errSecCSSignatureInvalid@-67045$errSecCSSignatureNotVerifiable@-67060$errSecCSSignatureUnsupported@-67059$errSecCSSignatureUntrusted@-66996$errSecCSStaticCodeChanged@-67034$errSecCSStaticCodeNotFound@-67068$errSecCSTooBig@-67004$errSecCSUnimplemented@-67072$errSecCSUnsealedAppRoot@-67014$errSecCSUnsealedFrameworkRoot@-67008$errSecCSUnsigned@-67062$errSecCSUnsignedNestedCode@-67022$errSecCSUnsupportedDigestAlgorithm@-67000$errSecCSUnsupportedGuestAttributes@-67067$errSecCSVetoed@-67018$errSecCSWeakResourceEnvelope@-67007$errSecCSWeakResourceRules@-67013$errSecCallbackFailed@-67695$errSecCertificateCannotOperate@-67817$errSecCertificateExpired@-67818$errSecCertificateNameNotAllowed@-67900$errSecCertificateNotValidYet@-67819$errSecCertificatePolicyNotAllowed@-67899$errSecCertificateRevoked@-67820$errSecCertificateSuspended@-67821$errSecCertificateValidityPeriodTooLong@-67901$errSecCodeSigningBadCertChainLength@-67647$errSecCodeSigningBadPathLengthConstraint@-67649$errSecCodeSigningDevelopment@-67651$errSecCodeSigningNoBasicConstraints@-67648$errSecCodeSigningNoExtendedKeyUsage@-67650$errSecConversionError@-67594$errSecCoreFoundationUnknown@-4960$errSecCreateChainFailed@-25318$errSecDataNotAvailable@-25316$errSecDataNotModifiable@-25317$errSecDataTooLarge@-25302$errSecDatabaseLocked@-67869$errSecDatastoreIsOpen@-67870$errSecDecode@-26275$errSecDeviceError@-67727$errSecDeviceFailed@-67588$errSecDeviceReset@-67587$errSecDeviceVerifyFailed@-67812$errSecDiskFull@-34$errSecDskFull@-34$errSecDuplicateCallback@-25297$errSecDuplicateItem@-25299$errSecDuplicateKeychain@-25296$errSecEMMLoadFailed@-67709$errSecEMMUnloadFailed@-67710$errSecEndOfData@-67634$errSecEventNotificationCallbackNotFound@-67723$errSecExtendedKeyUsageNotCritical@-67881$errSecFieldSpecifiedMultiple@-67866$errSecFileTooBig@-67597$errSecFunctionFailed@-67677$errSecFunctionIntegrityFail@-67670$errSecHostNameMismatch@-67602$errSecIDPFailure@-67622$errSecIO@-36$errSecInDarkWake@-25320$errSecIncompatibleDatabaseBlob@-67600$errSecIncompatibleFieldFormat@-67867$errSecIncompatibleKeyBlob@-67601$errSecIncompatibleVersion@-67704$errSecIncompleteCertRevocationCheck@-67635$errSecInputLengthError@-67724$errSecInsufficientClientID@-67586$errSecInsufficientCredentials@-67822$errSecInteractionNotAllowed@-25308$errSecInteractionRequired@-25315$errSecInternalComponent@-2070$errSecInternalError
@-67671$errSecInvaldCRLAuthority@-67827$errSecInvalidACL@-67702$errSecInvalidAccessCredentials@-67700$errSecInvalidAccessRequest@-67876$errSecInvalidAction@-67823$errSecInvalidAddinFunctionTable@-67716$errSecInvalidAlgorithm@-67747$errSecInvalidAlgorithmParms@-67770$errSecInvalidAttributeAccessCredentials@-67796$errSecInvalidAttributeBase@-67788$errSecInvalidAttributeBlockSize@-67764$errSecInvalidAttributeDLDBHandle@-67794$errSecInvalidAttributeEffectiveBits@-67778$errSecInvalidAttributeEndDate@-67782$errSecInvalidAttributeInitVector@-67750$errSecInvalidAttributeIterationCount@-67792$errSecInvalidAttributeKey@-67748$errSecInvalidAttributeKeyLength@-67762$errSecInvalidAttributeKeyType@-67774$errSecInvalidAttributeLabel@-67772$errSecInvalidAttributeMode@-67776$errSecInvalidAttributeOutputSize@-67766$errSecInvalidAttributePadding@-67754$errSecInvalidAttributePassphrase@-67760$errSecInvalidAttributePrime@-67786$errSecInvalidAttributePrivateKeyFormat@-67800$errSecInvalidAttributePublicKeyFormat@-67798$errSecInvalidAttributeRandom@-67756$errSecInvalidAttributeRounds@-67768$errSecInvalidAttributeSalt@-67752$errSecInvalidAttributeSeed@-67758$errSecInvalidAttributeStartDate@-67780$errSecInvalidAttributeSubprime@-67790$errSecInvalidAttributeSymmetricKeyFormat@-67802$errSecInvalidAttributeVersion@-67784$errSecInvalidAttributeWrappedKeyFormat@-67804$errSecInvalidAuthority@-67824$errSecInvalidAuthorityKeyID@-67606$errSecInvalidBaseACLs@-67851$errSecInvalidBundleInfo@-67857$errSecInvalidCRL@-67830$errSecInvalidCRLEncoding@-67828$errSecInvalidCRLGroup@-67816$errSecInvalidCRLIndex@-67858$errSecInvalidCRLType@-67829$errSecInvalidCallback@-25298$errSecInvalidCertAuthority@-67826$errSecInvalidCertificateGroup@-67691$errSecInvalidCertificateRef@-67690$errSecInvalidContext@-67746$errSecInvalidDBList@-67681$errSecInvalidDBLocation@-67875$errSecInvalidData@-67673$errSecInvalidDatabaseBlob@-67598$errSecInvalidDigestAlgorithm@-67815$errSecInvalidEncoding@-67853$errSecInvalidExtendedKeyUsage@-67609$errSecInvalidFormType@-67831$errSecInvalidGUID@-67679$errSecInvalidHandle@-67680$errSecInvalidHandleUsage@-67668$errSecInvalidID@-67832$errSecInvalidIDLinkage@-67610$errSecInvalidIdentifier@-67833$errSecInvalidIndex@-67834$errSecInvalidIndexInfo@-67877$errSecInvalidInputVector@-67744$errSecInvalidItemRef@-25304$errSecInvalidKeyAttributeMask@-67738$errSecInvalidKeyBlob@-67599$errSecInvalidKeyFormat@-67742$errSecInvalidKeyHierarchy@-67713$errSecInvalidKeyLabel@-67740$errSecInvalidKeyRef@-67712$errSecInvalidKeyUsageForPolicy@-67608$errSecInvalidKeyUsageMask@-67736$errSecInvalidKeychain@-25295$errSecInvalidLoginName@-67813$errSecInvalidModifyMode@-67879$errSecInvalidName@-67689$errSecInvalidNetworkAddress@-67683$errSecInvalidNewOwner@-67878$errSecInvalidNumberOfFields@-67685$errSecInvalidOutputVector@-67745$errSecInvalidOwnerEdit@-25244$errSecInvalidPVC@-67708$errSecInvalidParsingModule@-67868$errSecInvalidPassthroughID@-67682$errSecInvalidPasswordRef@-25261$errSecInvalidPointer@-67675$errSecInvalidPolicyIdentifiers@-67835$errSecInvalidPrefsDomain@-25319$errSecInvalidQuery@-67693$errSecInvalidReason@-67837$errSecInvalidRecord@-67701$errSecInvalidRequestInputs@-67838$errSecInvalidRequestor@-67855$errSecInvalidResponseVector@-67839$errSecInvalidRoot@-67612$errSecInvalidSampleValue@-67703$errSecInvalidScope@-67706$errSecInvalidSearchRef@-25305$errSecInvalidServiceMask@-67717$errSecInvalidSignature@-67688$errSecInvalidStopOnPolicy@-67840$errSecInvalidSubServiceID@-67719$errSecInvalidSubjectKeyID@-67607$errSecInvalidSubjectName@-6
7655$errSecInvalidTimeString@-67836$errSecInvalidTrustSetting@-25242$errSecInvalidTrustSettings@-25262$errSecInvalidTuple@-67841$errSecInvalidTupleCredendtials@-67852$errSecInvalidTupleGroup@-67850$errSecInvalidValidityPeriod@-67854$errSecInvalidValue@-67694$errSecItemNotFound@-25300$errSecKeyBlobTypeIncorrect@-67732$errSecKeyHeaderInconsistent@-67733$errSecKeyIsSensitive@-25258$errSecKeySizeNotAllowed@-25311$errSecKeyUsageIncorrect@-67731$errSecLibraryReferenceNotFound@-67715$errSecMDSError@-67674$errSecMemoryError@-67672$errSecMissingAlgorithmParms@-67771$errSecMissingAttributeAccessCredentials@-67797$errSecMissingAttributeBase@-67789$errSecMissingAttributeBlockSize@-67765$errSecMissingAttributeDLDBHandle@-67795$errSecMissingAttributeEffectiveBits@-67779$errSecMissingAttributeEndDate@-67783$errSecMissingAttributeInitVector@-67751$errSecMissingAttributeIterationCount@-67793$errSecMissingAttributeKey@-67749$errSecMissingAttributeKeyLength@-67763$errSecMissingAttributeKeyType@-67775$errSecMissingAttributeLabel@-67773$errSecMissingAttributeMode@-67777$errSecMissingAttributeOutputSize@-67767$errSecMissingAttributePadding@-67755$errSecMissingAttributePassphrase@-67761$errSecMissingAttributePrime@-67787$errSecMissingAttributePrivateKeyFormat@-67801$errSecMissingAttributePublicKeyFormat@-67799$errSecMissingAttributeRandom@-67757$errSecMissingAttributeRounds@-67769$errSecMissingAttributeSalt@-67753$errSecMissingAttributeSeed@-67759$errSecMissingAttributeStartDate@-67781$errSecMissingAttributeSubprime@-67791$errSecMissingAttributeSymmetricKeyFormat@-67803$errSecMissingAttributeVersion@-67785$errSecMissingAttributeWrappedKeyFormat@-67805$errSecMissingEntitlement@-34018$errSecMissingRequiredExtension@-67880$errSecMissingValue@-67871$errSecMobileMeCSRVerifyFailure@-67665$errSecMobileMeFailedConsistencyCheck@-67666$errSecMobileMeNoRequestPending@-67664$errSecMobileMeRequestAlreadyPending@-67663$errSecMobileMeRequestQueued@-67657$errSecMobileMeRequestRedirected@-67658$errSecMobileMeServerAlreadyExists@-67661$errSecMobileMeServerError@-67659$errSecMobileMeServerNotAvailable@-67660$errSecMobileMeServerServiceErr@-67662$errSecModuleManagerInitializeFailed@-67721$errSecModuleManagerNotFound@-67722$errSecModuleManifestVerifyFailed@-67678$errSecModuleNotLoaded@-67718$errSecMultipleExecSegments@-66995$errSecMultiplePrivKeys@-25259$errSecMultipleValuesUnsupported@-67842$errSecNetworkFailure@-67636$errSecNoAccessForItem@-25243$errSecNoBasicConstraints@-67604$errSecNoBasicConstraintsCA@-67605$errSecNoCertificateModule@-25313$errSecNoDefaultAuthority@-67844$errSecNoDefaultKeychain@-25307$errSecNoFieldValues@-67859$errSecNoPolicyModule@-25314$errSecNoStorageModule@-25312$errSecNoSuchAttr@-25303$errSecNoSuchClass@-25306$errSecNoSuchKeychain@-25294$errSecNoTrustSettings@-25263$errSecNotAvailable@-25291$errSecNotInitialized@-67667$errSecNotLoggedIn@-67729$errSecNotSigner@-26267$errSecNotTrusted@-67843$errSecOCSPBadRequest@-67631$errSecOCSPBadResponse@-67630$errSecOCSPNoSigner@-67640$errSecOCSPNotTrustedToAnchor@-67637$errSecOCSPResponderInternalError@-67642$errSecOCSPResponderMalformedReq@-67641$errSecOCSPResponderSignatureRequired@-67644$errSecOCSPResponderTryLater@-67643$errSecOCSPResponderUnauthorized@-67645$errSecOCSPResponseNonceMismatch@-67646$errSecOCSPSignatureError@-67639$errSecOCSPStatusUnrecognized@-67633$errSecOCSPUnavailable@-67632$errSecOpWr@-49$errSecOutputLengthError@-67725$errSecPVCAlreadyConfigured@-67707$errSecPVCReferentNotFound@-67669$errSecParam@-50$errSecPassphraseRequired@-25260$errSecPathLengt
hConstraintExceeded@-67611$errSecPkcs12VerifyFailure@-25264$errSecPolicyNotFound@-25241$errSecPrivilegeNotGranted@-67705$errSecPrivilegeNotSupported@-67726$errSecPublicKeyInconsistent@-67811$errSecQuerySizeUnknown@-67809$errSecQuotaExceeded@-67596$errSecReadOnly@-25292$errSecReadOnlyAttr@-25309$errSecRecordModified@-67638$errSecRejectedForm@-67845$errSecRequestDescriptor@-67856$errSecRequestLost@-67846$errSecRequestRejected@-67847$errSecResourceSignBadCertChainLength@-67652$errSecResourceSignBadExtKeyUsage@-67653$errSecSMIMEBadExtendedKeyUsage@-67624$errSecSMIMEBadKeyUsage@-67625$errSecSMIMEEmailAddressesNotFound@-67623$errSecSMIMEKeyUsageNotCritical@-67626$errSecSMIMENoEmailAddress@-67627$errSecSMIMESubjAltNameNotCritical@-67628$errSecSSLBadExtendedKeyUsage@-67629$errSecSelfCheckFailed@-67676$errSecServiceNotAvailable@-67585$errSecSigningTimeMissing@-67894$errSecStagedOperationInProgress@-67806$errSecStagedOperationNotStarted@-67807$errSecSuccess@0$errSecTagNotFound@-67692$errSecTimestampAddInfoNotAvailable@-67892$errSecTimestampBadAlg@-67886$errSecTimestampBadDataFormat@-67888$errSecTimestampBadRequest@-67887$errSecTimestampInvalid@-67883$errSecTimestampMissing@-67882$errSecTimestampNotTrusted@-67884$errSecTimestampRejection@-67895$errSecTimestampRevocationNotification@-67898$errSecTimestampRevocationWarning@-67897$errSecTimestampServiceNotAvailable@-67885$errSecTimestampSystemFailure@-67893$errSecTimestampTimeNotAvailable@-67889$errSecTimestampUnacceptedExtension@-67891$errSecTimestampUnacceptedPolicy@-67890$errSecTimestampWaiting@-67896$errSecTrustNotAvailable@-25245$errSecTrustSettingDeny@-67654$errSecUnimplemented@-4$errSecUnknownCRLExtension@-67619$errSecUnknownCertExtension@-67618$errSecUnknownCriticalExtensionFlag@-67603$errSecUnknownFormat@-25257$errSecUnknownQualifiedCertStatement@-67656$errSecUnknownTag@-67687$errSecUnsupportedAddressType@-67848$errSecUnsupportedFieldFormat@-67860$errSecUnsupportedFormat@-25256$errSecUnsupportedIndexInfo@-67861$errSecUnsupportedKeyAttributeMask@-67739$errSecUnsupportedKeyFormat@-67734$errSecUnsupportedKeyLabel@-67741$errSecUnsupportedKeySize@-67735$errSecUnsupportedKeyUsageMask@-67737$errSecUnsupportedLocality@-67862$errSecUnsupportedNumAttributes@-67863$errSecUnsupportedNumIndexes@-67864$errSecUnsupportedNumRecordTypes@-67865$errSecUnsupportedNumSelectionPreds@-67873$errSecUnsupportedOperator@-67874$errSecUnsupportedQueryLimits@-67872$errSecUnsupportedService@-67849$errSecUnsupportedVectorOfBuffers@-67743$errSecUserCanceled@-128$errSecVerificationFailure@-67686$errSecVerifyActionFailed@-67825$errSecVerifyFailed@-67808$errSecWrPerm@-61$errSecWrongSecVersion@-25310$errSecureDownloadInvalidDownload@-20053$errSecureDownloadInvalidTicket@-20052$errSessionAuthorizationDenied@-60502$errSessionInvalidAttributes@-60501$errSessionInvalidId@-60500$errSessionSuccess@0$errSessionValueNotSet@-60503$kAlwaysAuthenticate@1$kAuthorizationExternalFormLength@32$kAuthorizationFlagCanNotPreAuthorize@1$kAuthorizationFlagDefaults@0$kAuthorizationFlagDestroyRights@8$kAuthorizationFlagExtendRights@2$kAuthorizationFlagInteractionAllowed@1$kAuthorizationFlagNoData@1048576$kAuthorizationFlagPartialRights@4$kAuthorizationFlagPreAuthorize@16$kAuthorizationResultAllow@0$kAuthorizationResultDeny@1$kAuthorizationResultUndefined@2$kAuthorizationResultUserCanceled@3$kCMSAttrAppleCodesigningHashAgility@16$kCMSAttrAppleCodesigningHashAgilityV2@32$kCMSAttrAppleExpirationTime@64$kCMSAttrNone@0$kCMSAttrSigningTime@8$kCMSAttrSmimeCapabilities@1$kCMSAttrSmimeEncryptionKeyPrefs@2$kCMSAtt
rSmimeMSEncryptionKeyPrefs@4$kCMSCertificateChain@2$kCMSCertificateChainWithRoot@3$kCMSCertificateChainWithRootOrFail@4$kCMSCertificateNone@0$kCMSCertificateSignerOnly@1$kCMSSignerInvalidCert@4$kCMSSignerInvalidIndex@5$kCMSSignerInvalidSignature@3$kCMSSignerNeedsDetachedContent@2$kCMSSignerUnsigned@0$kCMSSignerValid@1$kDTLSProtocol1@9$kDTLSProtocol12@11$kNeverAuthenticate@0$kSSLAborted@4$kSSLCiphersuiteGroupATS@3$kSSLCiphersuiteGroupATSCompatibility@4$kSSLCiphersuiteGroupCompatibility@1$kSSLCiphersuiteGroupDefault@0$kSSLCiphersuiteGroupLegacy@2$kSSLClientCertNone@0$kSSLClientCertRejected@3$kSSLClientCertRequested@1$kSSLClientCertSent@2$kSSLClientSide@1$kSSLClosed@3$kSSLConnected@2$kSSLDatagramType@1$kSSLHandshake@1$kSSLIdle@0$kSSLProtocol2@1$kSSLProtocol3@2$kSSLProtocol3Only@3$kSSLProtocolAll@6$kSSLProtocolUnknown@0$kSSLServerSide@0$kSSLSessionOptionAllowRenegotiation@8$kSSLSessionOptionAllowServerIdentityChange@5$kSSLSessionOptionBreakOnCertRequested@1$kSSLSessionOptionBreakOnClientAuth@2$kSSLSessionOptionBreakOnClientHello@7$kSSLSessionOptionBreakOnServerAuth@0$kSSLSessionOptionEnableSessionTickets@9$kSSLSessionOptionFallback@6$kSSLSessionOptionFalseStart@3$kSSLSessionOptionSendOneByteRecord@4$kSSLStreamType@0$kSec3DES192@192$kSecAES128@128$kSecAES192@192$kSecAES256@256$kSecAccessControlAnd@32768$kSecAccessControlApplicationPassword@2147483648$kSecAccessControlBiometryAny@2$kSecAccessControlBiometryCurrentSet@8$kSecAccessControlDevicePasscode@16$kSecAccessControlOr@16384$kSecAccessControlPrivateKeyUsage@1073741824$kSecAccessControlTouchIDAny@2$kSecAccessControlTouchIDCurrentSet@8$kSecAccessControlUserPresence@1$kSecAccessControlWatch@32$kSecAccountItemAttr@1633903476$kSecAddEvent@3$kSecAddEventMask@8$kSecAddressItemAttr@1633969266$kSecAlias@1634494835$kSecAppleSharePasswordItemClass@1634953328$kSecAuthenticationTypeAny@0$kSecAuthenticationTypeDPA@1633775716$kSecAuthenticationTypeDefault@1953261156$kSecAuthenticationTypeHTMLForm@1836216166$kSecAuthenticationTypeHTTPBasic@1886680168$kSecAuthenticationTypeHTTPDigest@1685353576$kSecAuthenticationTypeItemAttr@1635023216$kSecAuthenticationTypeMSN@1634628461$kSecAuthenticationTypeNTLM@1835824238$kSecAuthenticationTypeRPA@1633775730$kSecCSBasicValidateOnly@6$kSecCSCalculateCMSDigest@64$kSecCSCheckAllArchitectures@1$kSecCSCheckGatekeeperArchitectures@65$kSecCSCheckNestedCode@8$kSecCSCheckTrustedAnchors@134217728$kSecCSConsiderExpiration@2147483648$kSecCSContentInformation@16$kSecCSDedicatedHost@1$kSecCSDefaultFlags@0$kSecCSDoNotValidateExecutable@2$kSecCSDoNotValidateResources@4$kSecCSDynamicInformation@8$kSecCSEnforceRevocationChecks@1073741824$kSecCSFullReport@32$kSecCSGenerateGuestHash@2$kSecCSInternalInformation@1$kSecCSNoNetworkAccess@536870912$kSecCSQuickCheck@67108864$kSecCSReportProgress@268435456$kSecCSRequirementInformation@4$kSecCSRestrictSidebandData@512$kSecCSRestrictSymlinks@128$kSecCSRestrictToAppLike@256$kSecCSSigningInformation@2$kSecCSSingleThreaded@4096$kSecCSSkipResourceDirectory@32$kSecCSStrictValidate@16$kSecCSUseAllArchitectures@1$kSecCSUseSoftwareSigningCert@1024$kSecCSValidatePEH@2048$kSecCertificateEncoding@1667591779$kSecCertificateItemClass@2147487744$kSecCertificateType@1668577648$kSecCodeSignatureAdhoc@2$kSecCodeSignatureEnforcement@4096$kSecCodeSignatureForceExpiration@1024$kSecCodeSignatureForceHard@256$kSecCodeSignatureForceKill@512$kSecCodeSignatureHashSHA1@1$kSecCodeSignatureHashSHA256@2$kSecCodeSignatureHashSHA256Truncated@3$kSecCodeSignatureHashSHA384@4$kSecCodeSignatureHashSHA512@5$kSecCodeSignatureHost@1$kSec
CodeSignatureLibraryValidation@8192$kSecCodeSignatureNoHash@0$kSecCodeSignatureRestrict@2048$kSecCodeSignatureRuntime@65536$kSecCodeStatusDebugged@268435456$kSecCodeStatusHard@256$kSecCodeStatusKill@512$kSecCodeStatusPlatform@67108864$kSecCodeStatusValid@1$kSecCommentItemAttr@1768123764$kSecCreationDateItemAttr@1667522932$kSecCreatorItemAttr@1668445298$kSecCredentialTypeDefault@0$kSecCredentialTypeNoUI@2$kSecCredentialTypeWithUI@1$kSecCrlEncoding@1668443747$kSecCrlType@1668445296$kSecCustomIconItemAttr@1668641641$kSecDataAccessEvent@10$kSecDataAccessEventMask@1024$kSecDefaultChangedEvent@9$kSecDefaultChangedEventMask@512$kSecDefaultKeySize@0$kSecDeleteEvent@4$kSecDeleteEventMask@16$kSecDescriptionItemAttr@1684370275$kSecDesignatedRequirementType@3$kSecEveryEventMask@4294967295$kSecFormatBSAFE@3$kSecFormatNetscapeCertSequence@13$kSecFormatOpenSSL@1$kSecFormatPEMSequence@10$kSecFormatPKCS12@12$kSecFormatPKCS7@11$kSecFormatRawKey@4$kSecFormatSSH@2$kSecFormatSSHv2@14$kSecFormatUnknown@0$kSecFormatWrappedLSH@8$kSecFormatWrappedOpenSSL@6$kSecFormatWrappedPKCS8@5$kSecFormatWrappedSSH@7$kSecFormatX509Cert@9$kSecGenericItemAttr@1734700641$kSecGenericPasswordItemClass@1734700656$kSecGuestRequirementType@2$kSecHonorRoot@256$kSecHostRequirementType@1$kSecInternetPasswordItemClass@1768842612$kSecInvalidRequirementType@6$kSecInvisibleItemAttr@1768846953$kSecItemPemArmour@1$kSecItemTypeAggregate@5$kSecItemTypeCertificate@4$kSecItemTypePrivateKey@1$kSecItemTypePublicKey@2$kSecItemTypeSessionKey@3$kSecItemTypeUnknown@0$kSecKeyAlias@2$kSecKeyAlwaysSensitive@15$kSecKeyApplicationTag@7$kSecKeyDecrypt@19$kSecKeyDerive@20$kSecKeyEffectiveKeySize@11$kSecKeyEncrypt@18$kSecKeyEndDate@13$kSecKeyExtractable@16$kSecKeyImportOnlyOne@1$kSecKeyKeyClass@0$kSecKeyKeyCreator@8$kSecKeyKeySizeInBits@10$kSecKeyKeyType@9$kSecKeyLabel@6$kSecKeyModifiable@5$kSecKeyNeverExtractable@17$kSecKeyNoAccessControl@4$kSecKeyOperationTypeDecrypt@3$kSecKeyOperationTypeEncrypt@2$kSecKeyOperationTypeKeyExchange@4$kSecKeyOperationTypeSign@0$kSecKeyOperationTypeVerify@1$kSecKeyPermanent@3$kSecKeyPrintName@1$kSecKeyPrivate@4$kSecKeySecurePassphrase@2$kSecKeySensitive@14$kSecKeySign@21$kSecKeySignRecover@23$kSecKeyStartDate@12$kSecKeyUnwrap@26$kSecKeyUsageAll@2147483647$kSecKeyUsageCRLSign@64$kSecKeyUsageContentCommitment@2$kSecKeyUsageCritical@2147483648$kSecKeyUsageDataEncipherment@8$kSecKeyUsageDecipherOnly@256$kSecKeyUsageDigitalSignature@1$kSecKeyUsageEncipherOnly@128$kSecKeyUsageKeyAgreement@16$kSecKeyUsageKeyCertSign@32$kSecKeyUsageKeyEncipherment@4$kSecKeyUsageNonRepudiation@2$kSecKeyUsageUnspecified@0$kSecKeyVerify@22$kSecKeyVerifyRecover@24$kSecKeyWrap@25$kSecKeychainListChangedEvent@11$kSecKeychainListChangedMask@2048$kSecKeychainPromptInvalid@64$kSecKeychainPromptInvalidAct@128$kSecKeychainPromptRequirePassphase@1$kSecKeychainPromptUnsigned@16$kSecKeychainPromptUnsignedAct@32$kSecLabelItemAttr@1818321516$kSecLibraryRequirementType@4$kSecLockEvent@1$kSecLockEventMask@2$kSecMatchBits@3$kSecModDateItemAttr@1835295092$kSecNegativeItemAttr@1852139361$kSecNoGuest@0$kSecPaddingNone@0$kSecPaddingOAEP@2$kSecPaddingPKCS1@1$kSecPaddingPKCS1MD2@32768$kSecPaddingPKCS1MD5@32769$kSecPaddingPKCS1SHA1@32770$kSecPaddingPKCS1SHA224@32771$kSecPaddingPKCS1SHA256@32772$kSecPaddingPKCS1SHA384@32773$kSecPaddingPKCS1SHA512@32774$kSecPaddingSigRaw@16384$kSecPasswordChangedEvent@6$kSecPasswordChangedEventMask@64$kSecPathItemAttr@1885434984$kSecPluginRequirementType@5$kSecPortItemAttr@1886351988$kSecPreferencesDomainCommon@2$kSecPreferencesDomainDynamic@3$kSecPr
eferencesDomainSystem@1$kSecPreferencesDomainUser@0$kSecPrivateKeyItemClass@16$kSecProtocolItemAttr@1886675820$kSecProtocolTypeAFP@1634103328$kSecProtocolTypeAny@0$kSecProtocolTypeAppleTalk@1635019883$kSecProtocolTypeCIFS@1667851891$kSecProtocolTypeCVSpserver@1668707184$kSecProtocolTypeDAAP@1684103536$kSecProtocolTypeEPPC@1701867619$kSecProtocolTypeFTP@1718906912$kSecProtocolTypeFTPAccount@1718906977$kSecProtocolTypeFTPProxy@1718907000$kSecProtocolTypeFTPS@1718906995$kSecProtocolTypeHTTP@1752462448$kSecProtocolTypeHTTPProxy@1752461432$kSecProtocolTypeHTTPS@1752461427$kSecProtocolTypeHTTPSProxy@1752462200$kSecProtocolTypeIMAP@1768776048$kSecProtocolTypeIMAPS@1768779891$kSecProtocolTypeIPP@1768976416$kSecProtocolTypeIRC@1769104160$kSecProtocolTypeIRCS@1769104243$kSecProtocolTypeLDAP@1818517872$kSecProtocolTypeLDAPS@1818521715$kSecProtocolTypeNNTP@1852732528$kSecProtocolTypeNNTPS@1853124723$kSecProtocolTypePOP3@1886351411$kSecProtocolTypePOP3S@1886351475$kSecProtocolTypeRTSP@1920234352$kSecProtocolTypeRTSPProxy@1920234360$kSecProtocolTypeSMB@1936548384$kSecProtocolTypeSMTP@1936553072$kSecProtocolTypeSOCKS@1936685088$kSecProtocolTypeSSH@1936943136$kSecProtocolTypeSVN@1937141280$kSecProtocolTypeTelnet@1952803950$kSecProtocolTypeTelnetS@1952803955$kSecPublicKeyItemClass@15$kSecRSAMax@4096$kSecRSAMin@1024$kSecReadPermStatus@2$kSecRevocationCRLMethod@2$kSecRevocationNetworkAccessDisabled@16$kSecRevocationOCSPMethod@1$kSecRevocationPreferCRL@4$kSecRevocationRequirePositiveResponse@8$kSecRevocationUseAnyAvailableMethod@3$kSecScriptCodeItemAttr@1935897200$kSecSecurityDomainItemAttr@1935961454$kSecServerItemAttr@1936881266$kSecServiceItemAttr@1937138533$kSecSignatureItemAttr@1936943463$kSecSymmetricKeyItemClass@17$kSecTransformErrorAbortInProgress@19$kSecTransformErrorAborted@20$kSecTransformErrorAttributeNotFound@1$kSecTransformErrorInvalidAlgorithm@6$kSecTransformErrorInvalidConnection@15$kSecTransformErrorInvalidInput@10$kSecTransformErrorInvalidInputDictionary@5$kSecTransformErrorInvalidLength@7$kSecTransformErrorInvalidOperation@2$kSecTransformErrorInvalidType@8$kSecTransformErrorMissingParameter@14$kSecTransformErrorMoreThanOneOutput@4$kSecTransformErrorNameAlreadyRegistered@11$kSecTransformErrorNotInitializedCorrectly@3$kSecTransformErrorUnsupportedAttribute@12$kSecTransformInvalidArgument@21$kSecTransformInvalidOverride@17$kSecTransformMetaAttributeCanCycle@7$kSecTransformMetaAttributeDeferred@5$kSecTransformMetaAttributeExternalize@8$kSecTransformMetaAttributeHasInboundConnection@10$kSecTransformMetaAttributeHasOutboundConnections@9$kSecTransformMetaAttributeName@1$kSecTransformMetaAttributeRef@2$kSecTransformMetaAttributeRequired@3$kSecTransformMetaAttributeRequiresOutboundConnection@4$kSecTransformMetaAttributeStream@6$kSecTransformMetaAttributeValue@0$kSecTransformOperationNotSupportedOnGroup@13$kSecTransformTransformIsExecuting@16$kSecTransformTransformIsNotRegistered@18$kSecTrustOptionAllowExpired@1$kSecTrustOptionAllowExpiredRoot@8$kSecTrustOptionFetchIssuerFromNet@4$kSecTrustOptionImplicitAnchors@64$kSecTrustOptionLeafIsCA@2$kSecTrustOptionRequireRevPerCert@16$kSecTrustOptionUseTrustSettings@32$kSecTrustResultConfirm@2$kSecTrustResultDeny@3$kSecTrustResultFatalTrustFailure@6$kSecTrustResultInvalid@0$kSecTrustResultOtherError@7$kSecTrustResultProceed@1$kSecTrustResultRecoverableTrustFailure@5$kSecTrustResultUnspecified@4$kSecTrustSettingsChangedEvent@12$kSecTrustSettingsChangedEventMask@4096$kSecTrustSettingsDefaultRootCertSetting@-1$kSecTrustSettingsDomainAdmin@1$kSecTrustSettingsDomain
System@2$kSecTrustSettingsDomainUser@0$kSecTrustSettingsKeyUseAny@4294967295$kSecTrustSettingsKeyUseEnDecryptData@2$kSecTrustSettingsKeyUseEnDecryptKey@4$kSecTrustSettingsKeyUseKeyExchange@32$kSecTrustSettingsKeyUseSignCert@8$kSecTrustSettingsKeyUseSignRevocation@16$kSecTrustSettingsKeyUseSignature@1$kSecTrustSettingsResultDeny@3$kSecTrustSettingsResultInvalid@0$kSecTrustSettingsResultTrustAsRoot@2$kSecTrustSettingsResultTrustRoot@1$kSecTrustSettingsResultUnspecified@4$kSecTypeItemAttr@1954115685$kSecUnlockEvent@2$kSecUnlockEventMask@4$kSecUnlockStateStatus@1$kSecUpdateEvent@5$kSecUpdateEventMask@32$kSecUseOnlyGID@2$kSecUseOnlyUID@1$kSecVolumeItemAttr@1986817381$kSecWritePermStatus@4$kSecp192r1@192$kSecp256r1@256$kSecp384r1@384$kSecp521r1@521$kSecureDownloadDoNotEvaluateSigner@0$kSecureDownloadEvaluateSigner@1$kSecureDownloadFailEvaluation@2$kTLSProtocol1@4$kTLSProtocol11@7$kTLSProtocol12@8$kTLSProtocol13@10$kTLSProtocol1Only@5$kTLSProtocolMaxSupported@999$kTryAuthenticate@2$noSecuritySession@0$sessionHasGraphicAccess@16$sessionHasTTY@32$sessionIsRemote@4096$sessionIsRoot@1$sessionKeepCurrentBootstrap@32768$tls_ciphersuite_AES_128_GCM_SHA256@4865$tls_ciphersuite_AES_256_GCM_SHA384@4866$tls_ciphersuite_CHACHA20_POLY1305_SHA256@4867$tls_ciphersuite_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA@49160$tls_ciphersuite_ECDHE_ECDSA_WITH_AES_128_CBC_SHA@49161$tls_ciphersuite_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256@49187$tls_ciphersuite_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256@49195$tls_ciphersuite_ECDHE_ECDSA_WITH_AES_256_CBC_SHA@49162$tls_ciphersuite_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384@49188$tls_ciphersuite_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384@49196$tls_ciphersuite_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256@52393$tls_ciphersuite_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA@49170$tls_ciphersuite_ECDHE_RSA_WITH_AES_128_CBC_SHA@49171$tls_ciphersuite_ECDHE_RSA_WITH_AES_128_CBC_SHA256@49191$tls_ciphersuite_ECDHE_RSA_WITH_AES_128_GCM_SHA256@49199$tls_ciphersuite_ECDHE_RSA_WITH_AES_256_CBC_SHA@49172$tls_ciphersuite_ECDHE_RSA_WITH_AES_256_CBC_SHA384@49192$tls_ciphersuite_ECDHE_RSA_WITH_AES_256_GCM_SHA384@49200$tls_ciphersuite_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256@52392$tls_ciphersuite_RSA_WITH_3DES_EDE_CBC_SHA@10$tls_ciphersuite_RSA_WITH_AES_128_CBC_SHA@47$tls_ciphersuite_RSA_WITH_AES_128_CBC_SHA256@60$tls_ciphersuite_RSA_WITH_AES_128_GCM_SHA256@156$tls_ciphersuite_RSA_WITH_AES_256_CBC_SHA@53$tls_ciphersuite_RSA_WITH_AES_256_CBC_SHA256@61$tls_ciphersuite_RSA_WITH_AES_256_GCM_SHA384@157$tls_ciphersuite_group_ats@3$tls_ciphersuite_group_ats_compatibility@4$tls_ciphersuite_group_compatibility@1$tls_ciphersuite_group_default@0$tls_ciphersuite_group_legacy@2$tls_protocol_version_DTLSv10@65279$tls_protocol_version_DTLSv12@65277$tls_protocol_version_TLSv10@769$tls_protocol_version_TLSv11@770$tls_protocol_version_TLSv12@771$tls_protocol_version_TLSv13@772$"""
misc.update(
{
"kSecTrustSettingsAllowedError": "kSecTrustSettingsAllowedError",
"kAuthorizationComment": b"comment",
"kAuthorizationEnvironmentIcon": b"icon",
"kAuthorizationRuleClassDeny": b"deny",
"kSecTrustSettingsPolicyString": "kSecTrustSettingsPolicyString",
"kAuthorizationEnvironmentUsername": b"username",
"kAuthorizationRightExecute": b"system.privilege.admin",
"kAuthorizationRightRule": b"rule",
"kAuthorizationRuleIsAdmin": b"is-admin",
"kAuthorizationRuleClassAllow": b"allow",
"kAuthorizationEnvironmentPassword": b"password",
"kAuthorizationRuleAuthenticateAsAdmin": b"authenticate-admin",
"kAuthorizationPamResult": b"pam_result",
"kSecTrustSettingsResult": "kSecTrustSettingsResult",
"kAuthorizationEnvironmentPrompt": b"prompt",
"kAuthorizationRuleAuthenticateAsSessionUser": b"authenticate-session-owner",
"kSecTrustSettingsKeyUsage": "kSecTrustSettingsKeyUsage",
"kAuthorizationEnvironmentShared": b"shared",
"kSecTrustSettingsPolicy": "kSecTrustSettingsPolicy",
"kSecTrustSettingsApplication": "kSecTrustSettingsApplication",
}
)
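# Each ``functions`` entry maps a C function name to a tuple of
# (type signature, docstring, metadata).  Signatures use Objective-C type
# encodings -- return value first, then one code per argument: ``i`` int,
# ``I`` unsigned int, ``Z`` Boolean, ``B`` bool, ``@`` object, ``v`` void,
# ``^`` pointer-to -- plus PyObjC pointer modifiers ``n`` (input-only),
# ``o`` (output-only) and ``N`` (in/out).  The metadata dict records what
# the encoding alone cannot express, e.g. ``already_cfretained`` (a
# CoreFoundation object comes back with a +1 retain, per the Create/Copy
# rule) and ``c_array_length_in_arg`` (a buffer's length is carried by
# another argument).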
functions = {
"CMSEncoderGetCertificateChainMode": (b"i@o^I",),
"SecKeyGeneratePair": (
b"i@o^@o^@",
"",
{
"arguments": {
1: {"already_cfretained": True},
2: {"already_cfretained": True},
}
},
),
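    # SecKeyGeneratePair hands back both keys through output arguments that
    # the caller owns, hence ``already_cfretained`` on arguments 1 and 2.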
"SecCodeCopyPath": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecCertificateCopySerialNumber": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecKeyIsAlgorithmSupported": (sel32or64(b"Z@i@", b"Z@q@"),),
"SecTrustSetPolicies": (b"i@@",),
"SSLSetError": (b"i@i",),
"SecTransformCustomSetAttribute": (
sel32or64(
b"@^{OpaqueSecTransformImplementation=}@i@",
b"@^{OpaqueSecTransformImplementation=}@q@",
),
),
"SSLGetSessionOption": (b"i@io^Z",),
"SecStaticCodeCreateWithPath": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecTrustGetCertificateAtIndex": (sel32or64(b"@@i", b"@@q"),),
"SecTransformSetTransformAction": (
b"@^{OpaqueSecTransformImplementation=}@@?",
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"@"},
"arguments": {0: {"type": "^v"}},
}
}
}
},
),
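    # The ``callable`` metadata above describes the block passed as argument
    # 2: it returns an object and receives only the implicit block context
    # pointer (``^v``).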
"sec_protocol_options_set_tls_tickets_enabled": (b"vB",),
"SSLGetSessionState": (b"i@o^i",),
"SecItemImport": (
b"i@@N^IN^IIn^{_SecItemImportExportKeyParameters=II@@@@@@}@o^@",
"",
{"arguments": {7: {"already_cfretained": True}}},
),
"SSLGetMaxDatagramRecordSize": (b"i@o^L",),
"sec_protocol_options_set_tls_ocsp_enabled": (b"vB",),
"SecTrustEvaluate": (b"i@o^I",),
"CMSDecoderIsContentEncrypted": (b"i@o^Z",),
"SecTaskCreateFromSelf": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SecureDownloadCopyCreationDate": (
b"i^{OpaqueSecureDownload=}o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecTransformSetAttributeAction": (
b"@^{OpaqueSecTransformImplementation=}@@@?",
"",
{
"arguments": {
3: {
"callable": {
"retval": {"type": b"@"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {"type": "@"},
},
}
}
}
},
),
"sec_certificate_copy_ref": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SSLCopyDistinguishedNames": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecTrustSetExceptions": (b"B@@",),
"SecItemAdd": (b"i@o^@", "", {"arguments": {1: {"already_cfretained": True}}}),
"SecKeychainItemCopyKeychain": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"sec_protocol_metadata_get_server_name": (
b"^t@",
"",
{"retval": {"c_array_delimited_by_null": True}},
),
"SecPolicyCreateRevocation": (
sel32or64(b"@I", b"@Q"),
"",
{"retval": {"already_cfretained": True}},
),
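    # ``sel32or64(a, b)`` (defined earlier in this file) picks the encoding
    # at import time; CFOptionFlags is an unsigned long, hence the 32-bit
    # ``I`` versus the 64-bit ``Q``.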
"SecKeyCreateEncryptedData": (
b"@@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecCertificateCopyNormalizedSubjectContent": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecKeyCreateSignature": (b"@@@@o^@", "", {"retval": {"already_cfretained": True}}),
"CMSDecoderCreate": (b"io^@", "", {"arguments": {0: {"already_cfretained": True}}}),
"CMSDecoderCopyAllCerts": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"sec_protocol_options_add_tls_ciphersuite_group": (b"v@S",),
"SecDigestTransformCreate": (
sel32or64(b"@@io^@", b"@@qo^@"),
"",
{"retval": {"already_cfretained": True}},
),
"SSLSetEncryptionCertificate": (b"i@@",),
"SecHostCreateGuest": (b"iII@@Io^I",),
"SecTrustSettingsCopyCertificates": (
b"iIo^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeychainUnlock": (
b"i@In^vZ",
"",
{"arguments": {2: {"c_array_length_in_arg": 1}}},
),
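    # The password (argument 2) is a counted byte buffer; its length is
    # carried by argument 1, which is what ``c_array_length_in_arg`` records.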
"SSLSetSessionTicketsEnabled": (b"i@Z",),
"SecHostSelectGuest": (b"iII",),
"AuthorizationCopyPrivilegedReference": (b"io^^{AuthorizationOpaqueRef=}",),
"CMSDecoderSetDetachedContent": (b"i@@",),
"sec_identity_create": (b"@@", "", {"retval": {"already_retained": True}}),
"SSLAddDistinguishedName": (
b"i@n^vL",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"SecKeychainItemCopyFromPersistentReference": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecTransformCopyExternalRepresentation": (
b"@@",
"",
{"retval": {"already_cfretained": True}},
),
"sec_protocol_options_append_tls_ciphersuite_group": (b"v@S",),
"SecStaticCodeCheckValidityWithErrors": (
b"i@I@o^@",
"",
{"arguments": {3: {"already_cfretained": True}}},
),
"SSLGetNegotiatedProtocolVersion": (b"i@o^i",),
"sec_protocol_metadata_access_distinguished_names": (
b"B@@?",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": "^v"}, 1: {"type": "@"}},
},
"callable_retained": True,
}
}
},
),
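    # ``callable_retained`` marks a block the callee may hold on to beyond
    # this call, so the bridge must keep it alive after the function returns.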
"SecTransformPushbackAttribute": (b"@^{OpaqueSecTransformImplementation=}@@",),
"SecAccessCreateWithOwnerAndACL": (
b"@III@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecTrustCopyResult": (b"@@", "", {"retval": {"already_cfretained": True}}),
"CMSDecoderCopySignerSigningTime": (
b"i@Lo^d",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecTrustSetNetworkFetchAllowed": (b"i@Z",),
"SSLSetCertificate": (b"i@@",),
"SecACLSetContents": (b"i@@@S",),
"sec_protocol_options_append_tls_ciphersuite": (b"v@@",),
"SecTrustGetCertificateCount": (sel32or64(b"i@", b"q@"),),
"SecPKCS12Import": (
b"i@@o^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecTransformSetAttribute": (b"Z@@@o^@",),
"SecTrustSettingsSetTrustSettings": (b"i@I@",),
"SecKeyCopyExternalRepresentation": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecTransformCreateGroupTransform": (
b"@",
"",
{"retval": {"already_cfretained": True}},
),
"CMSDecoderSetSearchKeychain": (b"i@@",),
"SecTrustedApplicationSetData": (b"i@@",),
"SSLSetSessionOption": (b"i@iZ",),
"sec_protocol_options_set_peer_authentication_required": (b"vB",),
"SecKeychainSetPreferenceDomain": (b"ii",),
"SecTransformCreateFromExternalRepresentation": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SSLGetBufferedReadSize": (b"i@o^L",),
"SecTrustSetVerifyDate": (b"i@@",),
"sec_trust_copy_ref": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SecACLGetTypeID": (sel32or64(b"I", b"Q"),),
"SSLContextGetTypeID": (sel32or64(b"I", b"Q"),),
"SessionCreate": (b"iII",),
"sec_identity_access_certificates": (
b"B@@?",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": "^v"}, 1: {"type": "@"}},
}
}
}
},
),
"SecAccessCopyOwnerAndACL": (
b"i@o^Io^Io^Io^@",
"",
{"arguments": {4: {"already_cfretained": True}}},
),
"SecPolicyCreateWithProperties": (
b"@@@",
"",
{"retval": {"already_cfretained": True}},
),
"SecKeychainLockAll": (b"i",),
"SSLGetPeerDomainName": (
b"i@o^tN^L",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"sec_protocol_metadata_access_ocsp_response": (
b"B@@?",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": "^v"}, 1: {"type": "@"}},
}
}
}
},
),
"SecACLCopyContents": (
b"i@o^@o^@o^S",
"",
{
"arguments": {
1: {"already_cfretained": True},
2: {"already_cfretained": True},
}
},
),
"SecCodeMapMemory": (b"i@I",),
"CMSDecoderCopyContent": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecAccessCreate": (
b"i@@o^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecTransformExecute": (b"@@o^@", "", {"retval": {"already_cfretained": True}}),
"SecCertificateCopyEmailAddresses": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"AuthorizationMakeExternalForm": (
b"i^{AuthorizationOpaqueRef=}o^{_AuthorizationExternalForm=[32C]}",
),
"SecCodeCheckValidityWithErrors": (
b"i@I@o^@",
"",
{"arguments": {3: {"already_cfretained": True}}},
),
"SecCodeCopyDesignatedRequirement": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecTransformNoData": (b"@",),
"SecTransformRegister": (
b"Z@^?o^@",
"",
{
"arguments": {
1: {
"callable": {
"retval": {
"callable": {
"retval": {"type": "@"},
"arguments": {0: {"type": "^v"}},
},
"type": b"@?",
},
"arguments": {
0: {"type": "@"},
1: {"type": "@"},
2: {"type": "^{OpaqueSecTransformImplementation=}"},
},
},
"callable_retained": True,
}
}
},
),
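    # SecTransformRegister takes a factory callback whose *return value* is
    # itself a block, so one ``callable`` description is nested inside the
    # ``retval`` of another.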
"SecCodeCopyStaticCode": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"CMSEncoderAddSignedAttributes": (b"i@I",),
"SecIdentityCopySystemIdentity": (
b"i@o^@o^@",
"",
{
"arguments": {
1: {"already_cfretained": True},
2: {"already_cfretained": True},
}
},
),
"SecureDownloadGetDownloadSize": (b"i^{OpaqueSecureDownload=}o^q",),
"SecKeychainItemDelete": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecStaticCodeCreateWithPathAndAttributes": (
b"i@I@o^@",
"",
{"arguments": {3: {"already_cfretained": True}}},
),
"sec_identity_create_with_certificates": (
b"@@@",
"",
{"retval": {"already_retained": True}},
),
"SSLCopyPeerTrust": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeyVerifySignature": (b"Z@@@@o^@",),
"AuthorizationRightGet": (
b"i^t^@",
"",
{
"arguments": {
0: {"c_array_delimited_by_null": True, "type_modifier": "n"},
1: {"already_retained": True, "type_modifier": "o"},
}
},
),
"SecDecryptTransformCreate": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecCertificateCopyNormalizedIssuerContent": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecPolicyCreateBasicX509": (b"@", "", {"retval": {"already_cfretained": True}}),
"sec_protocol_options_set_tls_false_start_enabled": (b"vB",),
"SecKeychainLock": (b"i@",),
"SecTrustGetNetworkFetchAllowed": (b"i@o^Z",),
"SecureDownloadCreateWithTicket": (
b"i@^?^v^?^vo^^{OpaqueSecureDownload=}",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"i"},
"arguments": {0: {"type": "@"}, 1: {"type": "^v"}},
},
"callable_retained": True,
},
3: {
"callable": {
"retval": {"type": b"i"},
"arguments": {
0: {"type": "@"},
1: {"type": "i"},
2: {"type": "^v"},
},
},
"callable_retained": True,
},
}
},
),
"CMSEncoderCopySupportingCerts": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeychainItemSetAccess": (b"i@@",),
"sec_protocol_options_set_tls_resumption_enabled": (b"vB",),
"SSLHandshake": (b"i@",),
"SecKeychainAddCallback": (
b"i^?I^v",
"",
{
"arguments": {
0: {
"callable": {
"retval": {"type": b"i"},
"arguments": {
0: {"type": "I"},
1: {"type": "n^{SecKeychainCallbackInfo=I@@i}"},
2: {"type": "^v"},
},
},
"callable_retained": True,
}
}
},
),
"SecureDownloadCopyURLs": (
b"i^{OpaqueSecureDownload=}o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"CMSEncoderAddRecipients": (b"i@@",),
"sec_protocol_options_set_tls_is_fallback_attempt": (b"vB",),
"SecTrustCopyPublicKey": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SecKeyCreateWithData": (b"@@@o^@", "", {"retval": {"already_cfretained": True}}),
"sec_protocol_metadata_get_negotiated_tls_protocol_version": (b"S@",),
"SecKeychainDelete": (b"i@",),
"sec_identity_copy_certificates_ref": (
b"@@",
"",
{"retval": {"already_cfretained": True}},
),
"AuthorizationRightSet": (
b"i^{AuthorizationOpaqueRef=}^t@@@@",
"",
{"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}}},
),
"SecACLRemove": (b"i@",),
"CMSDecoderCopyDetachedContent": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeychainCreate": (
b"in^tIn^vZ@o^@",
"",
{
"arguments": {
0: {"c_array_delimited_by_null": True},
2: {"c_array_length_in_arg": 1},
5: {"already_cfretained": True},
}
},
),
"SecEncryptTransformGetTypeID": (sel32or64(b"I", b"Q"),),
"SSLGetDiffieHellmanParams": (
b"i@o^vN^L",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"SSLSetSessionConfig": (b"i@@",),
"SecCertificateCreateWithData": (
b"@@@",
"",
{"retval": {"already_cfretained": True}},
),
"SecRequirementCopyData": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"CMSDecoderCopyEncapsulatedContentType": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecRequirementCreateWithStringAndErrors": (
b"i@Io^@o^@",
"",
{"arguments": {3: {"already_cfretained": True}}},
),
"SSLReHandshake": (b"i@",),
"sec_protocol_metadata_get_negotiated_ciphersuite": (b"I@",),
"SSLCopyRequestedPeerName": (
b"i@o^tN^L",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"CMSDecoderCopySignerCert": (
b"i@Lo^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecTrustCopyExceptions": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SecPolicyCreateSSL": (b"@Z@", "", {"retval": {"already_cfretained": True}}),
"SecKeychainItemCreatePersistentReference": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SSLCopyCertificateAuthorities": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecTransformConnectTransforms": (b"@@@@@@o^@",),
"SecAccessCopyACLList": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecHostSelectedGuest": (b"iIo^I",),
"sec_protocol_options_get_default_min_tls_protocol_version": (b"i@",),
"SecAccessCopyMatchingACLList": (
b"@@@",
"",
{"retval": {"already_cfretained": True}},
),
"SecKeychainItemCreateCopy": (
b"i@@@o^@",
"",
{"arguments": {3: {"already_cfretained": True}}},
),
"SecKeyGeneratePairAsync": (
b"v@^{dispatch_queue_s=}@?",
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {"type": "@"},
3: {"type": "@"},
},
}
}
}
},
),
"SecCertificateCopyData": (b"@@", "", {"retval": {"already_cfretained": True}}),
"sec_protocol_options_set_tls_max_version": (b"v@i",),
"SecKeyGenerateSymmetric": (b"@@o^@", "", {"retval": {"already_cfretained": True}}),
"SecHostSetGuestStatus": (b"iII@I",),
"CMSDecoderCopySignerStatus": (b"i@L@Zo^Io^i",),
"SecTrustCopyCustomAnchorCertificates": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"CMSEncoderCopySignerTimestampWithPolicy": (b"i@@Lo^d",),
"SecTrustSettingsImportExternalRepresentation": (b"iI@",),
"SecTrustCreateWithCertificates": (
b"i@@o^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecHostSetHostingPort": (b"iII",),
"sec_protocol_options_set_tls_sct_enabled": (b"vB",),
"SecDecryptTransformGetTypeID": (sel32or64(b"I", b"Q"),),
"CMSEncoderUpdateContent": (
b"i@n^vL",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"SSLGetNegotiatedCipher": (b"i@o^i",),
"SecTrustCopyProperties": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SecKeyCopyKeyExchangeResult": (
b"@@@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecTrustEvaluateWithError": (b"B@o^@",),
"SecPolicyGetTypeID": (sel32or64(b"I", b"Q"),),
"SessionGetInfo": (
b"iI^I^I",
"",
{"arguments": {1: {"type_modifier": "o"}, 2: {"type_modifier": "o"}}},
),
"sec_protocol_metadata_access_supported_signature_algorithms": (
b"B@@?",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": "^v"}, 1: {"type": "S"}},
}
}
}
},
),
"SecTransformFindByName": (b"@@@",),
"SecIdentityCreateWithCertificate": (
b"i@@o^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecTaskCreateWithAuditToken": (
b"@@{?=[8I]}",
"",
{"retval": {"already_cfretained": True}},
),
"SecCertificateCopyValues": (
b"@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"AuthorizationCreateFromExternalForm": (
b"in^{_AuthorizationExternalForm=[32C]}o^^{AuthorizationOpaqueRef=}",
),
"CMSDecoderUpdateMessage": (
b"i@n^vL",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"CMSEncoderCopyEncapsulatedContentType": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SSLGetConnection": (b"i@o^@",),
"SecKeychainSetUserInteractionAllowed": (b"iZ",),
"SecTrustSetAnchorCertificatesOnly": (b"i@Z",),
"SSLGetPeerID": (b"i@o^vN^L", "", {"arguments": {1: {"c_array_length_in_arg": 2}}}),
"SecTransformCreateReadTransformWithReadStream": (
b"@@",
"",
{"retval": {"already_cfretained": True}},
),
"sec_protocol_options_set_max_tls_protocol_version": (b"v@i",),
"SecRequirementGetTypeID": (sel32or64(b"I", b"Q"),),
"SSLCreateContext": (b"@@ii", "", {"retval": {"already_cfretained": True}}),
"sec_protocol_options_set_challenge_block": (
b"v@@?^{dispatch_queue_s}",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {
"callable": {
"retval": {"type": "v"},
"arguments": {0: {"type": "^v"}},
},
"type": "@?",
},
},
}
}
}
},
),
"CMSEncoderGetHasDetachedContent": (b"i@o^Z",),
"SSLSetConnection": (b"i@@",),
"SecKeychainRemoveCallback": (
b"i^?",
"",
{
"arguments": {
0: {
"callable": {
"retval": {"type": b"i"},
"arguments": {
0: {"type": "I"},
1: {"type": "n^{SecKeychainCallbackInfo=I@@i}"},
2: {"type": "^v"},
},
},
"callable_retained": True,
}
}
},
),
"SecCertificateCopyPublicKey": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecPolicyCopyProperties": (b"@@", "", {"retval": {"already_cfretained": True}}),
"CMSEncoderSetEncapsulatedContentTypeOID": (b"i@@",),
"SecDecodeTransformCreate": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"sec_protocol_metadata_get_negotiated_protocol": (
b"^t@",
"",
{"retval": {"c_array_delimited_by_null": True}},
),
"SSLSetALPNProtocols": (b"i@@",),
"SSLGetProtocolVersionMin": (b"i@o^i",),
"SSLSetPeerDomainName": (
b"i@n^tL",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"SecSignTransformCreate": (b"@@o^@", "", {"retval": {"already_cfretained": True}}),
"SecTransformGetTypeID": (sel32or64(b"I", b"Q"),),
"SecKeychainGetPath": (
b"i@N^Io^t",
"",
{"arguments": {2: {"c_array_length_in_arg": 1}}},
),
"SecCertificateCopySerialNumberData": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecCertificateCopyCommonName": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SSLGetSupportedCiphers": (
b"i@o^iN^L",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"sec_trust_create": (b"@@", "", {"retval": {"already_retained": True}}),
"CMSEncoderSetSignerAlgorithm": (b"i@@",),
"SecCertificateAddToKeychain": (b"i@@",),
"SecKeyGetBlockSize": (b"L@",),
"SecIdentityCopyPrivateKey": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"CMSEncoderAddSupportingCerts": (b"i@@",),
"sec_certificate_create": (b"@@@", "", {"retval": {"already_retained": True}}),
"SSLSetMaxDatagramRecordSize": (b"i@L",),
"CMSDecoderCopySignerTimestamp": (
b"i@Lo^d",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SSLSetDatagramHelloCookie": (
b"i@n^vL",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"sec_identity_copy_ref": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SecTaskGetTypeID": (sel32or64(b"I", b"Q"),),
"sec_protocol_options_get_default_max_tls_protocol_version": (b"i@",),
"SSLSetIOFuncs": (
b"i@^?^?",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"i"},
"arguments": {
0: {"type": "@"},
1: {"type": "o^v", "c_array_length_in_arg": 2},
2: {"type": "N^L"},
},
},
"callable_retained": True,
},
2: {
"callable": {
"retval": {"type": b"i"},
"arguments": {
0: {"type": "@"},
1: {"type": "n^v", "c_array_length_in_arg": 2},
2: {"type": "N^L"},
},
},
"callable_retained": True,
},
}
},
),
"SecKeychainOpen": (
b"in^to^@",
"",
{
"arguments": {
0: {"c_array_delimited_by_null": True},
1: {"already_cfretained": True},
}
},
),
"SecCodeGetTypeID": (sel32or64(b"I", b"Q"),),
"SecRequirementCreateWithData": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"sec_protocol_options_set_min_tls_protocol_version": (b"v@i",),
"SecCodeCopySigningInformation": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SSLSetProtocolVersionMax": (b"i@i",),
"sec_protocol_metadata_access_pre_shared_keys": (
b"B@@?",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {"type": "@"},
},
}
}
}
},
),
"SecKeychainGetStatus": (b"i@o^I",),
"SSLGetClientCertificateState": (b"i@o^i",),
"CMSDecoderCopySignerTimestampCertificates": (
b"i@Lo^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecureDownloadFinished": (b"i^{OpaqueSecureDownload=}",),
"CMSEncoderSetCertificateChainMode": (b"i@I",),
"SecAccessControlCreateWithFlags": (
sel32or64(b"@@@Io^@", b"@@@Qo^@"),
"",
{"retval": {"already_cfretained": True}},
),
"SecTrustEvaluateAsync": (
b"i@^{dispatch_queue_s}@?",
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {"type": "I"},
},
}
}
}
},
),
"SecureDownloadCopyName": (
b"i^{OpaqueSecureDownload=}o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SSLCopyALPNProtocols": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeychainCopySearchList": (
b"io^@",
"",
{"arguments": {0: {"already_cfretained": True}}},
),
"SecDigestTransformGetTypeID": (sel32or64(b"I", b"Q"),),
"SecTrustSetOptions": (b"i@I",),
"SSLGetNumberEnabledCiphers": (b"i@o^L",),
"SecIdentityGetTypeID": (sel32or64(b"I", b"Q"),),
"SecKeychainCopyAccess": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"sec_protocol_options_set_tls_pre_shared_key_identity_hint": (b"v@@",),
"CMSEncoderGetTypeID": (sel32or64(b"I", b"Q"),),
"SecTransformGetAttribute": (b"@@@",),
"CMSDecoderGetNumSigners": (b"i@o^L",),
"SecCertificateCopyPreferred": (
b"@@@",
"",
{"retval": {"already_cfretained": True}},
),
"sec_protocol_options_set_local_identity": (b"v@@",),
"sec_protocol_options_set_tls_min_version": (b"v@i",),
"SecRandomCopyBytes": (
b"i^{__SecRandom=}L^v",
"",
{"arguments": {2: {"type_modifier": "o", "c_array_length_in_arg": 1}}},
),
"CMSDecoderFinalizeMessage": (b"i@",),
"SecKeyWrapSymmetric": (b"@@@@o^@", "", {"retval": {"already_cfretained": True}}),
"SecVerifyTransformCreate": (
b"@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecCodeCheckValidity": (b"i@I@",),
"CMSEncoderCopyEncodedContent": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"CMSEncoderAddSigners": (b"i@@",),
"sec_protocol_metadata_copy_peer_public_key": (
b"@@@",
"",
{"retval": {"already_retained": True}},
),
"AuthorizationFree": (b"i^{AuthorizationOpaqueRef=}I",),
"SecCopyErrorMessageString": (
b"@i^v",
"",
{"retval": {"already_cfretained": True}},
),
"CMSEncoderCopySigners": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecureDownloadRelease": (b"i^{OpaqueSecureDownload=}",),
"SecTrustSettingsCopyModificationDate": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecureDownloadUpdateWithData": (b"i^{OpaqueSecureDownload=}@",),
"SecKeychainCopyDomainDefault": (
b"iio^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecItemDelete": (b"i@",),
"SecStaticCodeCheckValidity": (b"i@I@",),
"sec_protocol_metadata_create_secret_with_context": (
b"@@L^tL^vL",
"",
{
"retval": {"already_retained": True},
"arguments": {
2: {"type_modifier": "n", "c_array_length_in_arg": 1},
4: {"type_modifier": "n", "c_array_length_in_arg": 3},
},
},
),
"SecCodeCopyHost": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"sec_protocol_metadata_challenge_parameters_are_equal": (b"B@@",),
"SecTrustedApplicationGetTypeID": (sel32or64(b"I", b"Q"),),
"SecTransformSetDataAction": (
b"@^{OpaqueSecTransformImplementation=}@@?",
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"@"},
"arguments": {0: {"type": "^v"}, 1: {"type": "@"}},
}
}
}
},
),
"SecKeychainAddGenericPassword": (
b"i@In^tIn^tIn^vo^@",
"",
{
"arguments": {
2: {"c_array_length_in_arg": 1},
4: {"c_array_length_in_arg": 3},
6: {"c_array_length_in_arg": 5},
7: {"already_cfretained": True},
}
},
),
"sec_protocol_options_add_tls_application_protocol": (
b"v@^t",
"",
{"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}}},
),
"CMSDecoderCopySignerTimestampWithPolicy": (
b"i@@Lo^d",
"",
{"arguments": {3: {"already_cfretained": True}}},
),
"SSLRead": (
b"i@o^vLo^L",
"",
{"arguments": {1: {"c_array_length_in_arg": (2, 3)}}},
),
"SecTaskCopyValueForEntitlement": (
b"@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"sec_protocol_metadata_get_negotiated_tls_ciphersuite": (b"I@",),
"SSLGetDatagramWriteSize": (b"i@o^L",),
"SecIdentitySetPreferred": (b"i@@@",),
"SecTrustCopyAnchorCertificates": (
b"io^@",
"",
{"arguments": {0: {"already_cfretained": True}}},
),
"sec_protocol_options_add_tls_ciphersuite": (b"v@i",),
"SecKeychainSetDomainDefault": (b"ii@",),
"sec_protocol_options_get_default_max_dtls_protocol_version": (b"i@",),
"SecCertificateGetTypeID": (sel32or64(b"I", b"Q"),),
"SecCertificateCopyShortDescription": (
b"@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SSLCopyRequestedPeerNameLength": (b"i@o^L",),
"CMSEncoderCopySignerTimestamp": (b"i@Lo^d",),
"SSLSetClientSideAuthenticate": (b"i@i",),
"sec_protocol_metadata_get_early_data_accepted": (b"B@",),
"SecCodeCopySelf": (b"iIo^@", "", {"arguments": {1: {"already_cfretained": True}}}),
"SSLGetNumberSupportedCiphers": (b"i@o^L",),
"SecIdentityCopyPreferred": (b"@@@@", "", {"retval": {"already_cfretained": True}}),
"SecACLUpdateAuthorizations": (b"i@@",),
"SecAccessGetTypeID": (sel32or64(b"I", b"Q"),),
"SecKeychainItemGetTypeID": (sel32or64(b"I", b"Q"),),
"SecTrustSetKeychains": (b"i@@",),
"SSLGetProtocolVersionMax": (b"i@o^i",),
"SecKeyCreateDecryptedData": (
b"@@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecKeyCopyPublicKey": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SecGroupTransformGetTypeID": (sel32or64(b"I", b"Q"),),
"SecKeychainCopySettings": (b"i@o^{SecKeychainSettings=IZZI}",),
"SecTrustSettingsCreateExternalRepresentation": (
b"iIo^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecCertificateCopyKey": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SecTrustGetTrustResult": (b"i@o^I",),
"SSLSetDiffieHellmanParams": (
b"i@n^vL",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"SecTrustSettingsRemoveTrustSettings": (b"i@I",),
"SecRequirementCreateWithString": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecKeychainSetDomainSearchList": (b"ii@",),
"sec_protocol_options_set_tls_diffie_hellman_parameters": (b"v@@",),
"SecRequirementCopyString": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SSLWrite": (b"i@n^vLo^L", "", {"arguments": {1: {"c_array_length_in_arg": 2}}}),
"sec_protocol_metadata_peers_are_equal": (b"B@@",),
"SSLSetPeerID": (b"i@n^vL", "", {"arguments": {1: {"c_array_length_in_arg": 2}}}),
"sec_protocol_options_get_default_min_dtls_protocol_version": (b"i@",),
"SecTransformCreate": (b"@@o^@", "", {"retval": {"already_cfretained": True}}),
"SecCertificateCopyNormalizedSubjectSequence": (
b"@@",
"",
{"retval": {"already_cfretained": True}},
),
"SecKeychainCopyDomainSearchList": (
b"iio^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeyCreateFromData": (b"@@@o^@", "", {"retval": {"already_cfretained": True}}),
"SecTaskCopyValuesForEntitlements": (
b"@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecStaticCodeGetTypeID": (sel32or64(b"I", b"Q"),),
"SecItemExport": (
b"i@IIn^{_SecItemImportExportKeyParameters=II@@@@@@}o^@",
"",
{"arguments": {4: {"already_cfretained": True}}},
),
"SSLSetProtocolVersionMin": (b"i@i",),
"SecCertificateCopyLongDescription": (
b"@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecKeychainSetAccess": (b"i@@",),
"sec_protocol_options_set_pre_shared_key_selection_block": (
b"v@@?^{dispatch_queue_s}",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {"type": "@"},
3: {
"callable": {
"retval": {"type": "v"},
"arguments": {0: {"type": "^v"}, 1: {"type": "@"}},
},
"type": "@?",
},
},
}
}
}
},
),
"sec_protocol_options_add_pre_shared_key": (b"v@",),
"SecKeychainSetSearchList": (b"i@",),
"CMSEncoderCopyRecipients": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"sec_protocol_options_set_key_update_block": (
b"v@@?^{dispatch_queue_s}",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {
"callable": {
"retval": {"type": "v"},
"arguments": {0: {"type": "^v"}},
},
"type": "@?",
},
},
}
}
}
},
),
"SecACLCopyAuthorizations": (b"@@", "", {"retval": {"already_cfretained": True}}),
"SecTrustEvaluateAsyncWithError": (
b"i@^{dispatch_queue_s}@?",
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {"type": "B"},
3: {"type": "@"},
},
}
}
}
},
),
"SSLGetEnabledCiphers": (
b"i@o^iN^L",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"SecKeychainGetPreferenceDomain": (b"io^i",),
"SecKeychainGetVersion": (b"io^I",),
"SecKeyCreateRandomKey": (b"@@o^@", "", {"retval": {"already_cfretained": True}}),
"sec_protocol_options_set_tls_renegotiation_enabled": (b"vB",),
"SSLGetPeerDomainNameLength": (b"i@o^L",),
"sec_protocol_options_are_equal": (b"B@@",),
"SecCertificateCopySubjectSummary": (
b"@@",
"",
{"retval": {"already_cfretained": True}},
),
"CMSDecoderCopySignerEmailAddress": (
b"i@Lo^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecTrustSetSignedCertificateTimestamps": (b"i@@",),
"SecTrustSetOCSPResponse": (b"i@@",),
"SSLSetCertificateAuthorities": (b"i@@Z",),
"SecACLCreateWithSimpleContents": (
b"i@@@So^@",
"",
{"arguments": {4: {"already_cfretained": True}}},
),
"SecTrustGetTypeID": (sel32or64(b"I", b"Q"),),
"SecTrustedApplicationCreateFromPath": (
b"i^to^@",
"",
{
"arguments": {
0: {"c_array_delimited_by_null": True, "type_modifier": "n"},
1: {"already_cfretained": True},
}
},
),
"SSLSetOCSPResponse": (b"i@@",),
"SecTrustGetVerifyTime": (b"d@",),
"SecTransformExecuteAsync": (
b"v@^{dispatch_queue_s=}@?",
"",
{
"arguments": {
2: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {"type": "@"},
3: {"type": "Z"},
},
}
}
}
},
),
"SecIdentityCopyCertificate": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeychainAddInternetPassword": (
b"i@In^tIn^tIn^tIn^tSIIIn^vo^@",
"",
{
"arguments": {
2: {"c_array_length_in_arg": 1},
4: {"c_array_length_in_arg": 3},
6: {"c_array_length_in_arg": 5},
8: {"c_array_length_in_arg": 7},
13: {"c_array_length_in_arg": 12},
14: {"already_cfretained": True},
}
},
),
"SecKeychainSetSettings": (b"i@n^{SecKeychainSettings=IZZI}",),
"SecIdentitySetSystemIdentity": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"CMSEncoderSetHasDetachedContent": (b"i@Z",),
"SecEncodeTransformCreate": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecKeychainGetUserInteractionAllowed": (b"io^Z",),
"sec_protocol_metadata_create_secret": (
b"@@L^tL",
"",
{
"retval": {"already_retained": True},
"arguments": {2: {"type_modifier": "n", "c_array_length_in_arg": 1}},
},
),
"SecTrustCopyPolicies": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeychainSetDefault": (b"i@",),
"SecCodeCopyGuestWithAttributes": (
b"i@@Io^@",
"",
{"arguments": {3: {"already_cfretained": True}}},
),
"SecTrustSetAnchorCertificates": (b"i@@",),
"SecKeychainGetTypeID": (sel32or64(b"I", b"Q"),),
"SecCertificateSetPreferred": (b"i@@@",),
"SecCertificateCopyNormalizedIssuerSequence": (
b"@@",
"",
{"retval": {"already_cfretained": True}},
),
"sec_protocol_options_set_tls_server_name": (
b"v@^t",
"",
{"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}}},
),
"SecKeyGetTypeID": (sel32or64(b"I", b"Q"),),
"sec_protocol_set_local_identity": (b"v@@",),
"sec_protocol_metadata_get_negotiated_protocol_version": (b"i@",),
"SecItemCopyMatching": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecItemUpdate": (b"i@@",),
"CMSDecoderGetTypeID": (sel32or64(b"I", b"Q"),),
"SecTrustedApplicationCopyData": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeychainItemCopyAccess": (
b"i@o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"SecKeyUnwrapSymmetric": (
b"@n^@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecTaskCopySigningIdentifier": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecureDownloadCopyTicketLocation": (
b"i^{OpaqueSecureDownload=}o^@",
"",
{"arguments": {1: {"already_cfretained": True}}},
),
"sec_protocol_options_set_verify_block": (
b"v@@?^{dispatch_queue_s}",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {
0: {"type": "^v"},
1: {"type": "@"},
2: {"type": "@"},
3: {
"callable": {
"retval": {"type": "v"},
"arguments": {0: {"type": "^v"}},
},
"type": "@?",
},
},
}
}
}
},
),
"SecTranformCustomGetAttribute": (
sel32or64(
b"@^{OpaqueSecTransformImplementation=}@i",
b"@^{OpaqueSecTransformImplementation=}@q",
),
),
"SecKeychainCopyDefault": (
b"io^@",
"",
{"arguments": {0: {"already_cfretained": True}}},
),
"SSLSetEnabledCiphers": (
b"i@n^iL",
"",
{"arguments": {1: {"c_array_length_in_arg": 2}}},
),
"sec_protocol_metadata_access_peer_certificate_chain": (
b"B@@?",
"",
{
"arguments": {
1: {
"callable": {
"retval": {"type": b"v"},
"arguments": {0: {"type": "^v"}, 1: {"type": "@"}},
}
}
}
},
),
"SecHostRemoveGuest": (b"iIII",),
"SSLClose": (b"i@",),
"SecKeyDeriveFromPassword": (
b"@@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"SecAccessControlGetTypeID": (sel32or64(b"I", b"Q"),),
"SecKeyCopyAttributes": (b"@@", "", {"retval": {"already_cfretained": True}}),
"AuthorizationRightRemove": (
b"i^{AuthorizationOpaqueRef=}^t",
"",
{"arguments": {1: {"c_array_delimited_by_null": True, "type_modifier": "n"}}},
),
"SecTrustSettingsCopyTrustSettings": (
b"i@Io^@",
"",
{"arguments": {2: {"already_cfretained": True}}},
),
"SecEncryptTransformCreate": (
b"@@o^@",
"",
{"retval": {"already_cfretained": True}},
),
"CMSEncoderCreate": (b"io^@", "", {"arguments": {0: {"already_cfretained": True}}}),
"CMSEncodeContent": (
b"i@@@ZIn^vLo^@",
"",
{
"arguments": {
5: {"c_array_length_in_arg": 6},
7: {"already_cfretained": True},
}
},
),
}
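# Alternate names: each key on the left is exposed as an alias of the symbol
# on the right (e.g. the correctly spelled SecTransformCustomGetAttribute is
# an alias of SecTranformCustomGetAttribute, the misspelled name the
# framework actually exports).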
aliases = {
"SecTransformCustomGetAttribute": "SecTranformCustomGetAttribute",
"errSessionInvalidFlags": "errAuthorizationInvalidFlags",
"errSecCSSigDBAccess": "errSecCSDBAccess",
"errSSLServerAuthCompleted": "errSSLPeerAuthCompleted",
"errSSLLast": "errSSLUnexpectedRecord",
"errSessionInternal": "errAuthorizationInternal",
"kSecRequirementTypeCount": "kSecInvalidRequirementType",
"errSSLClientAuthCompleted": "errSSLPeerAuthCompleted",
"errSecCSSigDBDenied": "errSecCSDBDenied",
}
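# CFType registrations: (Python name, type encoding, name of the CFTypeID
# getter function, toll-free bridged Objective-C class or None).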
cftypes = [
("SecKeyRef", b"^{OpaqueSecKeyRef=}", "SecKeyGetTypeID", None),
("SecPasswordRef", b"^{OpaqueSecPasswordRef=}", "SecPasswordGetTypeID", None),
(
"SecKeychainItemRef",
b"^{OpaqueSecKeychainItemRef=}",
"SecKeyChainItemGetTypeID",
None,
),
("SecTaskRef", b"^{__SecTask=}", "SecTaskGetTypeID", None),
(
"SecCertificateRef",
b"^{OpaqueSecCertificateRef=}",
"SecCertificateGetTypeID",
None,
),
(
"SecTrustedApplicationRef",
b"^{OpaqueSecTrustedApplicationRef=}",
"SecTrustedApplicationGetTypeID",
None,
),
("CMSEncoderRef", b"^{_CMSEncoder=}", "CMSEncoderGetTypeID", None),
(
"SecAccessControlRef",
b"^{OpaqueSecAccessControlRef=}",
"SecAccessControlGetTypeID",
None,
),
("SecCodeRef", b"^{__SecCode=}", "SecCodeGetTypeID", None),
("CMSDecoderRef", b"^{_CMSDecoder=}", "CMSDecoderGetTypeID", None),
("SecAccessRef", b"^{OpaqueSecAccessRef=}", "SecAccessGetTypeID", None),
("SecIdentityRef", b"^{OpaqueSecIdentityRef=}", "SecIdentityGetTypeID", None),
("SSLContextRef", b"^{SSLContext=}", "SSLContextGetTypeID", None),
("SecRequirementRef", b"^{__SecRequirement=}", "SecRequirementGetTypeID", None),
("SecPolicyRef", b"^{OpaqueSecPolicyRef=}", "SecPolicyGetTypeID", None),
("SecTrustRef", b"^{__SecTrust=}", "SecTrustGetTypeID", None),
("SecACLRef", b"^{OpaqueSecTrustRef=}", "SecACLGetTypeID", None),
("SecKeychainRef", b"^{OpaqueSecKeychainRef=}", "SecKeyChainGetTypeID", None),
]
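# Handle types without a usable CFTypeID getter are wrapped as opaque
# pointer types instead.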
misc.update(
{
"AuthorizationRef": objc.createOpaquePointerType(
"AuthorizationRef", b"^{AuthorizationOpaqueRef=}"
),
"SecureDownloadRef": objc.createOpaquePointerType(
"SecureDownloadRef", b"^{OpaqueSecureDownload=}"
),
"SecRandomRef": objc.createOpaquePointerType(
"SecRandomRef", b"^{__SecRandom=}"
),
"SecTransformImplementationRef": objc.createOpaquePointerType(
"SecTransformImplementationRef", b"^{OpaqueSecTransformImplementation=}"
),
}
)
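# Constants whose values must be evaluated as Python expressions.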
expressions = {"kAuthorizationEmptyEnvironment": "None"}
# END OF FILE
| [
"[email protected]"
]
| |
0b2c7b6c78f2f20e685b99106e28b2dcfabe7a03 | 9d852841463c64f75da8a8579c32cea856d2073d | /leetcode/validate_binary_search_tree.py | 4931cdb9a7da506dea78bd8a759a89b592284296 | []
| no_license | LarsIndus/algorithms-DS | 2d94a5ba3e17de7c8d9e7ac4ace8eb70bb2a7331 | 32a64a4522f8474ab63421b06e945f6e44a441e1 | refs/heads/master | 2023-04-26T00:13:06.026785 | 2021-05-20T18:55:12 | 2021-05-20T18:55:12 | 243,239,680 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,476 | py | """
Leetcode Problem 98: Validate Binary Search Tree (Medium)
Given the root of a binary tree, determine if it is a valid binary search tree (BST).
A valid BST is defined as follows:
- The left subtree of a node contains only nodes with keys less than the node's key.
- The right subtree of a node contains only nodes with keys greater than the node's key.
- Both the left and right subtrees must also be binary search trees.
Complexity for this solution:
O(n) time; O(h) auxiliary space for the recursion stack (O(n) in the worst case)
Source: https://www.youtube.com/watch?v=ofuXorE-JKE
"""
# Node implementation --------------------------------------------------------
class newNode:
    # Constructor to create a new node
def __init__(self, data):
self.data = data
self.left = None
self.right = None
# Solution -------------------------------------------------------------------
def is_valid_BST(root):
return helper(root, float("-inf"), float("inf"))
def helper(root, min_value, max_value):
if root is None:
return True
    # a key must lie strictly inside the (min_value, max_value) window
    if root.data <= min_value or root.data >= max_value:
        return False
valid_left = helper(root.left, min_value, root.data)
valid_right = helper(root.right, root.data, max_value)
return valid_left and valid_right
# Testing --------------------------------------------------------------------
def main():
# Test 1: Empty tree
tree = None
if is_valid_BST(tree):
print("Passed test 1 (emtpy tree).")
else:
print("Test 1 (empty tree) failed!")
# Test 2: Only root node
tree = newNode(1)
if is_valid_BST(tree):
print("Passed test 2 (only root node).")
else:
print("Test 2 (only root node) failed!")
# Test 3: Valid BST
    tree = newNode(4)
    tree.left = newNode(2)
    tree.right = newNode(6)
    tree.left.left = newNode(1)
    tree.right.left = newNode(5)
    tree.right.right = newNode(9)
if is_valid_BST(tree):
print("Passed test 3 (valid tree).")
else:
print("Test 3 (valid tree) failed!")
# Test 4: Non-valid BST
tree = newNode(2)
tree.left = newNode(1)
tree.right = newNode(3)
tree.left.left = newNode(0)
tree.right.left = newNode(1)
tree.right.right = newNode(9)
if not is_valid_BST(tree):
print("Passed test 4 (non-valid tree).")
else:
print("Test 4 (non-valid tree) failed!")
if __name__ == '__main__':
main() | [
"[email protected]"
]
| |
c394f35d81a2eb6ac4c455dd44b7add384a8b18b | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/84/usersdata/203/57032/submittedfiles/lista1.py | 4fe9d85ddd31f1891fa346e0bba2e39623b993ce | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 360 | py | # -*- coding: utf-8 -*-
n=int(input('tamanho da lista: '))
l=[]
somai=0
qi=0
somap=0
qp=0
for i in range (1,n+1,1):
a=int(input('elemento da lista: '))
l.append(a)
for i in range (1,len(lista),1):
if l(i)%2==0:
somap=somap+l(i)
qp=qp+1
else:
somai=somai+l(i)
qi=qi+1
print(somai)
print(somap)
print(qi)
print(qp) | [
"[email protected]"
]
| |
9166a5025b83503317fc99cf5620f56acadc063c | 35fb652b0b20e7352cacdc078e23464fad40ccf3 | /web/controllers/food/food.py | ed79027f6fa2230bee2cb9150725d18254385a43 | []
| no_license | xiaoheng14/flask_wx_order | 52f8fe01a473855c22a43c2651b102c291dbde04 | be3314fdb0266eecf4ca7f5a55b2ea24078857c9 | refs/heads/master | 2020-08-23T03:59:19.006943 | 2018-11-19T12:21:25 | 2018-11-19T12:21:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 615 | py | # _*_ coding: utf-8 _*_
"""
__author__ = 'lawtech'
__date__ = '2018/10/27 3:14 PM'
"""
from flask import Blueprint
from common.libs.helper import ops_render
route_food = Blueprint('food_page', __name__)
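# A sketch of how this blueprint is presumably registered in the app factory
# (the registration call and the "/food" URL prefix are assumptions; they are
# not shown in this file):
#   app.register_blueprint(route_food, url_prefix="/food")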
@route_food.route("/index")
def index():
return ops_render("food/index.html")
@route_food.route("/info")
def info():
return ops_render("food/info.html")
@route_food.route("/set")
def set():
return ops_render("food/set.html")
@route_food.route("/cat")
def cat():
return ops_render("food/cat.html")
@route_food.route("/cat-set")
def catSet():
return ops_render("food/cat_set.html")
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.