Dataset columns (type, with observed minimum .. maximum, or number of distinct values):

  blob_id                 stringlengths    40 .. 40
  directory_id            stringlengths    40 .. 40
  path                    stringlengths    3 .. 616
  content_id              stringlengths    40 .. 40
  detected_licenses       sequencelengths  0 .. 112
  license_type            stringclasses    2 values
  repo_name               stringlengths    5 .. 115
  snapshot_id             stringlengths    40 .. 40
  revision_id             stringlengths    40 .. 40
  branch_name             stringclasses    777 values
  visit_date              timestamp[us]    2015-08-06 10:31:46 .. 2023-09-06 10:44:38
  revision_date           timestamp[us]    1970-01-01 02:38:32 .. 2037-05-03 13:00:00
  committer_date          timestamp[us]    1970-01-01 02:38:32 .. 2023-09-06 01:08:06
  github_id               int64            4.92k .. 681M
  star_events_count       int64            0 .. 209k
  fork_events_count       int64            0 .. 110k
  gha_license_id          stringclasses    22 values
  gha_event_created_at    timestamp[us]    2012-06-04 01:52:49 .. 2023-09-14 21:59:50
  gha_created_at          timestamp[us]    2008-05-22 07:58:19 .. 2023-08-21 12:35:19
  gha_language            stringclasses    149 values
  src_encoding            stringclasses    26 values
  language                stringclasses    1 value
  is_vendor               bool             2 classes
  is_generated            bool             2 classes
  length_bytes            int64            3 .. 10.2M
  extension               stringclasses    188 values
  content                 stringlengths    3 .. 10.2M
  authors                 sequencelengths  1 .. 1
  author_id               stringlengths    1 .. 132
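For orientation, here is a minimal sketch of how rows with this schema could be streamed and inspected with the Hugging Face `datasets` library. The dataset path "bigcode/the-stack" is an assumption for illustration only, since this dump does not name the dataset:

from datasets import load_dataset

# Hypothetical dataset path; substitute the actual dataset repository.
# Streaming avoids a full download: single 'content' values can reach 10.2 MB.
ds = load_dataset("bigcode/the-stack", split="train", streaming=True)

for row in ds.take(3):
    # Each record pairs a source file's content with repository provenance metadata.
    print(row["repo_name"], row["path"], row["license_type"], row["length_bytes"])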
c7bb1673981cf1ed24dc3268493e926dbed7325e
2455062787d67535da8be051ac5e361a097cf66f
/Producers/BSUB/TrigProd_amumu_a5_dR5/trigger_amumu_producer_cfg_TrigProd_amumu_a5_dR5_738.py
9e8620b81cb1403077f96d97129d6c9a48873c8f
[]
no_license
kmtos/BBA-RecoLevel
6e153c08d5ef579a42800f6c11995ee55eb54846
367adaa745fbdb43e875e5ce837c613d288738ab
refs/heads/master
2021-01-10T08:33:45.509687
2015-12-04T09:20:14
2015-12-04T09:20:14
43,355,189
0
0
null
null
null
null
UTF-8
Python
false
false
3,360
py
import FWCore.ParameterSet.Config as cms

process = cms.Process("PAT")

#process.load("BBA/Analyzer/bbaanalyzer_cfi")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load('Configuration.EventContent.EventContent_cff')
process.load("Configuration.Geometry.GeometryRecoDB_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("PhysicsTools.PatAlgos.producersLayer1.patCandidates_cff")
process.load("PhysicsTools.PatAlgos.selectionLayer1.selectedPatCandidates_cff")

from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'MCRUN2_71_V1::All', '')

process.load("Configuration.StandardSequences.MagneticField_cff")

####################
#  Message Logger
####################
process.MessageLogger.cerr.FwkReport.reportEvery = cms.untracked.int32(100)

process.options = cms.untracked.PSet(wantSummary = cms.untracked.bool(True))
process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(-1))

## switch to unscheduled mode
process.options.allowUnscheduled = cms.untracked.bool(True)

process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(500))

####################
#  Input File List
####################
# Input source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('root://eoscms//eos/cms/store/user/ktos/RECO_Step3_amumu_a5/RECO_Step3_amumu_a5_738.root'),
    secondaryFileNames = cms.untracked.vstring()
)

############################################################
# Defining matching in DeltaR, sorting by best DeltaR
############################################################
process.mOniaTrigMatch = cms.EDProducer("PATTriggerMatcherDRLessByR",
    src = cms.InputTag('slimmedMuons'),
    matched = cms.InputTag('patTrigger'),
    # selections of trigger objects
    matchedCuts = cms.string('type( "TriggerMuon" ) && path( "HLT_Mu16_TkMu0_dEta18_Onia*")'),
    # input does not yet have the 'saveTags' parameter in HLT
    maxDPtRel = cms.double(0.5),    # no effect here
    maxDeltaR = cms.double(0.3),    # selection of matches
    maxDeltaEta = cms.double(0.2),  # no effect here
    resolveAmbiguities = cms.bool(True),     # definition of matcher output
    resolveByMatchQuality = cms.bool(True)   # definition of matcher output
)

# talk to output module
process.out = cms.OutputModule("PoolOutputModule",
    fileName = cms.untracked.string("file:RECO_Step3_amumu_a5_TrigProd_738.root"),
    outputCommands = process.MINIAODSIMEventContent.outputCommands
)
process.out.outputCommands += [
    'drop *_*_*_*',
    'keep *_*slimmed*_*_*',
    'keep *_pfTausEI_*_*',
    'keep *_hpsPFTauProducer_*_*',
    'keep *_hltTriggerSummaryAOD_*_*',
    'keep *_TriggerResults_*_HLT',
    'keep *_patTrigger*_*_*',
    'keep *_prunedGenParticles_*_*',
    'keep *_mOniaTrigMatch_*_*'
]

################################################################################
# Running the matching and setting the trigger on
################################################################################
from PhysicsTools.PatAlgos.tools.trigTools import *
switchOnTrigger(process)  # This is optional and can be omitted.
switchOnTriggerMatching(process, triggerMatchers = ['mOniaTrigMatch'])

process.outpath = cms.EndPath(process.out)
5196d1e64b5fb81e24e750fef2df4b21bee75646
479696ed99f10e449308bf3379e8b3d167365ebe
/spa/migrations/0006_auto__chg_field_mix_title.py
dfb4bf030f46410d19c07ad244be39c232dd2b12
[ "BSD-2-Clause" ]
permissive
fergalmoran/dss
7c690ba0b858c3d7d115af54655954ecee64407e
684fb4030e33212c3ecde774ca86cb74a1ffc8ac
refs/heads/master
2021-07-06T02:46:49.064282
2015-01-04T22:19:31
2015-01-04T22:19:31
5,353,985
0
0
BSD-2-Clause
2021-06-10T17:35:38
2012-08-09T10:03:17
JavaScript
UTF-8
Python
false
false
16,256
py
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Changing field 'Mix.title'
        db.alter_column(u'spa_mix', 'title', self.gf('django.db.models.fields.CharField')(max_length=150))

    def backwards(self, orm):
        # Changing field 'Mix.title'
        db.alter_column(u'spa_mix', 'title', self.gf('django.db.models.fields.CharField')(max_length=50))

    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'spa._activity': {
            'Meta': {'object_name': '_Activity'},
            'date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'uid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
        },
        'spa._lookup': {
            'Meta': {'object_name': '_Lookup'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'spa.chatmessage': {
            'Meta': {'object_name': 'ChatMessage'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'message': ('django.db.models.fields.TextField', [], {}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'chat_messages'", 'null': 'True', 'to': "orm['spa.UserProfile']"})
        },
        'spa.comment': {
            'Meta': {'object_name': 'Comment'},
            'comment': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['spa.Mix']"}),
            'time_index': ('django.db.models.fields.IntegerField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
        },
        'spa.event': {
            'Meta': {'object_name': 'Event'},
            'attendees': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'attendees'", 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
            'date_created': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 24, 0, 0)'}),
            'event_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 24, 0, 0)'}),
            'event_description': ('tinymce.views.HTMLField', [], {}),
            'event_recurrence': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.Recurrence']"}),
            'event_time': ('django.db.models.fields.TimeField', [], {'default': 'datetime.datetime(2013, 4, 24, 0, 0)'}),
            'event_title': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
            'event_venue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.Venue']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'spa.genre': {
            'Meta': {'object_name': 'Genre'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'})
        },
        'spa.label': {
            'Meta': {'object_name': 'Label'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'spa.mix': {
            'Meta': {'object_name': 'Mix'},
            'description': ('django.db.models.fields.TextField', [], {}),
            'download_allowed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'download_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'duration': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'genres': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['spa.Genre']", 'symmetrical': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_featured': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'local_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
            'mix_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'stream_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'uid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '38', 'blank': 'True'}),
            'upload_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2013, 4, 24, 0, 0)'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.UserProfile']"}),
            'waveform_generated': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'spa.mixdownload': {
            'Meta': {'object_name': 'MixDownload', '_ormbases': ['spa._Activity']},
            u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Activity']", 'unique': 'True', 'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'downloads'", 'to': "orm['spa.Mix']"})
        },
        'spa.mixfavourite': {
            'Meta': {'object_name': 'MixFavourite', '_ormbases': ['spa._Activity']},
            u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Activity']", 'unique': 'True', 'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'favourites'", 'to': "orm['spa.Mix']"})
        },
        'spa.mixlike': {
            'Meta': {'object_name': 'MixLike', '_ormbases': ['spa._Activity']},
            u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Activity']", 'unique': 'True', 'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'likes'", 'to': "orm['spa.Mix']"})
        },
        'spa.mixplay': {
            'Meta': {'object_name': 'MixPlay', '_ormbases': ['spa._Activity']},
            u'_activity_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Activity']", 'unique': 'True', 'primary_key': 'True'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'plays'", 'to': "orm['spa.Mix']"})
        },
        'spa.purchaselink': {
            'Meta': {'object_name': 'PurchaseLink'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'provider': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'track': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'purchase_link'", 'to': "orm['spa.Tracklist']"}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        'spa.recurrence': {
            'Meta': {'object_name': 'Recurrence', '_ormbases': ['spa._Lookup']},
            u'_lookup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['spa._Lookup']", 'unique': 'True', 'primary_key': 'True'})
        },
        'spa.release': {
            'Meta': {'object_name': 'Release'},
            'embed_code': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'release_artist': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'release_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 4, 24, 0, 0)'}),
            'release_description': ('django.db.models.fields.TextField', [], {}),
            'release_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'release_label': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.Label']"}),
            'release_title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['spa.UserProfile']"})
        },
        'spa.releaseaudio': {
            'Meta': {'object_name': 'ReleaseAudio'},
            'description': ('django.db.models.fields.TextField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'local_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'release': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'release_audio'", 'null': 'True', 'to': "orm['spa.Release']"})
        },
        'spa.tracklist': {
            'Meta': {'object_name': 'Tracklist'},
            'artist': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'index': ('django.db.models.fields.SmallIntegerField', [], {}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'mix': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tracklist'", 'to': "orm['spa.Mix']"}),
            'remixer': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'timeindex': ('django.db.models.fields.TimeField', [], {'null': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'spa.userfollows': {
            'Meta': {'object_name': 'UserFollows'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user_from': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'followers'", 'unique': 'True', 'to': "orm['spa.UserProfile']"}),
            'user_to': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'following'", 'unique': 'True', 'to': "orm['spa.UserProfile']"})
        },
        'spa.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'activity_sharing': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'activity_sharing_networks': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'avatar_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'social'", 'max_length': '15'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
            'display_name': ('django.db.models.fields.CharField', [], {'max_length': '35', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'default': 'None', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
        },
        'spa.venue': {
            'Meta': {'object_name': 'Venue'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
            'venue_address': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
            'venue_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'venue_name': ('django.db.models.fields.CharField', [], {'max_length': '250'})
        }
    }

    complete_apps = ['spa']
66ac2692352f0a5f791832e0e94b339c0114130d
578bdcf2720805c1075ba348764983d99031911f
/Udacity/Hackerrank/Python Generators/prime_number.py
0cd0ae59c44eb3425187659a67b59dfde8620d90
[]
no_license
mrudula-pb/Python_Code
994de4720289ded0a55017407d27b1d0f0b08c65
0dcdc6589d3c614bd1e6a03aa5c2b55664b9e6b2
refs/heads/master
2023-03-25T16:52:27.420925
2021-03-22T21:40:37
2021-03-22T21:40:37
350,476,487
0
0
null
null
null
null
UTF-8
Python
false
false
195
py
def check_prime(number):
    # Numbers below 2 are not prime.
    if number < 2:
        return False
    for divisor in range(2, int(number ** 0.5) + 1):
        if number % divisor == 0:
            return False
    return True


value = check_prime(2)
print(value)
6ab7cf510d5d31d1893060487705a171360b9037
fab14fae2b494068aa793901d76464afb965df7e
/benchmarks/f3_wrong_hints/scaling_ltl_infinite_state/18-extending_bound_36.py
e8c0d69e04efe42fc7f242700e8ef7a444fd8601
[ "MIT" ]
permissive
teodorov/F3
673f6f9ccc25acdfdecbfc180f439253474ba250
c863215c318d7d5f258eb9be38c6962cf6863b52
refs/heads/master
2023-08-04T17:37:38.771863
2021-09-16T07:38:28
2021-09-16T07:38:28
null
0
0
null
null
null
null
UTF-8
Python
false
false
10,424
py
from typing import Tuple, FrozenSet
from collections import Iterable

from mathsat import msat_term, msat_env
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_integer_type, msat_get_rational_type, msat_get_bool_type
from mathsat import msat_make_and, msat_make_not, msat_make_or
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus
from pysmt.environment import Environment as PysmtEnv
import pysmt.typing as types

from ltl.ltl import TermMap, LTLEncoder
from utils import name_next, symb_to_next
from hint import Hint, Location


def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    geq = msat_make_geq(menv, arg0, arg1)
    return msat_make_not(menv, geq)


def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
    return msat_make_leq(menv, arg1, arg0)


def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    leq = msat_make_leq(menv, arg0, arg1)
    return msat_make_not(menv, leq)


def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
    n_arg0 = msat_make_not(menv, arg0)
    return msat_make_or(menv, n_arg0, arg1)


def check_ltl(menv: msat_env, enc: LTLEncoder) -> Tuple[Iterable, msat_term, msat_term, msat_term]:
    assert menv
    assert isinstance(menv, msat_env)
    assert enc
    assert isinstance(enc, LTLEncoder)
    bool_type = msat_get_bool_type(menv)
    real_type = msat_get_rational_type(menv)
    i = msat_declare_function(menv, "i", real_type)
    i = msat_make_constant(menv, i)
    r = msat_declare_function(menv, "r", real_type)
    r = msat_make_constant(menv, r)
    l = msat_declare_function(menv, "l", real_type)
    l = msat_make_constant(menv, l)
    inc_i = msat_declare_function(menv, "inc_i", bool_type)
    inc_i = msat_make_constant(menv, inc_i)
    x_i = msat_declare_function(menv, name_next("i"), real_type)
    x_i = msat_make_constant(menv, x_i)
    x_r = msat_declare_function(menv, name_next("r"), real_type)
    x_r = msat_make_constant(menv, x_r)
    x_l = msat_declare_function(menv, name_next("l"), real_type)
    x_l = msat_make_constant(menv, x_l)
    x_inc_i = msat_declare_function(menv, name_next("inc_i"), bool_type)
    x_inc_i = msat_make_constant(menv, x_inc_i)
    curr2next = {i: x_i, r: x_r, l: x_l, inc_i: x_inc_i}
    zero = msat_make_number(menv, "0")
    one = msat_make_number(menv, "1")
    r_gt_0 = msat_make_gt(menv, r, zero)
    r_lt_l = msat_make_lt(menv, r, l)
    i_geq_0 = msat_make_geq(menv, i, zero)
    init = msat_make_and(menv, r_gt_0, r_lt_l)
    init = msat_make_and(menv, init,
                         msat_make_and(menv, i_geq_0,
                                       msat_make_not(menv, inc_i)))
    init = msat_make_and(menv, init, msat_make_gt(menv, l, zero))
    # r' = r
    trans = msat_make_equal(menv, x_r, r)
    # i < l -> ((inc_i' & i' = i + 1) | (!inc_i' & i' = i)) & l' = l
    i_lt_l = msat_make_lt(menv, i, l)
    x_i_eq_i_p_1 = msat_make_and(menv, x_inc_i,
                                 msat_make_equal(menv, x_i,
                                                 msat_make_plus(menv, i, one)))
    x_i_eq_i = msat_make_and(menv, msat_make_not(menv, x_inc_i),
                             msat_make_equal(menv, x_i, i))
    x_i_eq_i_p_1_or_i = msat_make_or(menv, x_i_eq_i_p_1, x_i_eq_i)
    x_l_eq_l = msat_make_equal(menv, x_l, l)
    x_i_eq_i_p_1_or_i_and_x_l_eq_l = msat_make_and(menv, x_i_eq_i_p_1_or_i,
                                                   x_l_eq_l)
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_lt_l,
                                         x_i_eq_i_p_1_or_i_and_x_l_eq_l))
    # i >= l -> i' = 0 & l' = l + 1 & !inc_i'
    i_geq_l = msat_make_geq(menv, i, l)
    x_i_eq_0 = msat_make_equal(menv, x_i, zero)
    x_l_eq_l_p_1 = msat_make_equal(menv, x_l, msat_make_plus(menv, l, one))
    x_i_eq_0_and_x_l_eq_l_p_1 = msat_make_and(menv,
                                              msat_make_and(menv, x_i_eq_0,
                                                            x_l_eq_l_p_1),
                                              msat_make_not(menv, x_inc_i))
    trans = msat_make_and(menv, trans,
                          msat_make_impl(menv, i_geq_l,
                                         x_i_eq_0_and_x_l_eq_l_p_1))
    # (G F inc_i) -> ! G F r > i
    G_F_x_i_gt_i = enc.make_G(enc.make_F(inc_i))
    r_gt_i = msat_make_gt(menv, r, i)
    n_G_F_r_gt_i = msat_make_not(menv, enc.make_G(enc.make_F(r_gt_i)))
    ltl = msat_make_impl(menv, G_F_x_i_gt_i, n_G_F_r_gt_i)
    return TermMap(curr2next), init, trans, ltl


def hints(env: PysmtEnv) -> FrozenSet[Hint]:
    assert isinstance(env, PysmtEnv)
    mgr = env.formula_manager
    i = mgr.Symbol("i", types.REAL)
    r = mgr.Symbol("r", types.REAL)
    l = mgr.Symbol("l", types.REAL)
    inc_i = mgr.Symbol("inc_i", types.BOOL)
    symbs = frozenset([i, r, l, inc_i])
    x_i = symb_to_next(mgr, i)
    x_r = symb_to_next(mgr, r)
    x_l = symb_to_next(mgr, l)
    x_inc_i = symb_to_next(mgr, inc_i)
    res = []
    n0 = mgr.Real(0)
    n1 = mgr.Real(1)

    stutter = mgr.Equals(x_i, i)
    loc = Location(env, mgr.GE(i, n0), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_i, mgr.Plus(i, n1)))
    h_i = Hint("h_i0", env, frozenset([i]), symbs)
    h_i.set_locs([loc])
    res.append(h_i)

    loc = Location(env, mgr.GE(r, n0))
    loc.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
    h_r = Hint("h_r0", env, frozenset([r]), symbs)
    h_r.set_locs([loc])
    res.append(h_r)

    loc = Location(env, inc_i)
    loc.set_progress(0, x_inc_i)
    h_inc = Hint("h_inc0", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc])
    res.append(h_inc)

    stutter = mgr.Equals(x_i, i)
    loc = Location(env, mgr.LE(i, n0), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_i, mgr.Minus(i, n1)))
    h_i = Hint("h_i1", env, frozenset([i]), symbs)
    h_i.set_locs([loc])
    res.append(h_i)

    loc = Location(env, mgr.LE(r, n0))
    loc.set_progress(0, mgr.Equals(x_r, mgr.Minus(r, n1)))
    h_r = Hint("h_r1", env, frozenset([r]), symbs)
    h_r.set_locs([loc])
    res.append(h_r)

    loc = Location(env, mgr.LE(l, n0))
    loc.set_progress(0, mgr.Equals(x_l, mgr.Minus(l, n1)))
    h_l = Hint("h_l1", env, frozenset([l]), symbs)
    h_l.set_locs([loc])
    res.append(h_l)

    loc = Location(env, mgr.Not(inc_i))
    loc.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc1", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc])
    res.append(h_inc)

    loc0 = Location(env, mgr.GE(i, n0))
    loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
    loc1 = Location(env, mgr.GE(i, n0))
    loc1.set_progress(0, mgr.Equals(x_i, i))
    h_i = Hint("h_i2", env, frozenset([i]), symbs)
    h_i.set_locs([loc0, loc1])
    res.append(h_i)

    loc0 = Location(env, mgr.GE(r, n0))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
    h_r = Hint("h_r2", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1])
    res.append(h_r)

    loc0 = Location(env, mgr.GE(l, n0))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l2", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1])
    res.append(h_l)

    loc0 = Location(env, mgr.Not(inc_i))
    loc0.set_progress(1, x_inc_i)
    loc1 = Location(env, inc_i)
    loc1.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc2", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc0, loc1])
    res.append(h_inc)

    loc0 = Location(env, mgr.GE(i, n0), mgr.GE(l, n0),
                    stutterT=mgr.Equals(x_i, mgr.Plus(i, l)))
    loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
    loc1 = Location(env, mgr.GE(i, n0))
    loc1.set_progress(0, mgr.Equals(x_i, i))
    h_i = Hint("h_i3", env, frozenset([i]), symbs)
    h_i.set_locs([loc0, loc1])
    res.append(h_i)

    loc0 = Location(env, mgr.GE(r, n0), mgr.GE(i, n0),
                    stutterT=mgr.Equals(x_r, mgr.Plus(r, i)))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(0, mgr.Equals(x_r, mgr.Plus(r, n1)))
    h_r = Hint("h_r3", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1])
    res.append(h_r)

    loc0 = Location(env, mgr.GE(l, n0), mgr.GE(r, n0),
                    stutterT=mgr.Equals(x_l, mgr.Plus(l, r)))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l3", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1])
    res.append(h_l)

    loc0 = Location(env, mgr.GE(i, n0))
    loc0.set_progress(1, mgr.Equals(x_i, mgr.Plus(i, n1)))
    loc1 = Location(env, mgr.GE(i, n0))
    loc1.set_progress(2, mgr.Equals(x_i, i))
    loc2 = Location(env, mgr.GE(i, n0))
    loc2.set_progress(0, mgr.Equals(x_i, i))
    h_i = Hint("h_i4", env, frozenset([i]), symbs)
    h_i.set_locs([loc0, loc1, loc2])
    res.append(h_i)

    loc0 = Location(env, mgr.GE(r, n0))
    loc0.set_progress(1, mgr.Equals(x_r, r))
    loc1 = Location(env, mgr.GE(r, n0))
    loc1.set_progress(2, mgr.Equals(x_r, mgr.Plus(r, n1)))
    loc2 = Location(env, mgr.GE(r, n0))
    loc2.set_progress(0, mgr.Equals(x_r, r))
    h_r = Hint("h_r4", env, frozenset([r]), symbs)
    h_r.set_locs([loc0, loc1, loc2])
    res.append(h_r)

    loc0 = Location(env, mgr.GE(l, n0))
    loc0.set_progress(1, mgr.Equals(x_l, mgr.Plus(l, n1)))
    loc1 = Location(env, mgr.GE(l, n0))
    loc1.set_progress(2, mgr.Equals(x_l, l))
    loc2 = Location(env, mgr.GE(l, n0))
    loc2.set_progress(0, mgr.Equals(x_l, l))
    h_l = Hint("h_l4", env, frozenset([l]), symbs)
    h_l.set_locs([loc0, loc1, loc2])
    res.append(h_l)

    loc0 = Location(env, mgr.Not(inc_i))
    loc0.set_progress(1, x_inc_i)
    loc1 = Location(env, inc_i, stutterT=x_inc_i)
    loc1.set_progress(2, mgr.Not(x_inc_i))
    loc2 = Location(env, mgr.Not(inc_i))
    loc2.set_progress(0, mgr.Not(x_inc_i))
    h_inc = Hint("h_inc4", env, frozenset([inc_i]), symbs)
    h_inc.set_locs([loc0, loc1, loc2])
    res.append(h_inc)

    return frozenset(res)
f8357c5fbc9d1cc8439c6f4dcde1207b8d795b57
29f6b4804f06b8aabccd56fd122b54e4d556c59a
/CodeAnalysis/SourceMeter_Interface/SourceMeter-8.2.0-x64-linux/Python/Tools/python/astroid/tests/resources.py
03d45623fecda846c17a7b4088d054413c44c475
[ "Apache-2.0" ]
permissive
obahy/Susereum
6ef6ae331c7c8f91d64177db97e0c344f62783fa
56e20c1777e0c938ac42bd8056f84af9e0b76e46
refs/heads/master
2020-03-27T11:52:28.424277
2018-12-12T02:53:47
2018-12-12T02:53:47
146,511,286
3
2
Apache-2.0
2018-12-05T01:34:17
2018-08-28T21:57:59
HTML
UTF-8
Python
false
false
2,373
py
# Copyright 2014 Google, Inc. All rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
import os
import sys

import pkg_resources

from astroid import builder
from astroid import MANAGER
from astroid.bases import BUILTINS

DATA_DIR = 'testdata/python{}/'.format(sys.version_info[0])


def find(name):
    return pkg_resources.resource_filename(
        'astroid.tests',
        os.path.normpath(os.path.join(DATA_DIR, name)))


def build_file(path, modname=None):
    return builder.AstroidBuilder().file_build(find(path), modname)


class SysPathSetup(object):
    def setUp(self):
        sys.path.insert(0, find(''))

    def tearDown(self):
        del sys.path[0]
        datadir = find('')
        for key in list(sys.path_importer_cache):
            if key.startswith(datadir):
                del sys.path_importer_cache[key]


class AstroidCacheSetupMixin(object):
    """Mixin for handling the astroid cache problems.

    When clearing the astroid cache, some tests fails due to
    cache inconsistencies, where some objects had a different
    builtins object referenced.
    This saves the builtins module and makes sure to add it
    back to the astroid_cache after the tests finishes.
    The builtins module is special, since some of the
    transforms for a couple of its objects (str, bytes etc)
    are executed only once, so astroid_bootstrapping will be
    useless for retrieving the original builtins module.
    """

    @classmethod
    def setUpClass(cls):
        cls._builtins = MANAGER.astroid_cache.get(BUILTINS)

    @classmethod
    def tearDownClass(cls):
        if cls._builtins:
            MANAGER.astroid_cache[BUILTINS] = cls._builtins
10312512ec5bb50d42a17331847c87905d872535
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2129/60792/251992.py
bb252099bb4818ca24690d96fb3c2ca68175212b
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
123
py
n = int(input())
count = 0
while n != 1:
    if n % 2 == 0:
        n = n // 2
    else:
        n = n - 1
    count += 1
print(count)
2b8be2aeed918c270e1676da965fe5fdcb587c62
368c66467b78adf62da04cb0b8cedd2ef37bb127
/BOJ/Python/5430_AC.py
99d3c6a86c437339ec939eb8199c2b0eed1a0f4d
[]
no_license
DJHyun/Algorithm
c8786ddcd8b5693fc9b3b4721fdf1eeda21611c5
fd6ae800886dac4ec5ff6cf2618bc2c839a76e7a
refs/heads/master
2020-07-30T16:32:49.344329
2020-02-25T07:59:34
2020-02-25T07:59:34
210,289,983
0
0
null
null
null
null
UTF-8
Python
false
false
1,504
py
# baekjoon source = "https://www.acmicpc.net/problem/5430"
import sys

T = int(sys.stdin.readline())
for test_case in range(T):
    meto = sys.stdin.readline().strip()
    count = int(sys.stdin.readline())
    len_meto = len(meto)
    first = -1
    c = sys.stdin.readline()
    len_c = len(c)
    c = c[1:len_c - 2].split(',')
    rear = count - 1
    if count == 0:
        if 'D' in meto:
            print('error')
        else:
            print('[]')
    else:
        flag = True
        for i in range(len_meto):
            if meto[i] == 'D':
                if first == rear:
                    print('error')
                    break
                if flag:
                    first += 1
                    c[first] = 0
                else:
                    c[rear] = 0
                    rear -= 1
            else:
                if flag:
                    flag = False
                else:
                    flag = True
        else:
            print('[', end='')
            if flag:
                for j in range(first + 1, rear + 1):
                    if j != rear:
                        print(c[j] + ',', end='')
                    else:
                        print(c[j], end='')
                print(']')
            else:
                for j in range(rear, first, -1):
                    if j != first + 1:
                        print(c[j] + ',', end='')
                    else:
                        print(c[j], end='')
                print(']')
30e1c976566bb28599db9f3287b764540219faef
30a37ab89a4a8101fb53308301628e8a7458d1fe
/test/functional/mempool_limit.py
cec38f48e5de886c44c4177d801a91ffd1a9f984
[ "MIT" ]
permissive
BFBCOIN/bfbcoin-core
1001e55f54a073ac645443c40fd5c7e6d117c07c
7c3b6dcc8e63f8041331846e0d8230c8db059e23
refs/heads/master
2020-04-03T15:31:05.749852
2018-10-30T11:36:00
2018-10-30T11:36:00
155,365,385
2
0
null
null
null
null
UTF-8
Python
false
false
2,016
py
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Copyright (c) 2017 The bfb Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool limiting together/eviction with the wallet."""

from test_framework.test_framework import bfbTestFramework
from test_framework.util import *


class MempoolLimitTest(bfbTestFramework):
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1
        self.extra_args = [["-maxmempool=5", "-spendzeroconfchange=0"]]

    def run_test(self):
        txouts = gen_return_txouts()
        relayfee = self.nodes[0].getnetworkinfo()['relayfee']

        txids = []
        utxos = create_confirmed_utxos(relayfee, self.nodes[0], 91)

        # create a mempool tx that will be evicted
        us0 = utxos.pop()
        inputs = [{"txid": us0["txid"], "vout": us0["vout"]}]
        outputs = {self.nodes[0].getnewaddress(): 0.0001}
        tx = self.nodes[0].createrawtransaction(inputs, outputs)
        self.nodes[0].settxfee(relayfee)  # specifically fund this tx with low fee
        txF = self.nodes[0].fundrawtransaction(tx)
        self.nodes[0].settxfee(0)  # return to automatic fee selection
        txFS = self.nodes[0].signrawtransaction(txF['hex'])
        txid = self.nodes[0].sendrawtransaction(txFS['hex'])

        relayfee = self.nodes[0].getnetworkinfo()['relayfee']
        base_fee = relayfee * 100
        for i in range(3):
            txids.append([])
            txids[i] = create_lots_of_big_transactions(self.nodes[0], txouts, utxos[30*i:30*i+30], 30, (i+1)*base_fee)

        # by now, the tx should be evicted, check confirmation state
        assert(txid not in self.nodes[0].getrawmempool())
        txdata = self.nodes[0].gettransaction(txid)
        assert(txdata['confirmations'] == 0)  # confirmation should still be 0


if __name__ == '__main__':
    MempoolLimitTest().main()
1e7107daf91ac3e6cc7dc2170334bdc94649cb89
6b2a8dd202fdce77c971c412717e305e1caaac51
/solutions_5706278382862336_0/Python/AlonH/2014C1A.py
c5e1e86cdaf0c09e44104a4a883bdd4b1e7750b8
[]
no_license
alexandraback/datacollection
0bc67a9ace00abbc843f4912562f3a064992e0e9
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
refs/heads/master
2021-01-24T18:27:24.417992
2017-05-23T09:23:38
2017-05-23T09:23:38
84,313,442
2
4
null
null
null
null
UTF-8
Python
false
false
571
py
import math

f = open("A-small-attempt0.in", "r")
o = open("A-small-answers.txt", "w")
T = int(f.readline())
for t in range(1, T+1):
    inp = [float(a) for a in f.readline().split("/")]
    p = inp[0]
    q = inp[1]
    print(p, "/", q)
    b = int(math.log(q, 2))
    print(b)
    p = p/(q/(2**b))
    print(p)
    a = int(math.log(p, 2))
    if p % 1 != 0:
        o.write("Case #"+str(t)+": impossible"+"\n")
    else:
        o.write("Case #"+str(t)+": "+str(b-a)+"\n")
o.close()
#o.write("Case #"+str(t)+": NOT POSSIBLE"+"\n")
#A-small-attempt0.in
916921a156a5df7facf6056f5dbcab774da038e9
0e0cf67455424e68705c428fc2a5cd71e74879f6
/practico_03/ejercicio_04.py
378e079e9a55c528232d49537c3db72bfe3d1fce
[]
no_license
JoacoDiPerna/frro-soporte-2019-12
cf43a3a6f722350891051816aac9d7e50a91add4
f918c094346ba350c0672596fe316c60ae8fdc7c
refs/heads/master
2020-04-29T00:29:57.757014
2019-08-09T19:39:30
2019-08-09T19:39:30
175,695,790
1
0
null
2019-08-09T19:39:32
2019-03-14T20:38:41
Python
UTF-8
Python
false
false
1,118
py
# Implement the function buscar_persona, which returns a person's record based on their id.
# The return value is a tuple containing the fields: id, nombre, nacimiento, dni and altura.
# If no record is found, it returns False.

import datetime

from practico_03.ejercicio_01 import create_connection
from practico_03.ejercicio_01 import reset_tabla
from practico_03.ejercicio_02 import agregar_persona
from getpass import getuser


def buscar_persona(id_persona):
    conn = create_connection(
        'C:\\Users\\' + getuser() + '\\Desktop\\tps_python.db')
    sql = "SELECT * FROM personas WHERE id_persona=? ORDER BY id_persona ASC"
    cur = conn.cursor()
    cur.execute(sql, (id_persona,))
    rows = cur.fetchall()
    cur.close()
    conn.commit()
    conn.close()
    return False if not rows else rows[0]


@reset_tabla
def pruebas():
    juan = buscar_persona(agregar_persona(
        'juan perez', datetime.datetime(1988, 5, 15), 32165498, 180))
    assert juan == (1, 'juan perez', '1988-05-15', 32165498, 180)
    assert buscar_persona(12345) is False


if __name__ == '__main__':
    pruebas()
b67611e1022940616dfa02c1031329d904b7f33e
426742533fc90d9240d01f0d0347a1648cc91430
/freeways/migrations/0002_auto_20150621_0106.py
ddf17e24c9e4270ee812a4c5a5b93968f01785a1
[]
no_license
kdechant/freeways
5093a67ad917cf02fab4706ea21b81b05d41e84a
8a9b73304db06c2423b2729bbe8f72aaa1bff53d
refs/heads/master
2021-09-29T05:39:48.687864
2020-01-08T04:32:42
2020-01-08T04:33:46
37,783,501
0
0
null
2021-06-10T17:35:47
2015-06-20T19:45:57
Python
UTF-8
Python
false
false
1,164
py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('freeways', '0001_initial'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='routesegment',
            name='ring',
        ),
        migrations.AddField(
            model_name='routesegment',
            name='distance_from_origin',
            field=models.DecimalField(editable=False, default=0, max_digits=6, decimal_places=3),
        ),
        migrations.AlterField(
            model_name='routesegment',
            name='geojson',
            field=models.TextField(editable=False, null=True, blank=True),
        ),
        migrations.AlterField(
            model_name='routesegment',
            name='lane_miles',
            field=models.DecimalField(editable=False, default=0, max_digits=5, decimal_places=2),
        ),
        migrations.AlterField(
            model_name='routesegment',
            name='length',
            field=models.DecimalField(editable=False, default=0, max_digits=5, decimal_places=2),
        ),
    ]
b12fd17fff74d25b03c124c311932fb6787afc78
b34808a8571340dcb3d70bd29d59930a6a3e4463
/catalogueapp/views.py
eb6bf54a399a469eece4d25c8cec86c633a415a4
[]
permissive
OpenDataServices/iCAN-Scot-Catalogue
110ee5030b258555a45e9061feb97a5ce031cc48
205cf3e6e8ef984f3f4e3d89537c21bdafb805a1
refs/heads/master
2021-08-04T08:31:15.411708
2020-06-25T09:24:32
2020-06-25T09:34:27
191,113,427
1
1
BSD-3-Clause
2020-06-05T21:15:14
2019-06-10T06:52:06
Python
UTF-8
Python
false
false
6,626
py
from django.shortcuts import render, redirect
from django.http import Http404, JsonResponse
from django.contrib.auth.decorators import permission_required, login_required

from catalogueapp.forms import AddForm, EditOrganisationForm
from catalogueapp.tools import ALISS_URL, ALISS_Importer
from catalogueapp.models import Service, Organisation


def index(request):
    context = {
        'search': request.GET.get('search', ''),
    }
    if context['search']:
        context['organisations'] = Organisation.objects.raw(
            """
            SELECT * FROM (
                SELECT catalogueapp_organisation.*,
                    to_tsvector(
                        catalogueapp_organisation.name || ' ' ||
                        catalogueapp_organisation.description || ' ' ||
                        catalogueapp_organisation.our_description_markdown || ' ' ||
                        array_agg(catalogueapp_service.name)::text || ' ' ||
                        array_agg(catalogueapp_service.description)::text
                    ) AS search_vector
                FROM catalogueapp_organisation
                JOIN catalogueapp_service ON catalogueapp_service.organisation_id = catalogueapp_organisation.id
                WHERE catalogueapp_service.active = '1'
                GROUP BY catalogueapp_organisation.id
                ORDER BY catalogueapp_organisation.name ASC
            ) AS data
            WHERE search_vector @@ to_tsquery(%s)
            """,
            [context['search']]
        )
    else:
        context['organisations'] = Organisation.objects.raw(
            """SELECT catalogueapp_organisation.*
            FROM catalogueapp_organisation
            JOIN catalogueapp_service ON catalogueapp_service.organisation_id = catalogueapp_organisation.id
            WHERE catalogueapp_service.active = '1'
            GROUP BY catalogueapp_organisation.id
            ORDER BY catalogueapp_organisation.name ASC
            """,
        )
    return render(request, 'catalogueapp/index.html', context)


def organisation_index(request, aliss_id):
    context = {
        'organisation': Organisation.objects.get(aliss_id=aliss_id),
    }
    context['services'] = Service.objects.filter(organisation=context['organisation'], active=True)
    if not context['services']:
        raise Http404
    return render(request, 'catalogueapp/organisation/index.html', context)


@permission_required('catalogueapp.catalogueadmin', login_url='/accounts/login/')
def adminindex(request):
    context = {}
    return render(request, 'catalogueapp/admin/index.html', context)


@permission_required('catalogueapp.catalogueadmin', login_url='/accounts/login/')
def admin_add(request):
    context = {}
    if request.method == 'POST':
        context['form'] = AddForm(request.POST)
        if context['form'].is_valid():
            url = ALISS_URL(context['form'].cleaned_data['url'])
            if url.is_service():
                importer = ALISS_Importer()
                service = importer.import_from_service_URL(url)
                importer.update_organisation(service.organisation)
                return redirect('admin_service_index', aliss_id=service.aliss_id)
            else:
                context['form'].add_error('url', "That does not look like a service URL?")
    else:
        context['form'] = AddForm()
    return render(request, 'catalogueapp/admin/add.html', context)


@permission_required('catalogueapp.catalogueadmin', login_url='/accounts/login/')
def admin_service_list(request):
    context = {
        'services': Service.objects.all(),
    }
    return render(request, 'catalogueapp/admin/services.html', context)


@permission_required('catalogueapp.catalogueadmin', login_url='/accounts/login/')
def admin_service_index(request, aliss_id):
    context = {
        'service': Service.objects.get(aliss_id=aliss_id),
    }
    if request.method == 'POST':
        if request.POST['action'] == 'update':
            importer = ALISS_Importer()
            importer.update_service(context['service'])
        elif request.POST['action'] == 'inactive':
            context['service'].active = False
            context['service'].save()
        elif request.POST['action'] == 'active':
            context['service'].active = True
            context['service'].save()
    return render(request, 'catalogueapp/admin/service/index.html', context)


@permission_required('catalogueapp.catalogueadmin', login_url='/accounts/login/')
def admin_organisation_list(request):
    context = {
        'organisations': Organisation.objects.all(),
    }
    return render(request, 'catalogueapp/admin/organisations.html', context)


@permission_required('catalogueapp.catalogueadmin', login_url='/accounts/login/')
def admin_organisation_index(request, aliss_id):
    context = {
        'organisation': Organisation.objects.get(aliss_id=aliss_id),
    }
    context['services'] = Service.objects.filter(organisation=context['organisation'])
    if request.method == 'POST' and request.POST['action'] == 'update':
        importer = ALISS_Importer()
        importer.update_organisation(context['organisation'])
    return render(request, 'catalogueapp/admin/organisation/index.html', context)


@permission_required('catalogueapp.catalogueadmin', login_url='/accounts/login/')
def admin_organisation_edit(request, aliss_id):
    context = {
        'organisation': Organisation.objects.get(aliss_id=aliss_id),
    }
    if request.method == 'POST':
        context['form'] = EditOrganisationForm(request.POST, instance=context['organisation'])
        if context['form'].is_valid():
            context['organisation'].our_description_markdown = context['form'].cleaned_data['our_description_markdown']
            context['organisation'].save()
            return redirect('admin_organisation_index', aliss_id=context['organisation'].aliss_id)
    else:
        context['form'] = EditOrganisationForm(instance=context['organisation'])
    return render(request, 'catalogueapp/admin/organisation/edit.html', context)


@permission_required('catalogueapp.catalogueadmin', login_url='/accounts/login/')
def admin_organisation_edit_preview(request, aliss_id):
    context = {
        'organisation': Organisation.objects.get(aliss_id=aliss_id),
    }
    context['organisation'].our_description_markdown = request.POST.get('description_markdown', '')
    return JsonResponse(
        {'description_markdown_html': context['organisation'].get_our_description_markdown_html()}
    )


@login_required()
def user_profile(request):
    context = {}
    return render(request, 'registration/profile.html', context)
551154a56e6dfddcdd8af08b2bb59c3cdd0641ae
e7c03b71f26c463b2670c52cd2fddbc198e3c8cb
/apps/djing2/apps.py
5b7f6f1a79897f208bfb04a3f0217bad381fe9a7
[]
no_license
nerosketch/djing2
71cc96f4829fc047d788dd7d8a94f1035e9740f9
1fbb0941f26389cbfdc8015527ab0d426c2e2c01
refs/heads/master
2023-01-13T15:12:50.492646
2022-11-18T11:24:21
2022-11-18T11:24:21
196,469,351
7
3
null
2020-02-29T19:38:37
2019-07-11T21:50:34
Python
UTF-8
Python
false
false
87
py
from django.apps import AppConfig


class Djing2Config(AppConfig):
    name = "djing2"
b7e62fcc6d1d6be43665ce941a5acff73bb88b22
92207eb2c2d8014da01831c3273efc581929f5c7
/step1/app.py
95d48d2908cb9ca1917bf5984e015f3806c89131
[]
no_license
garetroy/createyourownserver
8de61511a96f65330056f06c23a5a5d880193248
768c587fb14a047ba838caca28f8ff519f10cb4b
refs/heads/master
2020-03-26T04:18:21.643119
2018-08-12T20:37:55
2018-08-12T20:37:55
144,496,869
0
0
null
null
null
null
UTF-8
Python
false
false
364
py
import flask, os
from flask import render_template, Flask

app = flask.Flask(__name__)


@app.route('/')
def home():
    return flask.render_template('home.html')


@app.route('/page2')
def page2():
    return flask.render_template('secondpage.html')


if __name__ == '__main__':
    port = int(os.environ.get('PORT', 5000))
    app.run(debug=True, host='0.0.0.0', port=port)
564417eb8bcf4a0b26bad12b6b03cb0d0390b708
ce6538b5b7da162c1c690a346e7ec9ae0a6291f3
/glass_mine.py
6247053e40d8deebcb40be09594181b1019c11c7
[]
no_license
DaniTodorowa/Softuni
391f13dd61a6d16cd48ee06e9b35b2fd931375df
f7c875fda4e13ec63152671509aaa6eca29d7f50
refs/heads/master
2022-11-25T23:34:49.744315
2020-08-02T08:23:44
2020-08-02T08:23:44
278,938,559
0
0
null
null
null
null
UTF-8
Python
false
false
435
py
class Glass:
    capacity = 250

    def __init__(self):
        self.content = 0

    def fill(self, ml):
        if (self.capacity - self.content) >= ml:
            self.content += ml
            return f"Glass filled with {ml} ml"
        return f"Cannot add {ml} ml"

    def empty(self):
        self.content = 0
        return "Glass is now empty"

    def info(self):
        return f"{Glass.capacity - self.content} ml left"
072e921e8d2f60228a301e318a11571a82146dd8
c2e49d32b2613d702dd06067bd0ec7846a319fd5
/arelle/DialogArcroleGroup.py
cc9e7e8f45295a1a9ffd5633177bd3217e776755
[ "Apache-2.0" ]
permissive
hamscher/Arelle
c9a020a5955a313c14a4db3a4e7122ec9599714c
64c1beddcc7163e571011faf07a03d8ffe18bb78
refs/heads/master
2023-08-24T14:12:49.055954
2021-10-17T16:55:56
2021-10-17T16:55:56
284,703,106
0
0
Apache-2.0
2020-08-10T15:48:15
2020-08-03T13:08:08
Python
UTF-8
Python
false
false
7,702
py
'''
Created on Jun 15, 2012

@author: Mark V Systems Limited
(c) Copyright 2012 Mark V Systems Limited, All rights reserved.
'''
from tkinter import Toplevel, N, S, E, W, PhotoImage
try:
    from tkinter.ttk import Frame, Button
except ImportError:
    from ttk import Frame, Button
import os, re
from arelle.UiUtil import gridHdr, gridCell, gridCombobox, label, checkbox
from arelle.CntlrWinTooltip import ToolTip
from arelle import XbrlConst

'''
caller checks accepted, if True, caller retrieves url
'''
def getArcroleGroup(mainWin, modelXbrl):
    dialog = DialogArcroleGroup(mainWin, modelXbrl)
    return dialog.selectedGroup


class DialogArcroleGroup(Toplevel):
    def __init__(self, mainWin, modelXbrl):
        parent = mainWin.parent
        super(DialogArcroleGroup, self).__init__(parent)
        self.mainWin = mainWin
        self.parent = parent
        self.modelXbrl = modelXbrl
        parentGeometry = re.match("(\d+)x(\d+)[+]?([-]?\d+)[+]?([-]?\d+)", parent.geometry())
        dialogX = int(parentGeometry.group(3))
        dialogY = int(parentGeometry.group(4))
        self.selectedGroup = None

        self.transient(self.parent)
        self.title(_("Select Arcrole Group"))

        frame = Frame(self)

        '''
        dialogFrame = Frame(frame, width=500)
        dialogFrame.columnconfigure(0, weight=1)
        dialogFrame.rowconfigure(0, weight=1)
        dialogFrame.grid(row=0, column=0, columnspan=4, sticky=(N, S, E, W), padx=3, pady=3)
        '''
        # mainWin.showStatus(_("loading formula options and parameters"))

        # load grid
        groupLabel = label(frame, 1, 0, _("Group:"))

        self.arcroleGroups = mainWin.config.get("arcroleGroups", {})
        arcroleGroupSelected = self.mainWin.config.get("arcroleGroupSelected")
        if arcroleGroupSelected in self.arcroleGroups:
            arcroleGroup = self.arcroleGroups[arcroleGroupSelected]
        else:
            arcroleGroup = []
            arcroleGroupSelected = None
        self.groupName = gridCombobox(frame, 2, 0,
                                      value=arcroleGroupSelected,
                                      values=sorted(self.arcroleGroups.keys()),
                                      comboboxselected=self.comboBoxSelected)
        groupToolTipMessage = _("Select an existing arcrole group, or enter a name for a new arcrole group. "
                                "If selecting an existing group, it can be edited, and changes will be saved in the config file. "
                                "If nothing is changed for an existing group, the saved setting is not disturbed. "
                                "Arcroles with checkboxes below are shown only for arcroles that have relationships in the loaded DTS, "
                                "but if an existing group is selected with more arcroles (that were not in the current DTS) then "
                                "the prior setting with not-present arcroles is preserved. ")
        ToolTip(self.groupName, text=groupToolTipMessage, wraplength=360)
        ToolTip(groupLabel, text=groupToolTipMessage, wraplength=360)
        clearImage = PhotoImage(file=os.path.join(mainWin.imagesDir, "toolbarDelete.gif"))
        clearGroupNameButton = Button(frame, image=clearImage, width=12, command=self.clearGroupName)
        clearGroupNameButton.grid(row=0, column=3, sticky=W)
        ToolTip(clearGroupNameButton, text=_("Remove the currently selected arcrole group from the config file. "
                                             "After removing, you may select another arcrole, but must select 'OK' for the "
                                             "removal to be saved. "), wraplength=240)
        arcrolesLabel = label(frame, 1, 1, _("Arcroles:"))
        ToolTip(arcrolesLabel, text=_("Shows all the arcroles that are present in this DTS. "), wraplength=240)
        from arelle.ModelRelationshipSet import baseSetArcroles
        self.options = {}
        self.checkboxes = []
        y = 1
        for name, arcrole in baseSetArcroles(self.modelXbrl):
            if arcrole.startswith("http://"):
                self.options[arcrole] = arcrole in arcroleGroup
                self.checkboxes.append(
                    checkbox(frame, 2, y, name[1:], arcrole, columnspan=2)
                )
                y += 1
        mainWin.showStatus(None)
        self.options[XbrlConst.arcroleGroupDetect] = XbrlConst.arcroleGroupDetect in arcroleGroup
        self.autoOpen = checkbox(frame, 1, y, _("detect"), XbrlConst.arcroleGroupDetect)
        self.autoOpen.grid(sticky=W, columnspan=2)
        self.checkboxes.append(self.autoOpen)
        ToolTip(self.autoOpen, text=_("If checked, this arcrole group will be detected if any arcrole of the group is present in a DTS, "
                                      "for example to open a treeview pane. "), wraplength=240)
        okButton = Button(frame, text=_("OK"), width=8, command=self.ok)
        cancelButton = Button(frame, text=_("Cancel"), width=8, command=self.close)
        cancelButton.grid(row=y, column=1, sticky=E, columnspan=3, pady=3, padx=3)
        okButton.grid(row=y, column=1, sticky=E, columnspan=3, pady=3, padx=64)
        ToolTip(okButton, text=_("Open a treeview with named arcrole group and selected arcroles. "
                                 "If any changes were made to checkboxes or name, save in the config. "), wraplength=240)
        ToolTip(cancelButton, text=_("Close this dialog, without saving arcrole group changes or opening a view pane. "), wraplength=240)

        frame.grid(row=0, column=0, sticky=(N, S, E, W))
        frame.columnconfigure(1, weight=3)
        frame.columnconfigure(2, weight=1)
        frame.columnconfigure(3, weight=3)
        window = self.winfo_toplevel()
        window.columnconfigure(0, weight=1)
        self.geometry("+{0}+{1}".format(dialogX+50, dialogY+100))

        #self.bind("<Return>", self.ok)
        #self.bind("<Escape>", self.close)

        self.protocol("WM_DELETE_WINDOW", self.close)
        self.grab_set()
        self.wait_window(self)

    def ok(self, event=None):
        groupName = self.groupName.value
        arcrolesSelected = [checkbox.attr for checkbox in self.checkboxes if checkbox.value]
        if groupName:
            self.mainWin.config["arcroleGroupSelected"] = groupName
            if groupName not in self.arcroleGroups or any(checkbox.isChanged for checkbox in self.checkboxes):
                self.arcroleGroups[groupName] = arcrolesSelected
                self.mainWin.config["arcroleGroups"] = self.arcroleGroups
            self.mainWin.saveConfig()
        self.selectedGroup = (groupName, arcrolesSelected)
        self.close()

    def close(self, event=None):
        self.parent.focus_set()
        self.destroy()

    def comboBoxSelected(self, *args):
        arcroles = self.arcroleGroups.get(self.groupName.value, [])
        for checkbox in self.checkboxes:
            checkbox.valueVar.set(checkbox.attr in arcroles)
            checkbox.isChanged = False

    def clearGroupName(self):
        groupName = self.groupName.value
        if groupName and groupName in self.arcroleGroups:
            del self.arcroleGroups[groupName]
        self.groupName.valueVar.set('')
        self.groupName["values"] = sorted(self.arcroleGroups.keys())
        for checkbox in self.checkboxes:
            checkbox.valueVar.set(False)
            checkbox.isChanged = False
8ddae95edc4c8a92a22c923d4aa6fc4611593209
23f59ad45ac6b889f40b029a506fcc310c06aadb
/web/tests/test_request_context.py
99450ac6e3553a206fe50cc8847514944f406b5b
[]
no_license
ForeverDreamer/im
bac08b9b28fbdd7cc0ced9c4c1f152f318ecb670
962c512226422b00d12dbb31de3d448eca4cbbdc
refs/heads/main
2023-07-17T17:19:34.400825
2021-08-12T10:07:11
2021-08-12T10:07:11
369,503,904
0
0
null
null
null
null
UTF-8
Python
false
false
214
py
from flask import request

from web.app import app


def test_request_context():
    with app.test_request_context('/?name=Peter'):
        assert request.path == '/'
        assert request.args['name'] == 'Peter'
f0b14cf2d440998ba5c2e88973e4f5352225f39b
9a44d81e02b02aaf2c27433b4b8ebd15aa1f6279
/multiples.py
c6058a39363ac9aa463bd9b727bf54a481ba5b51
[]
no_license
sabareesh123/guvi-2
91ce0fba591c7565dd377b038b014988daddd68e
98c2e22746a8c2c2942b42c256b88e23dafcda39
refs/heads/master
2020-06-20T13:43:05.536627
2019-07-16T06:42:33
2019-07-16T06:42:33
null
0
0
null
null
null
null
UTF-8
Python
false
false
78
py
n = int(input())
for i in range(1, 6):
    s = n * i
    print(s, end=" ")
e44f26b7a18c8d4a5582ff84ed15f48867d9be49
2eb779146daa0ba6b71344ecfeaeaec56200e890
/python/oneflow/test/modules/test_adaptive_pool.py
d8b2c08d0e1713a48de811b2e58505d8ab5d9f1a
[ "Apache-2.0" ]
permissive
hxfxjun/oneflow
ee226676cb86f3d36710c79cb66c2b049c46589b
2427c20f05543543026ac9a4020e479b9ec0aeb8
refs/heads/master
2023-08-17T19:30:59.791766
2021-10-09T06:58:33
2021-10-09T06:58:33
414,906,649
0
0
Apache-2.0
2021-10-09T06:15:30
2021-10-08T08:29:45
C++
UTF-8
Python
false
false
3,468
py
""" Copyright 2020 The OneFlow Authors. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest import oneflow as flow import oneflow.unittest from oneflow.nn.common_types import _size_1_t from packaging import version import torch as torch_original from typing import Union, Tuple from oneflow.test_utils.automated_test_util import * NoneType = type(None) # Not the same as those in PyTorch because 'output_size' cannot be NoneType (even in 'torch.nn.AdaptiveAvgPoolXd') _size_2_opt_t_not_none = Union[int, Tuple[Union[int, NoneType], Union[int, NoneType]]] _size_3_opt_t_not_none = Union[ int, Tuple[Union[int, NoneType], Union[int, NoneType], Union[int, NoneType]] ] @flow.unittest.skip_unless_1n1d() class TestAdaptiveAvgPool(flow.unittest.TestCase): @autotest() def test_adaptive_avgpool1d(test_case): m = torch.nn.AdaptiveAvgPool1d(output_size=random().to(_size_1_t)) m.train(random()) device = random_device() m.to(device) x = random_pytorch_tensor(ndim=3).to(device) y = m(x) return y @autotest() def test_adaptive_avgpool2d(test_case): m = torch.nn.AdaptiveAvgPool2d(output_size=random().to(_size_2_opt_t_not_none)) m.train(random()) device = random_device() m.to(device) x = random_pytorch_tensor(ndim=4).to(device) y = m(x) return y @unittest.skipIf( version.parse(torch_original.__version__) < version.parse("1.10.0"), "GPU version 'nn.AdaptiveAvgPool3d' has a bug in PyTorch before '1.10.0'", ) @autotest() def test_adaptive_avgpool3d(test_case): m = torch.nn.AdaptiveAvgPool3d(output_size=random().to(_size_3_opt_t_not_none)) m.train(random()) device = random_device() m.to(device) x = random_pytorch_tensor(ndim=5).to(device) y = m(x) return y @flow.unittest.skip_unless_1n1d() class TestAdaptiveAvgPoolFunctional(flow.unittest.TestCase): @autotest() def test_adaptive_avgpool1d_functional(test_case): device = random_device() x = random_pytorch_tensor(ndim=3).to(device) return torch.nn.functional.adaptive_avg_pool1d(x, output_size=random().to(int)) @autotest() def test_adaptive_avgpool2d_functional(test_case): device = random_device() x = random_pytorch_tensor(ndim=4).to(device) return torch.nn.functional.adaptive_avg_pool2d(x, output_size=random().to(int)) @unittest.skipIf( version.parse(torch_original.__version__) < version.parse("1.10.0"), "GPU version 'nn.AdaptiveAvgPool3d' has a bug in PyTorch before '1.10.0'", ) @autotest() def test_adaptive_avgpool3d_functional(test_case): device = random_device() x = random_pytorch_tensor(ndim=5).to(device) return torch.nn.functional.adaptive_avg_pool2d(x, output_size=random().to(int)) if __name__ == "__main__": unittest.main()
79d739837ad0c7eb49a3da4aa3773672688ce62f
4b3c4878b48b618608d42de1a7b476a37b46b9b8
/atcoder/abc100c.py
065fe6541fd2bf67959f14a90eec4756d9e3005f
[]
no_license
zfhrp6/competitive-programming
3efd2a35717974c5ed9af364181a81144b6c8f19
459e7106ac4ba281217ce80cdc014023ce794bc3
refs/heads/master
2021-01-10T01:42:20.496027
2019-08-03T12:10:43
2019-08-03T12:10:43
51,929,931
0
0
null
null
null
null
UTF-8
Python
false
false
198
py
N = int(input())
a = list(map(int, input().split()))


def div2count(num):
    ret = 0
    while num % 2 == 0:
        ret += 1
        num = num // 2
    return ret


print(sum(map(div2count, a)))
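# Illustrative check (not from the original submission): div2count returns the
# exponent of 2 in its argument, so for a = [8, 12, 5] the exponents are
# 3, 2 and 0 and the program prints 3 + 2 + 0 = 5.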
870a6ee86f3d3892ed79a10bc1bd26231e441502
fb8792f0a62f3b3658197a7aabd6aeecf8e311c9
/news/views.py
6ea24217bedd4ceae946a571f306a01414e25d96
[]
no_license
munisisazade/matrix_tutorial
5ddb3af6ba958e663465356de24ae1e3112f5559
22718e2b937bc8856ac024059b9ba2780af38f7e
refs/heads/master
2020-03-22T06:11:36.399412
2018-07-11T17:59:43
2018-07-11T18:00:14
139,616,989
0
0
null
null
null
null
UTF-8
Python
false
false
1,290
py
from django.shortcuts import render
from django.http import HttpResponse
from news.models import Article
from news.forms import ArticleForm
from django.contrib import messages
from django.core.paginator import Paginator

# Create your views here.


def index(request):
    if request.method == 'GET':
        obj = {}
        obj["form"] = ArticleForm()
        news = Article.objects.all()
        news_list = Paginator(news, 2)
        page = request.GET.get('page')
        if page:
            obj["news_list"] = news_list.page(page)
        else:
            obj["news_list"] = news_list.page(1)
        return render(request, "index.html", obj)
    else:
        context = {}
        form = ArticleForm(request.POST)
        if form.is_valid():
            # Article.objects.create(
            #     title=form.cleaned_data['title'],
            #     description=form.cleaned_data['description']
            # )
            article = form.save()
        context["news_list"] = Article.objects.all()
        context["form"] = form
        messages.success(request, "The form was submitted successfully")
        return render(request, "index.html", context)


def detail(request, name):
    obj = {}
    obj["object"] = Article.objects.get(id=name)
    return render(request, "detail.html", obj)
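# Hedged sketch (not part of this file): a minimal urls.py that would route
# to the two views above; the URL patterns and module path are illustrative
# assumptions, not taken from the repository.
#
# from django.urls import path
# from news import views
#
# urlpatterns = [
#     path('', views.index, name='index'),
#     path('detail/<int:name>/', views.detail, name='detail'),
# ]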
8ef6aae5ce529d7db5c1522ac34dfb38391949bd
7bc0075367290ff06565991e19033b13f0604f96
/Mundo 3/aula16/desafio073.py
74b06bdc97ec15c23b8957a536d52baf2183d0f8
[]
no_license
iamtheluiz/curso_em_video_python
298acd90e36473fbf797ba7bf85d729d0ca28407
aa4247b7d206771f9c9b08ad5d8585c3813ddaff
refs/heads/master
2020-04-12T16:17:51.672662
2019-01-22T00:10:41
2019-01-22T00:10:41
162,608,169
1
0
null
null
null
null
UTF-8
Python
false
false
827
py
# imports
print("""
|******************|
|    Desafio073    |
|******************|
""")
print("Brasileirão league table!")

# Variables
tabela = ('Palmeiras', 'Flamengo', 'Internacional', 'Grêmio', 'São Paulo',
          'Atlético-MG', 'Athletico-PR', 'Cruzeiro', 'Botafogo', 'Santos',
          'Bahia', 'Fluminense', 'Corinthians', 'Chapecoense', 'Ceará',
          'Vasco', 'Sport', 'América-MG', 'Vitória', 'Paraná')

print("==== Top 5 ====")
for pos, time in enumerate(tabela[:5]):
    print(f'{pos + 1}º => {time}')

print("==== Bottom 4 ====")
for pos in range(len(tabela) - 4, len(tabela)):
    print(f'{pos + 1}º => {tabela[pos]}')

print("==== Teams in alphabetical order ====")
for time in sorted(tabela):
    print(time)

print("==== Where is Chapecoense?? ====")
print(f'It is in {tabela.index("Chapecoense") + 1}º place!')
dff03e0fa17d53d82ad062bc2d6e9dd9b6101a86
24a13b07623ce41e57ea4df1fcce0befb14d3406
/model/densenet201/model4_val5.py
55e955e927b2209bb720ad20719f51cf1d10ecd0
[ "MIT" ]
permissive
shorxp/jd-fashion
5f37e6c2235200944869e9a0da4d741c89d63b9e
817f693672f418745e3a4c89a0417a3165b08130
refs/heads/master
2021-09-22T18:40:13.030601
2018-09-13T13:50:05
2018-09-13T13:50:05
null
0
0
null
null
null
null
UTF-8
Python
false
false
7,032
py
""" 以model1为原型,新增real crop """ import math import os import queue import time import keras from keras.layers import Dense, BatchNormalization, Activation import config from util import data_loader from util import keras_util from util.keras_util import KerasModelConfig model_config = KerasModelConfig(k_fold_file="1.txt", model_path=os.path.abspath(__file__), image_resolution=224, data_type=[config.DATA_TYPE_ORIGINAL], label_position=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], train_batch_size=[16, 16, 16], val_batch_size=256, predict_batch_size=256, initial_epoch=2, epoch=[1, 4, 10], lr=[0.0005, 0.00005, 0.000005], freeze_layers=[-1, 0.6, 5]) def get_model(freeze_layers=-1, lr=0.01, output_dim=1, weights="imagenet"): base_model = keras.applications.DenseNet201(include_top=False, weights=weights, input_shape=model_config.image_shape, pooling="avg") x = base_model.output x = Dense(256, use_bias=False)(x) x = BatchNormalization()(x) x = Activation("relu")(x) predictions = Dense(units=output_dim, activation='sigmoid')(x) model = keras.Model(inputs=base_model.input, outputs=predictions) if freeze_layers == -1: print("freeze all basic layers, lr=%f" % lr) for layer in base_model.layers: layer.trainable = False else: if freeze_layers < 1: freeze_layers = math.floor(len(base_model.layers) * freeze_layers) for layer in range(freeze_layers): base_model.layers[layer].train_layer = False print("freeze %d basic layers, lr=%f" % (freeze_layers, lr)) model.compile(loss="binary_crossentropy", optimizer=keras.optimizers.Adam(lr=lr)) # model.summary() print("basic model have %d layers" % len(base_model.layers)) return model def train(): evaluate_queue = queue.Queue() evaluate_task = keras_util.EvaluateTask(evaluate_queue) evaluate_task.setDaemon(True) evaluate_task.start() checkpoint = keras_util.EvaluateCallback(model_config, evaluate_queue) start = time.time() model_config.save_log("####### start train model") init_stage = model_config.get_init_stage() model_config.save_log("####### init stage is %d" % init_stage) for i in range(init_stage, len(model_config.epoch)): model_config.save_log("####### lr=%f, freeze layers=%2f epoch=%d" % ( model_config.lr[i], model_config.freeze_layers[i], model_config.epoch[i])) clr = keras_util.CyclicLrCallback(base_lr=model_config.lr[i], max_lr=model_config.lr[i] * 5, step_size=model_config.get_steps_per_epoch(i) / 2) train_flow = data_loader.KerasGenerator(model_config=model_config, featurewise_center=True, featurewise_std_normalization=True, width_shift_range=0.15, height_shift_range=0.1, horizontal_flip=True, real_transform=True, rescale=1. 
/ 256).flow_from_files(model_config.train_files, mode="fit", target_size=model_config.image_size, batch_size= model_config.train_batch_size[i], shuffle=True, label_position=model_config.label_position) if i == 0: model_config.save_log("####### initial epoch is 0, end epoch is %d" % model_config.epoch[i]) model = get_model(freeze_layers=model_config.freeze_layers[i], lr=model_config.lr[i], output_dim=len(model_config.label_position)) model.fit_generator(generator=train_flow, steps_per_epoch=model_config.get_steps_per_epoch(i), epochs=model_config.epoch[i], workers=16, verbose=1, callbacks=[checkpoint, clr]) else: model = get_model(freeze_layers=model_config.freeze_layers[i], output_dim=len(model_config.label_position), lr=model_config.lr[i], weights=None) if i == init_stage: model_config.save_log("####### load weight file: %s" % model_config.get_weights_path(model_config.initial_epoch)) model.load_weights(model_config.get_weights_path(model_config.initial_epoch)) model_config.save_log("####### initial epoch is %d, end epoch is %d" % ( model_config.initial_epoch, model_config.epoch[i])) model.fit_generator(generator=train_flow, steps_per_epoch=model_config.get_steps_per_epoch(i), epochs=model_config.epoch[i], initial_epoch=model_config.initial_epoch, workers=16, verbose=1, callbacks=[checkpoint, clr]) else: model_config.save_log("####### load weight file: %s" % model_config.get_weights_path(model_config.epoch[i - 1])) model.load_weights(model_config.get_weights_path(model_config.epoch[i - 1])) model_config.save_log( "####### initial epoch is %d, end epoch is %d" % (model_config.epoch[i - 1], model_config.epoch[i])) model.fit_generator(generator=train_flow, steps_per_epoch=model_config.get_steps_per_epoch(i), epochs=model_config.epoch[i], initial_epoch=model_config.epoch[i - 1], workers=16, verbose=1, callbacks=[checkpoint, clr]) model_config.save_log("####### train model spend %d seconds" % (time.time() - start)) model_config.save_log("####### train model spend %d seconds average" % ((time.time() - start) / model_config.epoch[-1]))
dd8785c0296e0a16b4aa8fe1670176cd02aefe7c
2324dea2cb3003c8ab7e8fd80588d44973eb8c77
/Euler_8_390c.py
35f9f192188dc8a48a12c80b619a504e65823cbe
[]
no_license
MikeOcc/MyProjectEulerFiles
5f51bc516cb6584732dc67bb2f9c7fd9e6d51e56
4d066d52380aade215636953589bf56d6b88f745
refs/heads/master
2021-01-16T18:45:44.133229
2015-05-27T18:28:43
2015-05-27T18:28:43
5,876,116
0
0
null
null
null
null
UTF-8
Python
false
false
1,153
py
#
#
# Euler 278
#
#

from time import time
from Functions import primes, RetFact
from collections import defaultdict
from math import sqrt


def FactorSieve(n):
    n += 1
    f = defaultdict(list)
    for p in xrange(2, n):
        if p not in f:
            for i in xrange(p + p, n, p):
                j, k = i, 1
                while j % p == 0:
                    j //= p
                    k *= p
                f[i].append(p)
            if f[p] == []:
                f[p] = [p]
    return f


st = time()

# Triangle sides: sqrt(1+b^2), sqrt(1+c^2) and sqrt(b^2+c^2)
# F = FactorSieve(1250002)
F = FactorSieve(1250)


def T(b, c):
    a2 = b*b + 1
    b2 = c*c + 1
    # c2 = b*b + c*c
    # return (((4*a2*b2) - (a2 + b2 - c2)**2)**.5)/4.
    # A4 = (((4*a2*b2) - 4)**.5)
    A42 = (a2*b2) - 1
    # A = pow(A42, .5)/2
    # A = (A42**.5)/2
    A = sqrt(A42)/2
    return A


n = 10**8
summ = 0
for i in xrange(2, 22872, 2):
    for j in xrange(i, 50000000, 2):
        A = T(i, j)
        if int(A) == A and A <= n:
            summ += int(A)
            # if j in F:
            #     print i, j, int(A)  # , F[i], F[j]
            # else:
            #     print "! ", i, j, int(A)  # , F[i], RetFact(j)  # , RetFact(A)
        if A > n:
            # print "overflow at", i, j
            break

print "sum of funky triangles is ", summ
print "time elapsed ", time() - st
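# Derivation sketch (not part of the original file): by Heron's formula,
# 16*Area^2 = 4*p*q - (p + q - r)^2 for squared side lengths p, q, r.
# With p = 1 + b^2, q = 1 + c^2, r = b^2 + c^2 we get p + q - r = 2, so
# 16*Area^2 = 4*(1 + b^2)*(1 + c^2) - 4, i.e.
# Area = sqrt((1 + b^2)*(1 + c^2) - 1) / 2,
# which is exactly the A42 shortcut used in T(b, c) above.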
771a4a4a6e322e596debdfb4b5735e6d2913aaa3
b92adbd59161b701be466b3dbeab34e2b2aaf488
/.c9/metadata/environment/git_practice/ib_miniprojects_backend/essentials_kit_management/interactors/get_admin_account_details_interactor.py
99a60864937770d8a0707a63b1ffb2bb5aabc2ca
[]
no_license
R151865/cloud_9_files
7486fede7af4db4572f1b8033990a0f07f8749e8
a468c44e9aee4a37dea3c8c9188c6c06e91cc0c4
refs/heads/master
2022-11-22T10:45:39.439033
2020-07-23T09:31:52
2020-07-23T09:31:52
281,904,416
0
1
null
2022-11-20T00:47:10
2020-07-23T09:08:48
Python
UTF-8
Python
false
false
548
py
{"filter":false,"title":"get_admin_account_details_interactor.py","tooltip":"/git_practice/ib_miniprojects_backend/essentials_kit_management/interactors/get_admin_account_details_interactor.py","undoManager":{"mark":-1,"position":-1,"stack":[]},"ace":{"folds":[],"scrolltop":0,"scrollleft":0,"selection":{"start":{"row":0,"column":0},"end":{"row":0,"column":0},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":0},"timestamp":1594287027811,"hash":"65e039a7a53b00a782bc458c2b43aa4ed442474b"}
6c3bcd0588462a4e5f7f60dd06b5f197d341edf8
19049316bb6d769bffcb61d2dfb2b241b65fdb65
/server/app/ai/mmfashion/mmfashion/models/registry.py
8cbc143707cb4ee919a9e5b735f1957dcb75e9fe
[ "Apache-2.0" ]
permissive
alveloper/fashion-scanner-v1
7e43b515ad329d19982e5dd5fe92dfbab0bad948
067d73cbe3417c2ef337e64ca251c4f883713974
refs/heads/master
2023-08-12T03:21:32.169558
2021-09-07T15:38:58
2021-09-07T15:38:58
null
0
0
null
null
null
null
UTF-8
Python
false
false
377
py
from ..utils import Registry

BACKBONES = Registry('backbone')
GLOBALPOOLING = Registry('global_pool')  # global pooling
ATTRPREDICTOR = Registry('attr_predictor')  # predict attributes
CATEPREDICTOR = Registry('cate_predictor')  # predict category
LOSSES = Registry('loss')  # loss function
PREDICTOR = Registry('predictor')
GEOMETRICMATCHING = Registry('geometric_matching')
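# Hedged usage sketch (not part of this file): registries in
# mmdetection-style codebases are typically applied as decorators when a
# module class is defined, so it can later be built from a config string.
# The decorator form and the class below are illustrative assumptions.
#
# @BACKBONES.register_module
# class MyBackbone(nn.Module):
#     ...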
14c48c22b1e687c04b46200c8bfa69ee603b2a11
e47bc9571c59b1c6e8aeb4231a286ab8577802d4
/easy/888-fair-candy-swap.py
8aee86dc55f4d02630c6b484960f1a8054bca360
[ "MIT" ]
permissive
changmeng72/leecode_python3
d0176502dfaf3c8b455ec491c72979dd25b66b3e
8384f52f0dd74b06b1b6aefa277dde6a228ff5f3
refs/heads/main
2023-05-27T10:35:43.465283
2021-06-09T00:20:59
2021-06-09T00:20:59
375,127,027
0
0
null
null
null
null
UTF-8
Python
false
false
542
py
class Solution:
    def fairCandySwap(self, aliceSizes: List[int], bobSizes: List[int]) -> List[int]:
        aliceSum = sum(aliceSizes)
        bobSum = sum(bobSizes)
        diff = (aliceSum - bobSum) / 2

        bob_dict = {}
        for i in range(len(bobSizes)):
            bob_dict[bobSizes[i]] = i

        for i in range(len(aliceSizes)):
            k = bob_dict.get(aliceSizes[i] - diff, -1)
            if k != -1:
                return [aliceSizes[i], bobSizes[k]]

        return [0, 0]
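# Worked example (not part of the original solution): if Alice gives a candy
# of size x and receives one of size y, fairness requires
# aliceSum - x + y == bobSum - y + x, i.e. y = x - (aliceSum - bobSum) / 2.
# For aliceSizes = [1, 2, 5] and bobSizes = [2, 4]: diff = (8 - 6) / 2 = 1,
# and x = 5 gives y = 4, so the method returns [5, 4] (both totals become 7).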
99393122c191269914bababfb52fe894d63c4585
f889bc01147869459c0a516382e7b95221295a7b
/swagger_client/models/bundle_data_bundle_option_extension_interface.py
186314a8a23389437f249b99836d42fcf7756f20
[]
no_license
wildatheart/magento2-api-client
249a86f5c0289743f8df5b0324ccabd76f326512
e6a707f85b37c6c3e4ef3ff78507a7deb8f71427
refs/heads/master
2021-07-14T16:01:17.644472
2017-10-18T13:33:08
2017-10-18T13:33:08
107,412,121
1
1
null
null
null
null
UTF-8
Python
false
false
2,468
py
# coding: utf-8

"""
    Magento Community

    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)

    OpenAPI spec version: 2.2

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

from pprint import pformat
from six import iteritems
import re


class BundleDataBundleOptionExtensionInterface(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
    }

    attribute_map = {
    }

    def __init__(self):
        """
        BundleDataBundleOptionExtensionInterface - a model defined in Swagger
        """

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, BundleDataBundleOptionExtensionInterface):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
0a3864ab0e1a31b27abb83ceb06746239bded135
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03600/s387875234.py
e5d97020c3d1d82fca4cb72b6c976aa6b076e6c5
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
1,629
py
import heapq
from collections import deque

N = int(input())
A = [[int(a) for a in input().split()] for _ in range(N)]


def dijkstra_heap(s, edge, n):
    # Shortest distances from source s to every vertex.
    d = [10**9 + 1] * n
    used = [True] * n  # True: not finalized yet
    d[s] = 0
    used[s] = False
    edgelist = []
    # Each heap entry packs (cost, vertex) into a single integer as
    # cost * 10**6 + vertex, so heapq orders entries by cost first.
    for a, b in edge[s]:
        heapq.heappush(edgelist, a * (10**6) + b)
    while len(edgelist):
        minedge = heapq.heappop(edgelist)
        # Among the vertices not yet finalized, pick the one with the
        # smallest distance.
        if not used[minedge % (10**6)]:
            continue
        v = minedge % (10**6)
        d[v] = minedge // (10**6)
        used[v] = False
        for e in edge[v]:
            if used[e[1]]:
                heapq.heappush(edgelist, (e[0] + d[v]) * (10**6) + e[1])
    return d


Road = [[] for _ in range(N)]
h = []
for i in range(N):
    for j in range(i + 1, N):
        heapq.heappush(h, (A[i][j], i, j))
m = h[0][0]
D = [[10**9 + 1] * N for _ in range(N)]
ans = 0
while h:
    t = heapq.heappop(h)
    cost = t[0]
    i = t[1]
    j = t[2]
    if cost < 2 * m:
        Road[i].append((cost, j))
        Road[j].append((cost, i))
        D[i][j] = cost
        D[j][i] = cost
    elif D[i][j] > cost:
        D[i] = dijkstra_heap(i, Road, N)
        if D[i][j] > cost:
            Road[i].append((cost, j))
            Road[j].append((cost, i))
            D[i][j] = cost
            D[j][i] = cost
    if D[i][j] < cost:
        ans = -1
        break
if ans == 0:
    for i in range(N):
        for t in Road[i]:
            ans += t[0]
    ans //= 2
print(ans)
6a7f5db06b119930e8b4142194d3392943046c09
3d50cdc1fd01717dbcc43007d96a390631725ab4
/models/api/backtest/analysis.py
588e064d6eec6a3962b2eb0095390f9a30caab1e
[]
no_license
danielwangxh/golden_eye
22540fc9b60e710263d348de2ecce13928b795fc
1f6c9dc6b5cb806398024e5b678f150c074a689a
refs/heads/master
2021-06-12T06:02:15.028910
2017-02-05T04:31:16
2017-02-05T04:31:16
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,485
py
# -*- coding: utf8 -*-
import os
from simplejson import loads

import pandas as pd

from models.api.backtest.calculator import FittingDataCalculator
from sell_signals.base import SellSignal
from consts import ema_file_dir, init_point, init_time, init_offset
from libs.utils import ifcode_day_map


def read_df(path):
    df_list = loads(open(path).read())
    return pd.DataFrame(df_list, columns=['time_index', 'price', 'volume', 'ema_short', 'ema_long'])


class DataAnalyzer(object):

    @classmethod
    def ema(cls, date, ifcode, period_short, period_long):
        _file = '%s/%s_%s_%s_%s' % (ema_file_dir, date, ifcode, period_short, period_long)
        if os.path.isfile(_file):
            df = read_df(_file)
        else:
            df = FittingDataCalculator.ema_df(date, ifcode, period_short, period_long)
        if df.empty:
            return []
        sig_infos = SellSignal.compare_ema(df, limit_period=60)
        profit_infos = SellSignal.profit_infos(sig_infos)
        return profit_infos

    # **************************************************************
    # macd
    @classmethod
    def macd(cls, date, ifcode, period_short=12, period_long=26, period_dif=9,
             pre_point=init_point, pre_time=init_time, offset=init_offset):
        # date: 'yyyy-mm-dd'
        df = FittingDataCalculator.macd_df(date, ifcode, period_short, period_long,
                                           period_dif, pre_point, pre_time)
        if df.empty:
            return []
        sig_infos = SellSignal.compare_macd(df, 3, offset)
        profit_infos = SellSignal.profit_infos(sig_infos)
        flags = SellSignal.out_flags(sig_infos)
        return profit_infos

    @classmethod
    def macd_chart(cls, date, ifcode, period_short=12, period_long=26, period_dif=9,
                   pre_point=init_point, pre_time=init_time, offset=init_offset):
        df = FittingDataCalculator.macd_df(date, ifcode, period_short, period_long,
                                           period_dif, pre_point, pre_time)
        price = df[['time_index', 'price']].values.tolist()
        macd_dif = df[['time_index', 'macd_dif']].values.tolist()
        macd_dem = df[['time_index', 'macd_dem']].values.tolist()
        # flag
        sig_infos = SellSignal.compare_macd(df, 3, offset)
        flags = SellSignal.out_flags(sig_infos)
        return [price, macd_dif, macd_dem, flags]

    # *********************************************************
    # analysis
    # *********************************************************
    @classmethod
    def macd_analysis(cls, date, ifcode, period_short, period_long,
                      period_dif, pre_point, pre_time, pre_offset):
        # date: 'yyyy-mm-dd'
        df = FittingDataCalculator.macd_df(date, ifcode, period_short, period_long,
                                           period_dif, pre_point, pre_time)
        if df.empty:
            return []
        sig_infos = SellSignal.compare_macd(df, 3, pre_offset)
        profit_infos = SellSignal.profit_infos(sig_infos)
        return profit_infos

    @classmethod
    def macd_if_analysis(cls, ifcode, pre_point, pre_time, pre_offset,
                         period_short=12, period_long=26, period_dif=9, trans_amount=1):
        rs = []
        total = 0
        pos_num = 0
        nag_num = 0
        trans_total_num = 0
        date_list = ifcode_day_map(ifcode)
        for day in date_list:
            profit_infos = cls.macd_analysis(day, ifcode, period_short, period_long,
                                             period_dif, pre_point, pre_time, pre_offset)
            profit_all = 0
            trans_num = (len(profit_infos) - 1) / 2
            trans_total_num += trans_num
            for item in profit_infos:
                if item['gain'] != '-':
                    profit_all += int(item['gain']) * trans_amount
            rs.append({'date': day, 'profit': profit_all, 'trans_num': trans_num})
            total += profit_all
            if profit_all >= 0:
                pos_num += 1
            elif profit_all < 0:
                nag_num += 1
        if nag_num == 0:
            profit_rate = pos_num
        else:
            profit_rate = pos_num * 1.0 / nag_num
        fees = trans_total_num * 2300
        real_profit = total - fees
        return {'profit': total, 'real_profit': real_profit, 'profit_rate': profit_rate,
                'trans_total_num': trans_total_num, 'fees': fees,
                'trans_amount': trans_amount}

    # *************************************************************
    # boll
    @classmethod
    def boll_chart(cls, date, ifcode, period_short=50, period_long=80, pre_point=10):
        df = FittingDataCalculator.boll_df(date, ifcode, period_short, period_long, pre_point)
        price = df[['time_index', 'price']].values.tolist()
        boll_up = df[['time_index', 'boll_up']].values.tolist()
        boll_dn = df[['time_index', 'boll_dn']].values.tolist()
        boll_mb = df[['time_index', 'boll_mb']].values.tolist()
        return price, boll_up, boll_dn, boll_mb

    @classmethod
    def boll(cls, date, ifcode, period_short=50, period_long=80, pre_point=10):
        df = FittingDataCalculator.boll_df(date, ifcode, period_short, period_long, pre_point)
        sig_infos = SellSignal.compare_boll_b_percent(df)
        profit_infos = SellSignal.profit_infos(sig_infos)
        return profit_infos
62ff16334955a6f7413d9d5e053b966753d69de4
e47bd43968732b55907aa1b8d3530b9ec12bc0e1
/pipeline/pipeline_stack.py
91dd67f89f5ee450816f3521282190c65eac6b36
[ "Apache-2.0" ]
permissive
damshenas/enechobot
523763c8bb3d54e86fb6586c4883ca3008704014
9e346a87433abc1d95c60c3b9ee9401becf7a88f
refs/heads/main
2023-06-28T16:21:08.637064
2021-08-09T10:19:08
2021-08-09T10:19:08
394,225,747
0
0
Apache-2.0
2021-08-09T10:19:09
2021-08-09T09:15:21
null
UTF-8
Python
false
false
4,368
py
import json

from aws_cdk import (core,
                     aws_codebuild as codebuild,
                     aws_codecommit as codecommit,
                     aws_codepipeline as codepipeline,
                     aws_codepipeline_actions as codepipeline_actions,
                     aws_s3 as s3,
                     aws_lambda as lambda_)


class PipelineStack(core.Stack):
    def __init__(self, scope: core.Construct, id: str, *,
                 repo_name: str = None,
                 application_code: lambda_.CfnParametersCode = None,
                 **kwargs) -> None:
        super().__init__(scope, id, **kwargs)

        # In this stack we create the pipeline using CDK.
        # We have one pipeline, 2 builds and 1 deployment.
        # The pipeline has multiple stages (in this example we have 3 stages:
        # Source, Build, and Deploy).
        # One build creates the artifact used for the lambda function;
        # the other build creates the CloudFormation template and the whole infra.

        # Reading the buildspecs JSON from a file, but it would also be
        # possible to write them in Python.
        with open('./pipeline/buildspecs.json') as f:
            buildspecs = json.load(f)

        ### S3 bucket
        # for build output
        build_output_S3_bucket = s3.Bucket(self, "BUILD_OUTCOME")

        # Important note: it is better not to create the repo in the stack,
        # as destroying the stack can delete the repo!!
        code = codecommit.Repository.from_repository_name(self, "ImportedRepo", repo_name)

        # buildspec phase name: build. Possible phases: build, install, post_build, pre_build
        cdk_build_spec = codebuild.BuildSpec.from_object(buildspecs["cdk_build_spec"])
        telegram_build_spec = codebuild.BuildSpec.from_object(buildspecs["telegram_build_spec"])

        cdk_build = codebuild.PipelineProject(self, "CdkBuild", build_spec=cdk_build_spec)
        telegram_build = codebuild.PipelineProject(self, 'telegram', build_spec=telegram_build_spec)

        source_output = codepipeline.Artifact()
        cdk_build_output = codepipeline.Artifact("CdkBuildOutput")
        telegram_build_output = codepipeline.Artifact("TelegramBuildOutput")
        telegram_lambda_location = telegram_build_output.s3_location

        pipeline_source_stage = codepipeline.StageProps(
            stage_name="Source",
            actions=[
                codepipeline_actions.CodeCommitSourceAction(
                    action_name="CodeCommit_Source",
                    repository=code,
                    branch="develop",
                    output=source_output)])

        pipeline_build_stage = codepipeline.StageProps(
            stage_name="Build",
            actions=[
                codepipeline_actions.CodeBuildAction(
                    action_name="telegram_build",
                    project=telegram_build,
                    input=source_output,
                    outputs=[telegram_build_output]),
                codepipeline_actions.CodeBuildAction(
                    action_name="CDK_Build",
                    project=cdk_build,
                    input=source_output,
                    outputs=[cdk_build_output])
            ])

        pipeline_deploy_stage_action1 = codepipeline_actions.CloudFormationCreateUpdateStackAction(
            action_name="Lambda_CFN_Deploy",
            template_path=cdk_build_output.at_path("EnEchoBot.template.json"),
            stack_name="TelegramDeploymentStack",
            admin_permissions=True,
            parameter_overrides=dict(
                application_code.assign(
                    bucket_name=telegram_lambda_location.bucket_name,
                    object_key=telegram_lambda_location.object_key,
                    object_version=telegram_lambda_location.object_version)),
            extra_inputs=[telegram_build_output])

        pipeline_deploy_stage = codepipeline.StageProps(
            stage_name="Deploy",
            actions=[pipeline_deploy_stage_action1])

        codepipeline.Pipeline(self, "Pipeline",
                              stages=[pipeline_source_stage, pipeline_build_stage, pipeline_deploy_stage],
                              artifact_bucket=build_output_S3_bucket)
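# Hedged sketch (not part of this file): how the stack might be instantiated
# from a CDK app entry point; the app name, repo name and variable names are
# illustrative assumptions.
#
# from aws_cdk import core, aws_lambda as lambda_
#
# app = core.App()
# lambda_code = lambda_.Code.from_cfn_parameters()  # yields a CfnParametersCode
# PipelineStack(app, "EnEchoBotPipeline",
#               repo_name="enechobot",
#               application_code=lambda_code)
# app.synth()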
effe5703e8df5737683f1cdd5c1f32c9ba9485c3
292437b85108504a7ca91571f26a639a313501b6
/venv2/lib/python2.7/site-packages/monasca_setup/main.py
aa428c7d23431698843c27bc087991f9cd920fb1
[]
no_license
heekof/monitoring-agent
c86bebcf77091490df7a6b8c881b85fdb2b9e4eb
b1c079efdf2dabe854f2aa3d96f36d2ec7021070
refs/heads/master
2021-01-15T15:39:01.512801
2016-08-31T20:53:38
2016-08-31T20:53:38
58,620,098
1
0
null
null
null
null
UTF-8
Python
false
false
15,290
py
#!/usr/bin/env python
# (C) Copyright 2015-2016 Hewlett Packard Enterprise Development Company LP

""" Detect running daemons then configure and start the agent.
"""

import argparse
from glob import glob
import logging
import os
import pwd
import socket
import subprocess
import sys

import agent_config
import monasca_setup.utils as utils
from monasca_setup.utils import write_template
from service.detection import detect_init

log = logging.getLogger(__name__)

CUSTOM_PLUGIN_PATH = '/usr/lib/monasca/agent/custom_detect.d'
# dirname is called twice to get the dir 1 above the location of the script
PREFIX_DIR = os.path.dirname(os.path.dirname(os.path.realpath(sys.argv[0])))


def main(argv=None):
    parser = argparse.ArgumentParser(
        description='Configure and setup the agent. In a full run it will detect running' +
                    ' daemons then configure and start the agent.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    args = parse_arguments(parser)

    if args.verbose:
        logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
    else:
        logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")

    if args.dry_run:
        log.info("Running in dry run mode, no changes will be made only reported")

    # Detect and if possibly enable the agent service
    agent_service = detect_init(PREFIX_DIR, args.config_dir, args.log_dir,
                                args.template_dir, username=args.user)

    # Skip base setup if only installing plugins or running specific detection
    # plugins
    if not args.install_plugins_only and args.detection_plugins is None:
        if not args.skip_enable:
            agent_service.enable()

        # Verify required options
        if args.username is None or args.password is None or args.keystone_url is None:
            log.error('Username, password and keystone_url are required when running full configuration.')
            parser.print_help()
            sys.exit(1)
        base_configuration(args)

    # Collect the set of detection plugins to run
    detected_plugins = utils.discover_plugins(CUSTOM_PLUGIN_PATH)
    if args.system_only:
        from detection.plugins.system import System
        plugins = [System]
    elif args.detection_plugins is not None:
        plugins = utils.select_plugins(args.detection_plugins, detected_plugins)
    else:
        plugins = detected_plugins
    plugin_names = [p.__name__ for p in plugins]

    if args.remove:
        # Remove entries for each plugin from the various plugin config files
        changes = remove_config(args, plugin_names)
    else:
        # Run detection for all the plugins, halting on any failures if
        # plugins were specified in the arguments
        detected_config = plugin_detection(plugins, args.template_dir, args.detection_args,
                                           skip_failed=(args.detection_plugins is None))
        if detected_config is None:
            # Indicates detection problem, skip remaining steps and give
            # non-zero exit code
            return 1

        changes = modify_config(args, detected_config)

    # Don't restart if only doing detection plugins and no changes found
    if args.detection_plugins is not None and not changes:
        log.info('No changes found for plugins {0}, skipping restart of Monasca Agent'.format(plugin_names))
        return 0
    elif args.dry_run:
        log.info('Running in dry mode, skipping changes and restart of Monasca Agent')
        return 0

    # Now that the config is built, start the service
    if args.install_plugins_only:
        log.info('Command line option install_plugins_only set, skipping '
                 'service (re)start.')
    else:
        try:
            agent_service.start(restart=True)
        except subprocess.CalledProcessError:
            log.error('The service did not startup correctly see %s' % args.log_dir)


def base_configuration(args):
    """Write out the primary Agent configuration and setup the service.

    :param args: Arguments from the command line
    :return: None
    """
    gid = pwd.getpwnam(args.user).pw_gid
    # Write the main agent.yaml - Note this is always overwritten
    log.info('Configuring base Agent settings.')
    dimensions = {}
    # Join service in with the dimensions
    if args.service:
        dimensions.update({'service': args.service})
    if args.dimensions:
        dimensions.update(dict(item.strip().split(":") for item in args.dimensions.split(",")))

    args.dimensions = dict((name, value) for (name, value) in dimensions.iteritems())
    write_template(os.path.join(args.template_dir, 'agent.yaml.template'),
                   os.path.join(args.config_dir, 'agent.yaml'),
                   {'args': args, 'hostname': socket.getfqdn()},
                   gid,
                   is_yaml=True)

    # Write the supervisor.conf
    write_template(os.path.join(args.template_dir, 'supervisor.conf.template'),
                   os.path.join(args.config_dir, 'supervisor.conf'),
                   {'prefix': PREFIX_DIR, 'log_dir': args.log_dir, 'monasca_user': args.user},
                   gid)


def modify_config(args, detected_config):
    changes = False
    # Compare existing and detected config for each check plugin and write
    # out the plugin config if changes
    for key, value in detected_config.iteritems():
        if args.overwrite:
            changes = True
            if args.dry_run:
                continue
            else:
                agent_config.save_plugin_config(args.config_dir, key, args.user, value)
        else:
            old_config = agent_config.read_plugin_config_from_disk(args.config_dir, key)
            # merge old and new config, new has precedence
            if old_config is not None:
                if key == "http_check":
                    old_config_urls = [i['url'] for i in old_config['instances'] if 'url' in i]
                    value, old_config = agent_config.check_endpoint_changes(value, old_config)
                agent_config.merge_by_name(value['instances'], old_config['instances'])
                # Sort before compare, if instances have no name the sort will
                # fail making order changes significant
                try:
                    value['instances'].sort(key=lambda k: k['name'])
                    old_config['instances'].sort(key=lambda k: k['name'])
                except Exception:
                    pass
                value_urls = [i['url'] for i in value['instances'] if 'url' in i]
                if key == "http_check":
                    # Don't write config if no change
                    if value_urls == old_config_urls:
                        continue
                else:
                    if value == old_config:
                        continue
            changes = True
            if args.dry_run:
                log.info("Changes would be made to the config file for the {0} check plugin".format(key))
            else:
                agent_config.save_plugin_config(args.config_dir, key, args.user, value)
    return changes


def parse_arguments(parser):
    parser.add_argument('-u', '--username',
                        help="Username used for keystone authentication. Required for basic configuration.")
    parser.add_argument('-p', '--password',
                        help="Password used for keystone authentication. Required for basic configuration.")
    parser.add_argument('--user_domain_id', help="User domain id for keystone authentication", default='')
    parser.add_argument('--user_domain_name', help="User domain name for keystone authentication", default='')
    parser.add_argument('--keystone_url', help="Keystone url. Required for basic configuration.")
    parser.add_argument('--project_name', help="Project name for keystone authentication", default='')
    parser.add_argument('--project_domain_id', help="Project domain id for keystone authentication", default='')
    parser.add_argument('--project_domain_name', help="Project domain name for keystone authentication", default='')
    parser.add_argument('--project_id', help="Keystone project id for keystone authentication", default='')
    parser.add_argument('--monasca_url',
                        help="Monasca API url, if not defined the url is pulled from keystone", default='')
    parser.add_argument('--system_only',
                        help="Setup the service but only configure the base config and system " +
                             "metrics (cpu, disk, load, memory, network).",
                        action="store_true", default=False)
    parser.add_argument('-d', '--detection_plugins', nargs='*',
                        help="Skip base config and service setup and only configure this space separated list. " +
                             "This assumes the base config has already run.")
    parser.add_argument('-a', '--detection_args',
                        help="A string of arguments that will be passed to detection " +
                             "plugins. Only certain detection plugins use arguments.")
    parser.add_argument('--check_frequency', help="How often to run metric collection in seconds",
                        type=int, default=30)
    parser.add_argument('--dimensions',
                        help="Additional dimensions to set for all metrics. A comma separated list " +
                             "of name/value pairs, 'name:value,name2:value2'")
    parser.add_argument('--ca_file',
                        help="Sets the path to the ca certs file if using certificates. " +
                             "Required only if insecure is set to False", default='')
    parser.add_argument('--insecure',
                        help="Set whether certificates are used for Keystone authentication", default=False)
    parser.add_argument('--config_dir', help="Configuration directory", default='/etc/monasca/agent')
    parser.add_argument('--log_dir', help="monasca-agent log directory", default='/var/log/monasca/agent')
    parser.add_argument('--log_level', help="monasca-agent logging level (ERROR, WARNING, INFO, DEBUG)",
                        required=False, default='WARN')
    parser.add_argument('--template_dir', help="Alternative template directory",
                        default=os.path.join(PREFIX_DIR, 'share/monasca/agent'))
    parser.add_argument('--overwrite',
                        help="Overwrite existing plugin configuration. " +
                             "The default is to merge. agent.yaml is always overwritten.",
                        action="store_true")
    parser.add_argument('-r', '--remove',
                        help="Rather than add the detected configuration remove it.",
                        action="store_true", default=False)
    parser.add_argument('--skip_enable',
                        help="By default the service is enabled, " +
                             "which requires the script run as root. Set this to skip that step.",
                        action="store_true")
    parser.add_argument('--install_plugins_only',
                        help="Only update plugin configuration, do not configure services, users, etc."
                             " or restart services",
                        action="store_true")
    parser.add_argument('--user', help="User name to run monasca-agent as", default='mon-agent')
    parser.add_argument('-s', '--service',
                        help="Service this node is associated with, added as a dimension.")
    parser.add_argument('--amplifier',
                        help="Integer for the number of additional measurements to create. " +
                             "Additional measurements contain the 'amplifier' dimension. " +
                             "Useful for load testing; not for production use.",
                        default=0)
    parser.add_argument('-v', '--verbose', help="Verbose Output", action="store_true")
    parser.add_argument('--dry_run', help="Make no changes just report on changes", action="store_true")

    return parser.parse_args()


def plugin_detection(plugins, template_dir, detection_args, skip_failed=True, remove=False):
    """Runs the detection step for each plugin in the list and returns the complete detected agent config.

    :param plugins: A list of detection plugin classes
    :param template_dir: Location of plugin configuration templates
    :param detection_args: Arguments passed to each detection plugin
    :param skip_failed: When False any detection failure causes the run to halt and return None
    :return: An agent_config instance representing the total configuration from all detection plugins run.
    """
    plugin_config = agent_config.Plugins()
    for detect_class in plugins:
        # todo add option to install dependencies
        detect = detect_class(template_dir, False, detection_args)
        if detect.available:
            new_config = detect.build_config_with_name()
            if not remove:
                log.info('Configuring {0}'.format(detect.name))
            if new_config is not None:
                plugin_config.merge(new_config)
        elif not skip_failed:
            log.warn('Failed detection of plugin {0}.'.format(detect.name) +
                     "\n\tPossible causes: Service not found or missing arguments.")
            return None

    return plugin_config


def remove_config(args, plugin_names):
    """Parse all configuration removing any configuration built by plugins in plugin_names

    Note there is no concept of overwrite for removal.

    :param args: specified arguments
    :param plugin_names: A list of the plugin names to remove from the config
    :return: True if changes, false otherwise
    """
    changes = False
    existing_config_files = glob(os.path.join(args.config_dir, 'conf.d', '*.yaml'))
    detected_plugins = utils.discover_plugins(CUSTOM_PLUGIN_PATH)
    plugins = utils.select_plugins(args.detection_plugins, detected_plugins)

    if args.detection_args is not None:
        detected_config = plugin_detection(
            plugins, args.template_dir, args.detection_args,
            skip_failed=(args.detection_plugins is None), remove=True)

    for file_path in existing_config_files:
        deletes = False
        plugin_name = os.path.splitext(os.path.basename(file_path))[0]
        config = agent_config.read_plugin_config_from_disk(args.config_dir, plugin_name)
        # To avoid odd issues from iterating over a list you delete from,
        # build a new one instead
        new_instances = []
        if args.detection_args is None:
            for inst in config['instances']:
                if 'built_by' in inst and inst['built_by'] in plugin_names:
                    changes = True
                    deletes = True
                    continue
                new_instances.append(inst)
            config['instances'] = new_instances
        else:
            for detected_key in detected_config.keys():
                for inst in detected_config[detected_key]['instances']:
                    if inst in config['instances']:
                        changes = True
                        deletes = True
                        config['instances'].remove(inst)

        if deletes:
            agent_config.delete_from_config(args, config, file_path, plugin_name)
    return changes


if __name__ == "__main__":
    sys.exit(main())
e96ceb65864385e40bf500f14c613be2609612c7
ec1059f4ccea10deb2cb8fd7f9458700a5e6ca4c
/venv/Lib/site-packages/qiskit/chemistry/algorithms/ground_state_solvers/minimum_eigensolver_factories/vqe_uvccsd_factory.py
b6420e3857c9e732031dfb81131ff2bd2a339632
[ "MIT", "Apache-2.0" ]
permissive
shivam675/Quantum-CERN
b60c697a3a7ad836b3653ee9ce3875a6eafae3ba
ce02d9198d9f5a1aa828482fea9b213a725b56bb
refs/heads/main
2023-01-06T20:07:15.994294
2020-11-13T10:01:38
2020-11-13T10:01:38
330,435,191
1
0
MIT
2021-01-17T16:29:26
2021-01-17T16:29:25
null
UTF-8
Python
false
false
6,073
py
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.

"""The minimum eigensolver factory for ground state calculation algorithms."""

from typing import Optional
import numpy as np

from qiskit.aqua import QuantumInstance
from qiskit.aqua.algorithms import MinimumEigensolver, VQE
from qiskit.aqua.operators import ExpectationBase
from qiskit.aqua.components.optimizers import Optimizer
from qiskit.chemistry.components.initial_states import VSCF
from qiskit.chemistry.components.variational_forms import UVCC
from qiskit.chemistry.transformations import BosonicTransformation

from .minimum_eigensolver_factory import MinimumEigensolverFactory


class VQEUVCCSDFactory(MinimumEigensolverFactory):
    """A factory to construct a VQE minimum eigensolver with UVCCSD ansatz wavefunction."""

    def __init__(self,
                 quantum_instance: QuantumInstance,
                 optimizer: Optional[Optimizer] = None,
                 initial_point: Optional[np.ndarray] = None,
                 expectation: Optional[ExpectationBase] = None,
                 include_custom: bool = False) -> None:
        """
        Args:
            quantum_instance: The quantum instance used in the minimum eigensolver.
            optimizer: A classical optimizer.
            initial_point: An optional initial point (i.e. initial parameter values)
                for the optimizer. If ``None`` then VQE will look to the variational form for a
                preferred point and if not will simply compute a random one.
            expectation: The Expectation converter for taking the average value of the
                Observable over the var_form state function. When ``None`` (the default) an
                :class:`~qiskit.aqua.operators.expectations.ExpectationFactory` is used to select
                an appropriate expectation based on the operator and backend. When using Aer
                qasm_simulator backend, with paulis, it is however much faster to leverage custom
                Aer function for the computation but, although VQE performs much faster
                with it, the outcome is ideal, with no shot noise, like using a state vector
                simulator. If you are just looking for the quickest performance when choosing Aer
                qasm_simulator and the lack of shot noise is not an issue then set
                `include_custom` parameter here to ``True`` (defaults to ``False``).
            include_custom: When `expectation` parameter here is None setting this to ``True``
                will allow the factory to include the custom Aer pauli expectation.
        """
        self._quantum_instance = quantum_instance
        self._optimizer = optimizer
        self._initial_point = initial_point
        self._expectation = expectation
        self._include_custom = include_custom

    @property
    def quantum_instance(self) -> QuantumInstance:
        """Getter of the quantum instance."""
        return self._quantum_instance

    @quantum_instance.setter
    def quantum_instance(self, q_instance: QuantumInstance) -> None:
        """Setter of the quantum instance."""
        self._quantum_instance = q_instance

    @property
    def optimizer(self) -> Optimizer:
        """Getter of the optimizer."""
        return self._optimizer

    @optimizer.setter
    def optimizer(self, optimizer: Optimizer) -> None:
        """Setter of the optimizer."""
        self._optimizer = optimizer

    @property
    def initial_point(self) -> np.ndarray:
        """Getter of the initial point."""
        return self._initial_point

    @initial_point.setter
    def initial_point(self, initial_point: np.ndarray) -> None:
        """Setter of the initial point."""
        self._initial_point = initial_point

    @property
    def expectation(self) -> ExpectationBase:
        """Getter of the expectation."""
        return self._expectation

    @expectation.setter
    def expectation(self, expectation: ExpectationBase) -> None:
        """Setter of the expectation."""
        self._expectation = expectation

    @property
    def include_custom(self) -> bool:
        """Getter of the ``include_custom`` setting for the ``expectation`` setting."""
        return self._include_custom

    @include_custom.setter
    def include_custom(self, include_custom: bool) -> None:
        """Setter of the ``include_custom`` setting for the ``expectation`` setting."""
        self._include_custom = include_custom

    def get_solver(self, transformation: BosonicTransformation) -> MinimumEigensolver:
        """Returns a VQE with a UVCCSD wavefunction ansatz, based on ``transformation``.
        This works only with a ``BosonicTransformation``.

        Args:
            transformation: a bosonic qubit operator transformation.

        Returns:
            A VQE suitable to compute the ground state of the molecule transformed
            by ``transformation``.
        """
        basis = transformation.basis
        num_modes = transformation.num_modes
        if isinstance(basis, int):
            basis = [basis] * num_modes

        num_qubits = sum(basis)
        initial_state = VSCF(basis)
        var_form = UVCC(num_qubits, basis, [0, 1], initial_state=initial_state)

        vqe = VQE(var_form=var_form,
                  quantum_instance=self._quantum_instance,
                  optimizer=self._optimizer,
                  initial_point=self._initial_point,
                  expectation=self._expectation,
                  include_custom=self._include_custom)
        return vqe

    def supports_aux_operators(self):
        return VQE.supports_aux_operators()
a1b924f5f49eed987ae00f6718ac5bb748d24885
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
/cases/synthetic/sieve-big-2962.py
7ac76ceb1003da851367d979a20444b8c1c279b0
[]
no_license
Virtlink/ccbench-chocopy
c3f7f6af6349aff6503196f727ef89f210a1eac8
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
refs/heads/main
2023-04-07T15:07:12.464038
2022-02-03T15:42:39
2022-02-03T15:42:39
451,969,776
0
0
null
null
null
null
UTF-8
Python
false
false
31,740
py
# A resizable list of integers
class Vector(object):
    items: [int] = None
    size: int = 0

    def __init__(self:"Vector"):
        self.items = [0]

    # Returns current capacity
    def capacity(self:"Vector") -> int:
        return len(self.items)

    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Appends one item to end of vector
    def append(self:"Vector", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends many items to end of vector
    def append_all(self:"Vector", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)

    # Removes an item from the middle of vector
    def remove_at(self:"Vector", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Retrieves an item at a given index
    def get(self:"Vector", idx: int) -> int:
        return self.items[idx]

    # Retrieves the current size of the vector
    def length(self:"Vector") -> int:
        return self.size


# A resizable list of integers
class Vector2(object):
    items: [int] = None
    items2: [int] = None
    size: int = 0
    size2: int = 0

    def __init__(self:"Vector2"):
        self.items = [0]

    # Returns current capacity
    def capacity(self:"Vector2") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity2(self:"Vector2") -> int:
        return len(self.items)

    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Appends one item to end of vector
    def append(self:"Vector2", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends many items to end of vector
    def append_all(self:"Vector2", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)

    # Removes an item from the middle of vector
    def remove_at(self:"Vector2", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Retrieves an item at a given index
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Retrieves the current size of the vector
    def length(self:"Vector2") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length2(self:"Vector2") -> int:
        return self.size


# A resizable list of integers
class Vector3(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0

    def __init__(self:"Vector3"):
        self.items = [0]

    # Returns current capacity
    def capacity(self:"Vector3") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity2(self:"Vector3") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity3(self:"Vector3") -> int:
        return len(self.items)

    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Appends one item to end of vector
    def append(self:"Vector3", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends many items to end of vector
    def append_all(self:"Vector3", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)

    # Removes an item from the middle of vector
    def remove_at(self:"Vector3", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Retrieves an item at a given index
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]

    # Retrieves the current size of the vector
    def length(self:"Vector3") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length2(self:"Vector3") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length3(self:"Vector3") -> int:
        return self.size


# A resizable list of integers
class Vector4(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0

    def __init__(self:"Vector4"):
        self.items = [0]

    # Returns current capacity
    def capacity(self:"Vector4") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity2(self:"Vector4") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity3(self:"Vector4") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity4(self:"Vector4") -> int:
        return len(self.items)

    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector4") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Appends one item to end of vector
    def append(self:"Vector4", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append2(self:"Vector4", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends many items to end of vector
    def append_all(self:"Vector4", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        for item in new_items:
            self.append(item)

    # Removes an item from the middle of vector
    def remove_at(self:"Vector4", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Retrieves an item at a given index
    def get(self:"Vector4", idx: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]

    # Retrieves the current size of the vector
    def length(self:"Vector4") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length2(self:"Vector4") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length3(self:"Vector4") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length4(self:"Vector4") -> int:
        return self.size


# A resizable list of integers
class Vector5(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    items5: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    size5: int = 0

    def __init__(self:"Vector5"):
        self.items = [0]

    # Returns current capacity
    def capacity(self:"Vector5") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity2(self:"Vector5") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity3(self:"Vector5") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity4(self:"Vector5") -> int:
        return len(self.items)

    # Returns current capacity
    def capacity5(self:"Vector5") -> int:
        return len(self.items)

    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity2(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity3(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity4(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Increases capacity of vector by one element
    def increase_capacity5(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Appends one item to end of vector
    def append(self:"Vector5", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append2(self:"Vector5", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends one item to end of vector
    def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1

    # Appends many items to end of vector
    def append_all(self:"Vector5", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        for item in new_items:
            self.append(item)

    # Appends many items to end of vector
    def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        item5:int = 0
        for item in new_items:
            self.append(item)

    # Removes an item from the middle of vector
    def remove_at(self:"Vector5", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Removes an item from the middle of vector
    def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1

    # Retrieves an item at a given index
    def get(self:"Vector5", idx: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get2(self:"Vector5", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]

    # Retrieves an item at a given index
    def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
        return self.items[idx]

    # Retrieves the current size of the vector
    def length(self:"Vector5") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length2(self:"Vector5") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length3(self:"Vector5") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length4(self:"Vector5") -> int:
        return self.size

    # Retrieves the current size of the vector
    def length5(self:"Vector5") -> int:
        return self.size


# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
    doubling_limit:int = 1000

    # Overriding to do fewer resizes
    def increase_capacity(self:"DoublingVector") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall
back to # standard capacity increases self.items = self.items + [0] return self.capacity() # A faster (but more memory-consuming) implementation of vector class DoublingVector2(Vector): doubling_limit:int = 1000 doubling_limit2:int = 1000 # Overriding to do fewer resizes def increase_capacity(self:"DoublingVector2") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity2(self:"DoublingVector2") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # A faster (but more memory-consuming) implementation of vector class DoublingVector3(Vector): doubling_limit:int = 1000 doubling_limit2:int = 1000 doubling_limit3:int = 1000 # Overriding to do fewer resizes def increase_capacity(self:"DoublingVector3") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity2(self:"DoublingVector3") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity3(self:"DoublingVector3") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # A faster (but more memory-consuming) implementation of vector class DoublingVector4(Vector): doubling_limit:int = 1000 doubling_limit2:int = 1000 doubling_limit3:int = 1000 doubling_limit4:int = 1000 # Overriding to do fewer resizes def increase_capacity(self:"DoublingVector4") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity2(self:"DoublingVector4") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity3(self:"DoublingVector4") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity4(self:"DoublingVector4") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # A faster (but more memory-consuming) implementation of vector class DoublingVector5(Vector): 
doubling_limit:int = 1000 doubling_limit2:int = 1000 doubling_limit3:int = 1000 doubling_limit4:int = 1000 doubling_limit5:int = 1000 # Overriding to do fewer resizes def increase_capacity(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity2(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity3(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity4(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Overriding to do fewer resizes def increase_capacity5(self:"DoublingVector5") -> int: if (self.capacity() <= self.doubling_limit // 2): self.items = self.items + self.items else: # If doubling limit has been reached, fall back to # standard capacity increases self.items = self.items + [0] return self.capacity() # Makes a vector in the range [i, j) def vrange(i:int, j:int) -> Vector: v:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v def vrange2(i:int, j:int, i2:int, j2:int) -> Vector: v:Vector = None v2:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector: v:Vector = None v2:Vector = None v3:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector: v:Vector = None v2:Vector = None v3:Vector = None v4:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector: v:Vector = None v2:Vector = None v3:Vector = None v4:Vector = None v5:Vector = None v = DoublingVector() while i < j: v.append(i) i = i + 1 return v # Sieve of Eratosthenes (not really) def sieve(v:Vector) -> object: i:int = 0 j:int = 0 k:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 def sieve2(v:Vector, v2:Vector) -> object: i:int = 0 i2:int = 0 j:int = 0 j2:int = 0 k:int = 0 k2:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 def sieve3(v:Vector, v2:Vector, v3:Vector) -> object: i:int = 0 i2:int = 0 i3:int = 0 j:int = 0 j2:int = 0 j3:int = 0 k:int = 0 k2:int = 0 k3:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object: i:int = 0 i2:int = 0 i3:int = 0 i4:int = 0 j:int = 0 j2:int = 0 j3:int = 0 j4:int 
= 0 k:int = 0 k2:int = 0 k3:int = 0 k4:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object: i:int = 0 i2:int = 0 i3:int = 0 i4:int = 0 i5:int = 0 j:int = 0 j2:int = 0 j3:int = 0 j4:int = 0 j5:int = 0 k:int = 0 k2:int = 0 k3:int = 0 k4:int = 0 k5:int = 0 while i < v.length(): k = v.get(i) j = i + 1 while j < v.length(): if v.get(j) % k == 0: v.remove_at(j) else: j = j + 1 i = i + 1 # Input parameter n:int = 50 n2:int = 50 n3:int = 50 n4:int = 50 n5:int = 50 # Data v:Vector = None v2:Vector = None v3:Vector = None v4:Vector = None v5:Vector = None i:int = 0 i2:int = 0 i3:int = 0 i4:int = 0 i5:int = 0 # Crunch v = vrange(2, n) v2 = vrange(2, n) v3 = vrange(2, n) v4 = vrange(2, n) v5 = vrange(2, n) sieve(v) # Print while i < v.length(): print(v.get(i)) i = i + 1
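The benchmark above is written in a statically annotated Python dialect (ChocoPy-style), so it will not run under stock CPython as-is. As a cross-check of what its driver prints, here is a minimal plain-Python sketch of the same sieve logic, with the Vector classes replaced by an ordinary list (an assumption for illustration, not part of the benchmark):

# Plain-Python re-check of the benchmark's sieve: repeatedly delete
# later elements divisible by each surviving element.
def sieve_check(n):
    v = list(range(2, n))
    i = 0
    while i < len(v):
        k = v[i]
        j = i + 1
        while j < len(v):
            if v[j] % k == 0:
                del v[j]       # mirrors Vector.remove_at(j)
            else:
                j = j + 1
        i = i + 1
    return v

print(sieve_check(50))  # [2, 3, 5, 7, ..., 47] - exactly the primes below 50

This matches the driver above, which prints one surviving value per line for n = 50.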
4bc43308877b9e7be2dfda43ed082c330f7aad38
16e69196886254bc0fe9d8dc919ebcfa844f326a
/edc/dashboard/search/tests/__init__.py
18d1d4cedf76dc10af1643019f786e7ccc20952b
[]
no_license
botswana-harvard/edc
b54edc305e7f4f6b193b4498c59080a902a6aeee
4f75336ff572babd39d431185677a65bece9e524
refs/heads/master
2021-01-23T19:15:08.070350
2015-12-07T09:36:41
2015-12-07T09:36:41
35,820,838
0
0
null
null
null
null
UTF-8
Python
false
false
53
py
from .search_methods_tests import SearchMethodsTests
f69e4d4f4288a29c8508e0ac794a2eaa0995a2b8
353def93fa77384ee3a5e3de98cfed318c480634
/.history/week02/1/proxy/proxy/spiders/maoyan_20200705155728.py
f4c680398e9df42ea33eb12d51a8bd6097744c5a
[]
no_license
ydbB/Python001-class01
d680abc3ea1ccaeb610751e3488421417d381156
ad80037ccfc68d39125fa94d2747ab7394ac1be8
refs/heads/master
2022-11-25T11:27:45.077139
2020-07-19T12:35:12
2020-07-19T12:35:12
272,783,233
0
0
null
2020-06-16T18:28:15
2020-06-16T18:28:15
null
UTF-8
Python
false
false
2,559
py
import scrapy from proxy.items import ProxyItem import lxml.etree class MaoyanSpider(scrapy.Spider): name = 'maoyan' allowed_domains = ['maoyan.com'] start_urls = ['http://maoyan.com/'] header = { 'Content-Type': 'text/plain; charset=UTF-8', 'Cookie' : '__mta=251934006.1593072991075.1593315374931.1593349407197.45; uuid_n_v=v1; uuid=2395D3F0B6BC11EA9F28E30FF5FFF73C9A16AE2FA53A448DA75AEAA9D715CB59; _csrf=8557626db9b655cf9050ae7e5b2aab69278c8061c21eca95e1c3cf2130b0b64c; _lxsdk_cuid=172ea8cb247c8-0a73066b1c0a8b-4353760-100200-172ea8cb248c8; _lxsdk=2395D3F0B6BC11EA9F28E30FF5FFF73C9A16AE2FA53A448DA75AEAA9D715CB59; mojo-uuid=c457eacb7c1eb59d3d2f6c1f8d75b9c9; Hm_lvt_703e94591e87be68cc8da0da7cbd0be2=1593072989,1593073002; _lx_utm=utm_source%3Dgoogle%26utm_medium%3Dorganic; __mta=251934006.1593072991075.1593140975947.1593145813576.21; mojo-session-id={"id":"afe2ef89c10d6e1c8fc94e26d831b20e","time":1593349078441}; mojo-trace-id=4; Hm_lpvt_703e94591e87be68cc8da0da7cbd0be2=1593349407; _lxsdk_s=172fb017d51-4c4-303-783%7C%7C8', # 'Host' : 'http://www.baidu.com', 'Origin': 'https://maoyan.com', 'Referer': 'https://maoyan.com/board/4', 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36', } # def parse(self, response): # pass def start_requests(self): url = f'https://maoyan.com/board/4' yield scrapy.Request(url=url,headers=self.header,callback=self.parse) def parse(self, response): selector = lxml.etree.HTML(response.text) item = ProxyItem() for i in range(0,10): link = selector.xpath('//*[@id="app"]/div/div/div[1]/dl/dd[' + str(i + 1) + ']/div/div/div[1]/p[1]/a')[0].get('href') name = selector.xpath('//*[@id="app"]/div/div/div[1]/dl/dd[' + str(i + 1) + ']/div/div/div[1]/p[1]/a')[0].get('title') time = selector.xpath('//*[@id="app"]/div/div/div[1]/dl/dd[' + str(i + 1) + ']/div/div/div[1]/p[3]')[0].text item['films_name'] = name item['release_time'] = time print(link) yield scrapy.Request(url=response.urljoin(link), headers = self.header, meta={'item':item},callback=self.parse1) def parse1(self, response): item = response.meta['item'] selector = lxml.etree.HTML(response.text) type = selector.xpath('/html/body/div[3]/div/div[2]/div[1]/ul/li[1]')[0].text.replace('\n',' ') print(type) item['films_type'] = type print(item) yield item
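The 1-based XPath indices built in the loop above read more cleanly as f-strings; this sketch covers just the extraction step, with selector and the row layout taken from the spider above:

# f-string variant of the per-row extraction (rows dd[1]..dd[10]).
for i in range(1, 11):
    row = f'//*[@id="app"]/div/div/div[1]/dl/dd[{i}]/div/div/div[1]'
    anchor = selector.xpath(row + '/p[1]/a')[0]
    link = anchor.get('href')   # relative URL on the board page
    name = anchor.get('title')
    release_time = selector.xpath(row + '/p[3]')[0].text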
fe29ddbba5045df30a1934eb9bcd7574cdc67eb7
71c9bf59320d5a67d1395c02ee7d68805f820db7
/solutions/A_h_djikshtra's_algorithm.py
28868dd91a4a652195450a6db4e23dc99f6283b9
[]
no_license
AnjalBam/iw-assignment-python-iii
34609eef05f4b57a3cc17166603c121c1635b2a9
c1a05c1a0091de3b718fcb476bbc906817492294
refs/heads/master
2022-11-13T20:23:39.642580
2020-07-13T09:45:00
2020-07-13T09:45:00
279,208,648
0
0
null
null
null
null
UTF-8
Python
false
false
3,349
py
""" A. Make pythonic solutions for each of the following data structure and algorithm problems. e) Dijkshtra's Algorithm """ from collections import deque, namedtuple # we'll use infinity as a default distance to nodes. inf = float('inf') Edge = namedtuple('Edge', 'start, end, cost') def make_edge(start, end, cost=1): return Edge(start, end, cost) class Graph: def __init__(self, edges): # check if the data is right wrong_edges = [i for i in edges if len(i) not in [2, 3]] if wrong_edges: raise ValueError('Wrong edges data: {}'.format(wrong_edges)) self.edges = [make_edge(*edge) for edge in edges] @property def vertices(self): return set( sum( ([edge.start, edge.end] for edge in self.edges), [] ) ) def get_node_pairs(self, n1, n2, both_ends=True): if both_ends: node_pairs = [[n1, n2], [n2, n1]] else: node_pairs = [[n1, n2]] return node_pairs def remove_edge(self, n1, n2, both_ends=True): node_pairs = self.get_node_pairs(n1, n2, both_ends) edges = self.edges[:] for edge in edges: if [edge.start, edge.end] in node_pairs: self.edges.remove(edge) def add_edge(self, n1, n2, cost=1, both_ends=True): node_pairs = self.get_node_pairs(n1, n2, both_ends) for edge in self.edges: if [edge.start, edge.end] in node_pairs: return ValueError('Edge {} {} already exists'.format(n1, n2)) self.edges.append(Edge(start=n1, end=n2, cost=cost)) if both_ends: self.edges.append(Edge(start=n2, end=n1, cost=cost)) @property def neighbours(self): neighbours = {vertex: set() for vertex in self.vertices} for edge in self.edges: neighbours[edge.start].add((edge.end, edge.cost)) return neighbours def dijkstra(self, source, dest): assert source in self.vertices, 'Such source node doesn\'t exist' distances = {vertex: inf for vertex in self.vertices} previous_vertices = { vertex: None for vertex in self.vertices } distances[source] = 0 vertices = self.vertices.copy() while vertices: current_vertex = min( vertices, key=lambda vertex: distances[vertex]) vertices.remove(current_vertex) if distances[current_vertex] == inf: break for neighbour, cost in self.neighbours[current_vertex]: alternative_route = distances[current_vertex] + cost if alternative_route < distances[neighbour]: distances[neighbour] = alternative_route previous_vertices[neighbour] = current_vertex path, current_vertex = deque(), dest while previous_vertices[current_vertex] is not None: path.appendleft(current_vertex) current_vertex = previous_vertices[current_vertex] if path: path.appendleft(current_vertex) return path graph = Graph([ ("a", "b", 7), ("a", "c", 9), ("a", "f", 14), ("b", "c", 10), ("b", "d", 15), ("c", "d", 11), ("c", "f", 2), ("d", "e", 6), ("e", "f", 9)]) print(graph.dijkstra("a", "e"))
9299880d2374060c24d9a6fd117a920d11784c44
ea2cf796332879d86561f80882da93b672966448
/configs/rotated_retinanet/ssdd/rotated_retinanet_hbb_r50_adamw_fpn_6x_ssdd_oc.py
6b6e85da78c1deeca585141b1fa61f5de0d0f2e7
[ "Apache-2.0" ]
permissive
yangxue0827/h2rbox-mmrotate
968c34adf22eca073ab147b670226884ea80ac61
cfd7f1fef6ae4d4e17cb891d1ec144ece8b5d7f5
refs/heads/main
2023-05-23T10:02:58.344148
2023-02-14T05:28:38
2023-02-14T05:28:38
501,580,810
68
8
null
null
null
null
UTF-8
Python
false
false
394
py
_base_ = ['./rotated_retinanet_hbb_r50_fpn_6x_ssdd_oc.py'] optimizer = dict( _delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999), weight_decay=0.05, paramwise_cfg=dict( custom_keys={ 'absolute_pos_embed': dict(decay_mult=0.), 'relative_position_bias_table': dict(decay_mult=0.), 'norm': dict(decay_mult=0.) }))
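The _base_ line pulls in the SGD baseline config of the same name, and _delete_=True tells the mmcv config merger to discard the inherited optimizer dict instead of merging into it, so only AdamW's keys survive. A minimal sketch of verifying the merge, assuming mmcv is installed and the working directory is the repository root (the path is the one from this record):

from mmcv import Config

# Load the derived config; mmcv resolves _base_ and applies _delete_.
cfg = Config.fromfile('configs/rotated_retinanet/ssdd/rotated_retinanet_hbb_r50_adamw_fpn_6x_ssdd_oc.py')
print(cfg.optimizer.type)  # expected: 'AdamW'
print(cfg.optimizer.lr)    # expected: 0.0001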
bfa0210d76e2501a11b68119104d1f8ddab47ced
e76ea38dbe5774fccaf14e1a0090d9275cdaee08
/src/xwalk/tools/build/win/FILES.cfg
446675943eaebc95282133c60583a38067c11deb
[ "BSD-3-Clause" ]
permissive
eurogiciel-oss/Tizen_Crosswalk
efc424807a5434df1d5c9e8ed51364974643707d
a68aed6e29bd157c95564e7af2e3a26191813e51
refs/heads/master
2021-01-18T19:19:04.527505
2014-02-06T13:43:21
2014-02-06T13:43:21
16,070,101
1
3
null
null
null
null
UTF-8
Python
false
false
3,094
cfg
# -*- python -*- # ex: set syntax=python: # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # This is a buildbot configuration file containing a tagged list of files # processed by the stage/archive scripts. The known tags are: # # filename: Name of the file in the build output directory. # arch: List of CPU architectures for which this file should be processed # Leave this unspecified to prcoess for all architectures. # Acceptable values are 64bit, 32bit and arm. # buildtype: List of build types for which this file should be processed. # archive: The name of the archive file to store filename in. If not specified, # filename is added to the default archive (e.g. platform.zip). If # archive == filename, filename is archived directly, not zipped. # direct_archive: Force a file to be archived as-is, bypassing zip creation. # NOTE: This flag will not apply if more than one file has the # same 'archive' name, which will create a zip of all the # files instead. # filegroup: List of named groups to which this file belongs. # default: Legacy "default archive". TODO(mmoss): These should # be updated to specify an 'archive' name and then this # filegroup and the related archive_utils.ParseLegacyList() # should go away. # symsrc: Files to upload to the symbol server. # optional: List of buildtypes for which the file might not exist, and it's not # considered an error. FILES = [ { 'filename': 'xwalk.exe', 'buildtype': ['dev', 'official'], }, { 'filename': 'xwalk.pak', 'buildtype': ['dev', 'official'], }, { 'filename': 'ffmpegsumo.dll', 'buildtype': ['dev'], }, { 'filename': 'icudt.dll', 'buildtype': ['dev', 'official'], }, { 'filename': 'D3DCompiler_46.dll', 'buildtype': ['dev'], }, { 'filename': 'libEGL.dll', 'buildtype': ['dev', 'official'], }, { 'filename': 'libGLESv2.dll', 'buildtype': ['dev', 'official'], }, # installer creation scripts { 'filename': 'create_windows_installer.bat', 'buildtype': ['dev', 'official'], }, { 'filename': 'app.wxs.templ', 'buildtype': ['dev', 'official'], }, { 'filename': 'guid.vbs', 'buildtype': ['dev', 'official'], }, # syms files { 'filename': 'xwalk.exe.pdb', 'buildtype': ['dev', 'official'], 'archive': 'xwalk-win32-syms.zip', }, { 'filename': 'ffmpegsumo.dll.pdb', 'buildtype': ['dev'], 'archive': 'xwalk-win32-syms.zip', }, { 'filename': 'libEGL.dll.pdb', 'buildtype': ['dev', 'official'], 'archive': 'xwalk-win32-syms.zip', }, { 'filename': 'libGLESv2.dll.pdb', 'buildtype': ['dev', 'official'], 'archive': 'xwalk-win32-syms.zip', }, # XPK package generator { 'filename': 'tools/make_xpk.py', 'buildtype': ['dev', 'official'], }, ]
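Because FILES.cfg is executable Python that assigns a single FILES list, a staging script can load it with exec() and filter on the tags documented in the header comment. This is a minimal sketch under that assumption; it is not the actual API of Chromium's archive_utils module:

# Load FILES.cfg and list everything shipped in 'official' builds.
namespace = {}
with open('src/xwalk/tools/build/win/FILES.cfg') as f:
    exec(f.read(), namespace)

official = [entry['filename'] for entry in namespace['FILES']
            if 'official' in entry.get('buildtype', [])]
print(official)  # xwalk.exe, xwalk.pak, icudt.dll, ...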
7ff073702298c100dd9cd88ef17679ad6ee1850e
412b699e0f497ac03d6618fe349f4469646c6f2d
/env/lib/python3.8/site-packages/Crypto/Util/Padding.py
da69e55987227357a55f8e1b57fae5f7eb8cac74
[ "MIT" ]
permissive
EtienneBrJ/Portfolio
7c70573f02a5779f9070d6d9df58d460828176e3
6b8d8cf9622eadef47bd10690c1bf1e7fd892bfd
refs/heads/main
2023-09-03T15:03:43.698518
2021-11-04T01:02:33
2021-11-04T01:02:33
411,076,325
1
0
MIT
2021-10-31T13:43:09
2021-09-27T23:48:59
HTML
UTF-8
Python
false
false
4,313
py
# # Util/Padding.py : Functions to manage padding # # =================================================================== # # Copyright (c) 2014, Legrandin <[email protected]> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # =================================================================== __all__ = [ 'pad', 'unpad' ] from Crypto.Util.py3compat import * def pad(data_to_pad, block_size, style='pkcs7'): """Apply standard padding. Args: data_to_pad (byte string): The data that needs to be padded. block_size (integer): The block boundary to use for padding. The output length is guaranteed to be a multiple of :data:`block_size`. style (string): Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or *'x923'*. Return: byte string : the original data with the appropriate padding added at the end. """ padding_len = block_size-len(data_to_pad)%block_size if style == 'pkcs7': padding = bchr(padding_len)*padding_len elif style == 'x923': padding = bchr(0)*(padding_len-1) + bchr(padding_len) elif style == 'iso7816': padding = bchr(128) + bchr(0)*(padding_len-1) else: raise ValueError("Unknown padding style") return data_to_pad + padding def unpad(padded_data, block_size, style='pkcs7'): """Remove standard padding. Args: padded_data (byte string): A piece of data with padding that needs to be stripped. block_size (integer): The block boundary to use for padding. The input length must be a multiple of :data:`block_size`. style (string): Padding algorithm. It can be *'pkcs7'* (default), *'iso7816'* or *'x923'*. Return: byte string : data without padding. Raises: ValueError: if the padding is incorrect. 
""" pdata_len = len(padded_data) if pdata_len == 0: raise ValueError("Zero-length input cannot be unpadded") if pdata_len % block_size: raise ValueError("Input data is not padded") if style in ('pkcs7', 'x923'): padding_len = bord(padded_data[-1]) if padding_len<1 or padding_len>min(block_size, pdata_len): raise ValueError("Padding is incorrect.") if style == 'pkcs7': if padded_data[-padding_len:]!=bchr(padding_len)*padding_len: raise ValueError("PKCS#7 padding is incorrect.") else: if padded_data[-padding_len:-1]!=bchr(0)*(padding_len-1): raise ValueError("ANSI X.923 padding is incorrect.") elif style == 'iso7816': padding_len = pdata_len - padded_data.rfind(bchr(128)) if padding_len<1 or padding_len>min(block_size, pdata_len): raise ValueError("Padding is incorrect.") if padding_len>1 and padded_data[1-padding_len:]!=bchr(0)*(padding_len-1): raise ValueError("ISO 7816-4 padding is incorrect.") else: raise ValueError("Unknown padding style") return padded_data[:-padding_len]
fd7b4afd97e2ab6d8426692b6eb6f7be4be4d1e6
6b1fd67270b150ec3a4983945f6374c532e0c3b5
/pymesh/examples/md5/main.py
6ab99a96b6f2a2fccd3c3a7466c0a19d02bdf929
[ "BSD-2-Clause-Views", "BSD-2-Clause" ]
permissive
adamlwgriffiths/PyMesh
8078e2ff4cc93e9def368b4fbbbf52b9f8b3292e
dbed5c7a226b820fc3adb33e30f750ab1ffbd892
refs/heads/master
2016-09-05T09:42:43.068368
2013-02-03T14:52:11
2013-02-03T14:52:11
5,817,565
5
1
null
null
null
null
UTF-8
Python
false
false
3,099
py
import os from pymesh.md5 import MD5_Mesh, MD5_Anim def load_mesh( filename ): print 'Loading', filename md5 = MD5_Mesh() md5.load( filename ) # print the data out for verification print 'version', md5.md5_version print 'num_joints', md5.num_joints print 'num_meshes', md5.num_meshes print 'joints' for joint in md5.joints: print '\tname', joint.name print '\tparent', joint.parent print '\tposition', joint.position print '\torientation', joint.orientation print 'meshes' for mesh in md5.meshes: print '\tshader', mesh.shader print '\tnumverts', mesh.num_verts for vert in mesh.vertices: print '\t\ttcs', vert.tcs print '\t\tstart_weight', vert.start_weight print '\t\tweight_count', vert.weight_count print '\tnumtris', mesh.num_tris for tri in mesh.tris: print '\t\ttri', tri print '\tnumweights', mesh.num_weights for weight in mesh.weights: print '\t\tjoint', weight.joint print '\t\tbias', weight.bias print '\t\tposition', weight.position def load_anim( filename ): print 'Loading', filename md5 = MD5_Anim() md5.load( filename ) print 'version', md5.md5_version print 'frame_rate', md5.frame_rate print 'hierarchy' print 'num_joints', md5.hierarchy.num_joints for joint in md5.hierarchy: print '\tname', joint.name print '\tparent', joint.parent print '\tnum_components', joint.num_components print '\tframe', joint.frame print 'bounds' print 'num_bounds', md5.bounds.num_bounds for bounds in md5.bounds: print '\tminimum', bounds[ 0 ] print '\tmaximum', bounds[ 1 ] print 'base frame' print 'num_bones', md5.base_frame.num_bones for bone in md5.base_frame: print '\tposition', bone.position print '\torientation', bone.orientation print 'frames' print 'num_frames', md5.num_frames for frame in md5.frames: print '\tjoints' print '\tnum_joints', frame.num_joints for joint in frame: print '\t\tposition', joint.position print '\t\torientation', joint.orientation def main(): # load all md5 files in our data directory # get the path relative to our examples file path = os.path.join( os.path.dirname( __file__ ), '../data/md5' ) # get the directory contents contents = os.listdir(path) # iterate through the contents and load # each file that is a .md5mesh or .md5anim file for filename in contents: name, extension = os.path.splitext( filename ) # reattach our current directory path = os.path.join( os.path.dirname( __file__ ), '../data/md5', filename ) if extension.lower() == '.md5mesh': load_mesh( path ) if extension.lower() == '.md5anim': load_anim( path ) if __name__ == '__main__': main()
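The loader above targets Python 2 (bare print statements). For orientation, this is a sketch of the same directory-scan driver in Python 3 syntax; load_mesh and load_anim refer to the functions above, whose print statements would need the same conversion before this could actually run:

import os

def main():
    # Scan ../data/md5 and dispatch on file extension, as in the original.
    path = os.path.join(os.path.dirname(__file__), '../data/md5')
    for filename in os.listdir(path):
        _, extension = os.path.splitext(filename)
        full_path = os.path.join(path, filename)
        if extension.lower() == '.md5mesh':
            load_mesh(full_path)
        elif extension.lower() == '.md5anim':
            load_anim(full_path)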
1e121291a3990554553ee44c813bbf62b7e065e9
8cdbd5986a40eeb0a14099acb0674cfd4ebe5524
/venv/lib/python2.7/site-packages/textblob/blob.py
d4e53698d0d188b176c256aa2b025a1223ecabb9
[ "MIT" ]
permissive
saiflakhani/twitter-stock-recommendation
23a24d330ab15dc1d2c960836e4b68d0f5e66c0d
50e7177375560895b31371593d03e88b07d24afd
refs/heads/master
2023-04-22T02:45:53.739377
2021-05-05T14:36:25
2021-05-05T14:36:25
362,073,163
0
0
MIT
2021-04-27T10:33:02
2021-04-27T10:33:01
null
UTF-8
Python
false
false
29,309
py
# -*- coding: utf-8 -*- """Wrappers for various units of text, including the main :class:`TextBlob <textblob.blob.TextBlob>`, :class:`Word <textblob.blob.Word>`, and :class:`WordList <textblob.blob.WordList>` classes. Example usage: :: >>> from textblob import TextBlob >>> b = TextBlob("Simple is better than complex.") >>> b.tags [(u'Simple', u'NN'), (u'is', u'VBZ'), (u'better', u'JJR'), (u'than', u'IN'), (u'complex', u'NN')] >>> b.noun_phrases WordList([u'simple']) >>> b.words WordList([u'Simple', u'is', u'better', u'than', u'complex']) >>> b.sentiment (0.06666666666666667, 0.41904761904761906) >>> b.words[0].synsets()[0] Synset('simple.n.01') .. versionchanged:: 0.8.0 These classes are now imported from ``textblob`` rather than ``text.blob``. """ from __future__ import unicode_literals, absolute_import import sys import json from collections import defaultdict import nltk from textblob.decorators import cached_property, requires_nltk_corpus from textblob.utils import lowerstrip, PUNCTUATION_REGEX from textblob.inflect import singularize as _singularize, pluralize as _pluralize from textblob.mixins import BlobComparableMixin, StringlikeMixin from textblob.compat import unicode, basestring from textblob.base import (BaseNPExtractor, BaseTagger, BaseTokenizer, BaseSentimentAnalyzer, BaseParser) from textblob.np_extractors import FastNPExtractor from textblob.taggers import NLTKTagger from textblob.tokenizers import WordTokenizer, sent_tokenize, word_tokenize from textblob.sentiments import PatternAnalyzer from textblob.parsers import PatternParser from textblob.translate import Translator from textblob.en import suggest # Wordnet interface # NOTE: textblob.wordnet is not imported so that the wordnet corpus can be lazy-loaded _wordnet = nltk.corpus.wordnet def _penn_to_wordnet(tag): """Converts a Penn corpus tag into a Wordnet tag.""" if tag in ("NN", "NNS", "NNP", "NNPS"): return _wordnet.NOUN if tag in ("JJ", "JJR", "JJS"): return _wordnet.ADJ if tag in ("VB", "VBD", "VBG", "VBN", "VBP", "VBZ"): return _wordnet.VERB if tag in ("RB", "RBR", "RBS"): return _wordnet.ADV return None class Word(unicode): """A simple word representation. Includes methods for inflection, translation, and WordNet integration. """ translator = Translator() def __new__(cls, string, pos_tag=None): """Return a new instance of the class. It is necessary to override this method in order to handle the extra pos_tag argument in the constructor. """ return super(Word, cls).__new__(cls, string) def __init__(self, string, pos_tag=None): self.string = string self.pos_tag = pos_tag def __repr__(self): return repr(self.string) def __str__(self): return self.string def singularize(self): """Return the singular version of the word as a string.""" return Word(_singularize(self.string)) def pluralize(self): '''Return the plural version of the word as a string.''' return Word(_pluralize(self.string)) def translate(self, from_lang='auto', to="en"): '''Translate the word to another language using Google's Translate API. .. versionadded:: 0.5.0 ''' return self.translator.translate(self.string, from_lang=from_lang, to_lang=to) def detect_language(self): '''Detect the word's language using Google's Translate API. .. versionadded:: 0.5.0 ''' return self.translator.detect(self.string) def spellcheck(self): '''Return a list of (word, confidence) tuples of spelling corrections. Based on: Peter Norvig, "How to Write a Spelling Corrector" (http://norvig.com/spell-correct.html) as implemented in the pattern library. .. 
versionadded:: 0.6.0 ''' return suggest(self.string) def correct(self): '''Correct the spelling of the word. Returns the word with the highest confidence using the spelling corrector. .. versionadded:: 0.6.0 ''' return Word(self.spellcheck()[0][0]) @cached_property @requires_nltk_corpus def lemma(self): """Return the lemma of this word using Wordnet's morphy function. """ return self.lemmatize(pos=self.pos_tag) @requires_nltk_corpus def lemmatize(self, pos=None): """Return the lemma for a word using WordNet's morphy function. :param pos: Part of speech to filter upon. If `None`, defaults to ``_wordnet.NOUN``. .. versionadded:: 0.8.1 """ if pos is None: tag = _wordnet.NOUN elif pos in _wordnet._FILEMAP.keys(): tag = pos else: tag = _penn_to_wordnet(pos) lemmatizer = nltk.stem.WordNetLemmatizer() return lemmatizer.lemmatize(self.string, tag) PorterStemmer = nltk.stem.porter.PorterStemmer() LancasterStemmer = nltk.stem.lancaster.LancasterStemmer() SnowballStemmer = nltk.stem.snowball.SnowballStemmer("english") #added 'stemmer' on lines of lemmatizer #based on nltk def stem(self, stemmer=PorterStemmer): """Stem a word using various NLTK stemmers. (Default: Porter Stemmer) .. versionadded:: 0.12.0 """ return stemmer.stem(self.string) @cached_property def synsets(self): """The list of Synset objects for this Word. :rtype: list of Synsets .. versionadded:: 0.7.0 """ return self.get_synsets(pos=None) @cached_property def definitions(self): """The list of definitions for this word. Each definition corresponds to a synset. .. versionadded:: 0.7.0 """ return self.define(pos=None) def get_synsets(self, pos=None): """Return a list of Synset objects for this word. :param pos: A part-of-speech tag to filter upon. If ``None``, all synsets for all parts of speech will be loaded. :rtype: list of Synsets .. versionadded:: 0.7.0 """ return _wordnet.synsets(self.string, pos) def define(self, pos=None): """Return a list of definitions for this word. Each definition corresponds to a synset for this word. :param pos: A part-of-speech tag to filter upon. If ``None``, definitions for all parts of speech will be loaded. :rtype: List of strings .. versionadded:: 0.7.0 """ return [syn.definition() for syn in self.get_synsets(pos=pos)] class WordList(list): """A list-like collection of words.""" def __init__(self, collection): """Initialize a WordList. Takes a collection of strings as its only argument. """ self._collection = [Word(w) for w in collection] super(WordList, self).__init__(self._collection) def __str__(self): return str(self._collection) def __repr__(self): """Returns a string representation for debugging.""" class_name = self.__class__.__name__ return '{cls}({lst})'.format(cls=class_name, lst=repr(self._collection)) def __getitem__(self, key): """Returns a string at the given index.""" if isinstance(key, slice): return self.__class__(self._collection[key]) else: return self._collection[key] def __getslice__(self, i, j): # This is included for Python 2.* compatibility return self.__class__(self._collection[i:j]) def __iter__(self): return iter(self._collection) def count(self, strg, case_sensitive=False, *args, **kwargs): """Get the count of a word or phrase `s` within this WordList. :param strg: The string to count. :param case_sensitive: A boolean, whether or not the search is case-sensitive. """ if not case_sensitive: return [word.lower() for word in self].count(strg.lower(), *args, **kwargs) return self._collection.count(strg, *args, **kwargs) def append(self, obj): """Append an object to end. 
If the object is a string, appends a :class:`Word <Word>` object. """ if isinstance(obj, basestring): return self._collection.append(Word(obj)) else: return self._collection.append(obj) def extend(self, iterable): """Extend WordList by appending elements from ``iterable``. If an element is a string, appends a :class:`Word <Word>` object. """ [self._collection.append(Word(e) if isinstance(e, basestring) else e) for e in iterable] return self def upper(self): """Return a new WordList with each word upper-cased.""" return self.__class__([word.upper() for word in self]) def lower(self): """Return a new WordList with each word lower-cased.""" return self.__class__([word.lower() for word in self]) def singularize(self): """Return the single version of each word in this WordList.""" return self.__class__([word.singularize() for word in self]) def pluralize(self): """Return the plural version of each word in this WordList.""" return self.__class__([word.pluralize() for word in self]) def lemmatize(self): """Return the lemma of each word in this WordList.""" return self.__class__([word.lemmatize() for word in self]) def stem(self, *args, **kwargs): """Return the stem for each word in this WordList.""" return self.__class__([word.stem(*args, **kwargs) for word in self]) def _validated_param(obj, name, base_class, default, base_class_name=None): """Validates a parameter passed to __init__. Makes sure that obj is the correct class. Return obj if it's not None or falls back to default :param obj: The object passed in. :param name: The name of the parameter. :param base_class: The class that obj must inherit from. :param default: The default object to fall back upon if obj is None. """ base_class_name = base_class_name if base_class_name else base_class.__name__ if obj is not None and not isinstance(obj, base_class): raise ValueError('{name} must be an instance of {cls}' .format(name=name, cls=base_class_name)) return obj or default def _initialize_models(obj, tokenizer, pos_tagger, np_extractor, analyzer, parser, classifier): """Common initialization between BaseBlob and Blobber classes.""" # tokenizer may be a textblob or an NLTK tokenizer obj.tokenizer = _validated_param(tokenizer, "tokenizer", base_class=(BaseTokenizer, nltk.tokenize.api.TokenizerI), default=BaseBlob.tokenizer, base_class_name="BaseTokenizer") obj.np_extractor = _validated_param(np_extractor, "np_extractor", base_class=BaseNPExtractor, default=BaseBlob.np_extractor) obj.pos_tagger = _validated_param(pos_tagger, "pos_tagger", BaseTagger, BaseBlob.pos_tagger) obj.analyzer = _validated_param(analyzer, "analyzer", BaseSentimentAnalyzer, BaseBlob.analyzer) obj.parser = _validated_param(parser, "parser", BaseParser, BaseBlob.parser) obj.classifier = classifier class BaseBlob(StringlikeMixin, BlobComparableMixin): """An abstract base class that all textblob classes will inherit from. Includes words, POS tag, NP, and word count properties. Also includes basic dunder and string methods for making objects like Python strings. :param text: A string. :param tokenizer: (optional) A tokenizer instance. If ``None``, defaults to :class:`WordTokenizer() <textblob.tokenizers.WordTokenizer>`. :param np_extractor: (optional) An NPExtractor instance. If ``None``, defaults to :class:`FastNPExtractor() <textblob.en.np_extractors.FastNPExtractor>`. :param pos_tagger: (optional) A Tagger instance. If ``None``, defaults to :class:`NLTKTagger <textblob.en.taggers.NLTKTagger>`. :param analyzer: (optional) A sentiment analyzer. 
If ``None``, defaults to :class:`PatternAnalyzer <textblob.en.sentiments.PatternAnalyzer>`. :param parser: A parser. If ``None``, defaults to :class:`PatternParser <textblob.en.parsers.PatternParser>`. :param classifier: A classifier. .. versionchanged:: 0.6.0 ``clean_html`` parameter deprecated, as it was in NLTK. """ np_extractor = FastNPExtractor() pos_tagger = NLTKTagger() tokenizer = WordTokenizer() translator = Translator() analyzer = PatternAnalyzer() parser = PatternParser() def __init__(self, text, tokenizer=None, pos_tagger=None, np_extractor=None, analyzer=None, parser=None, classifier=None, clean_html=False): if not isinstance(text, basestring): raise TypeError('The `text` argument passed to `__init__(text)` ' 'must be a string, not {0}'.format(type(text))) if clean_html: raise NotImplementedError("clean_html has been deprecated. " "To remove HTML markup, use BeautifulSoup's " "get_text() function") self.raw = self.string = text self.stripped = lowerstrip(self.raw, all=True) _initialize_models(self, tokenizer, pos_tagger, np_extractor, analyzer, parser, classifier) @cached_property def words(self): """Return a list of word tokens. This excludes punctuation characters. If you want to include punctuation characters, access the ``tokens`` property. :returns: A :class:`WordList <WordList>` of word tokens. """ return WordList(word_tokenize(self.raw, include_punc=False)) @cached_property def tokens(self): """Return a list of tokens, using this blob's tokenizer object (defaults to :class:`WordTokenizer <textblob.tokenizers.WordTokenizer>`). """ return WordList(self.tokenizer.tokenize(self.raw)) def tokenize(self, tokenizer=None): """Return a list of tokens, using ``tokenizer``. :param tokenizer: (optional) A tokenizer object. If None, defaults to this blob's default tokenizer. """ t = tokenizer if tokenizer is not None else self.tokenizer return WordList(t.tokenize(self.raw)) def parse(self, parser=None): """Parse the text. :param parser: (optional) A parser instance. If ``None``, defaults to this blob's default parser. .. versionadded:: 0.6.0 """ p = parser if parser is not None else self.parser return p.parse(self.raw) def classify(self): """Classify the blob using the blob's ``classifier``.""" if self.classifier is None: raise NameError("This blob has no classifier. Train one first!") return self.classifier.classify(self.raw) @cached_property def sentiment(self): """Return a tuple of form (polarity, subjectivity ) where polarity is a float within the range [-1.0, 1.0] and subjectivity is a float within the range [0.0, 1.0] where 0.0 is very objective and 1.0 is very subjective. :rtype: namedtuple of the form ``Sentiment(polarity, subjectivity)`` """ return self.analyzer.analyze(self.raw) @cached_property def sentiment_assessments(self): """Return a tuple of form (polarity, subjectivity, assessments ) where polarity is a float within the range [-1.0, 1.0], subjectivity is a float within the range [0.0, 1.0] where 0.0 is very objective and 1.0 is very subjective, and assessments is a list of polarity and subjectivity scores for the assessed tokens. 
:rtype: namedtuple of the form ``Sentiment(polarity, subjectivity, assessments)`` """ return self.analyzer.analyze(self.raw, keep_assessments=True) @cached_property def polarity(self): """Return the polarity score as a float within the range [-1.0, 1.0] :rtype: float """ return PatternAnalyzer().analyze(self.raw)[0] @cached_property def subjectivity(self): """Return the subjectivity score as a float within the range [0.0, 1.0] where 0.0 is very objective and 1.0 is very subjective. :rtype: float """ return PatternAnalyzer().analyze(self.raw)[1] @cached_property def noun_phrases(self): """Returns a list of noun phrases for this blob.""" return WordList([phrase.strip().lower() for phrase in self.np_extractor.extract(self.raw) if len(phrase) > 1]) @cached_property def pos_tags(self): """Returns an list of tuples of the form (word, POS tag). Example: :: [('At', 'IN'), ('eight', 'CD'), ("o'clock", 'JJ'), ('on', 'IN'), ('Thursday', 'NNP'), ('morning', 'NN')] :rtype: list of tuples """ if isinstance(self, TextBlob): return [val for sublist in [s.pos_tags for s in self.sentences] for val in sublist] else: return [(Word(word, pos_tag=t), unicode(t)) for word, t in self.pos_tagger.tag(self) if not PUNCTUATION_REGEX.match(unicode(t))] tags = pos_tags @cached_property def word_counts(self): """Dictionary of word frequencies in this text. """ counts = defaultdict(int) stripped_words = [lowerstrip(word) for word in self.words] for word in stripped_words: counts[word] += 1 return counts @cached_property def np_counts(self): """Dictionary of noun phrase frequencies in this text. """ counts = defaultdict(int) for phrase in self.noun_phrases: counts[phrase] += 1 return counts def ngrams(self, n=3): """Return a list of n-grams (tuples of n successive words) for this blob. :rtype: List of :class:`WordLists <WordList>` """ if n <= 0: return [] grams = [WordList(self.words[i:i + n]) for i in range(len(self.words) - n + 1)] return grams def translate(self, from_lang="auto", to="en"): """Translate the blob to another language. Uses the Google Translate API. Returns a new TextBlob. Requires an internet connection. Usage: :: >>> b = TextBlob("Simple is better than complex") >>> b.translate(to="es") TextBlob('Lo simple es mejor que complejo') Language code reference: https://developers.google.com/translate/v2/using_rest#language-params .. versionadded:: 0.5.0. :param str from_lang: Language to translate from. If ``None``, will attempt to detect the language. :param str to: Language to translate to. :rtype: :class:`BaseBlob <BaseBlob>` """ return self.__class__(self.translator.translate(self.raw, from_lang=from_lang, to_lang=to)) def detect_language(self): """Detect the blob's language using the Google Translate API. Requires an internet connection. Usage: :: >>> b = TextBlob("bonjour") >>> b.detect_language() u'fr' Language code reference: https://developers.google.com/translate/v2/using_rest#language-params .. versionadded:: 0.5.0 :rtype: str """ return self.translator.detect(self.raw) def correct(self): """Attempt to correct the spelling of a blob. .. versionadded:: 0.6.0 :rtype: :class:`BaseBlob <BaseBlob>` """ # regex matches: word or punctuation or whitespace tokens = nltk.tokenize.regexp_tokenize(self.raw, "\w+|[^\w\s]|\s") corrected = (Word(w).correct() for w in tokens) ret = ''.join(corrected) return self.__class__(ret) def _cmpkey(self): """Key used by ComparableMixin to implement all rich comparison operators. 
""" return self.raw def _strkey(self): """Key used by StringlikeMixin to implement string methods.""" return self.raw def __hash__(self): return hash(self._cmpkey()) def __add__(self, other): '''Concatenates two text objects the same way Python strings are concatenated. Arguments: - `other`: a string or a text object ''' if isinstance(other, basestring): return self.__class__(self.raw + other) elif isinstance(other, BaseBlob): return self.__class__(self.raw + other.raw) else: raise TypeError('Operands must be either strings or {0} objects' .format(self.__class__.__name__)) def split(self, sep=None, maxsplit=sys.maxsize): """Behaves like the built-in str.split() except returns a WordList. :rtype: :class:`WordList <WordList>` """ return WordList(self._strkey().split(sep, maxsplit)) class TextBlob(BaseBlob): """A general text block, meant for larger bodies of text (esp. those containing sentences). Inherits from :class:`BaseBlob <BaseBlob>`. :param str text: A string. :param tokenizer: (optional) A tokenizer instance. If ``None``, defaults to :class:`WordTokenizer() <textblob.tokenizers.WordTokenizer>`. :param np_extractor: (optional) An NPExtractor instance. If ``None``, defaults to :class:`FastNPExtractor() <textblob.en.np_extractors.FastNPExtractor>`. :param pos_tagger: (optional) A Tagger instance. If ``None``, defaults to :class:`NLTKTagger <textblob.en.taggers.NLTKTagger>`. :param analyzer: (optional) A sentiment analyzer. If ``None``, defaults to :class:`PatternAnalyzer <textblob.en.sentiments.PatternAnalyzer>`. :param classifier: (optional) A classifier. """ @cached_property def sentences(self): """Return list of :class:`Sentence <Sentence>` objects.""" return self._create_sentence_objects() @cached_property def words(self): """Return a list of word tokens. This excludes punctuation characters. If you want to include punctuation characters, access the ``tokens`` property. :returns: A :class:`WordList <WordList>` of word tokens. """ return WordList(word_tokenize(self.raw, include_punc=False)) @property def raw_sentences(self): """List of strings, the raw sentences in the blob.""" return [sentence.raw for sentence in self.sentences] @property def serialized(self): """Returns a list of each sentence's dict representation.""" return [sentence.dict for sentence in self.sentences] def to_json(self, *args, **kwargs): '''Return a json representation (str) of this blob. Takes the same arguments as json.dumps. .. versionadded:: 0.5.1 ''' return json.dumps(self.serialized, *args, **kwargs) @property def json(self): '''The json representation of this blob. .. versionchanged:: 0.5.1 Made ``json`` a property instead of a method to restore backwards compatibility that was broken after version 0.4.0. ''' return self.to_json() def _create_sentence_objects(self): '''Returns a list of Sentence objects from the raw text. 
''' sentence_objects = [] sentences = sent_tokenize(self.raw) char_index = 0 # Keeps track of character index within the blob for sent in sentences: # Compute the start and end indices of the sentence # within the blob start_index = self.raw.index(sent, char_index) char_index += len(sent) end_index = start_index + len(sent) # Sentences share the same models as their parent blob s = Sentence(sent, start_index=start_index, end_index=end_index, tokenizer=self.tokenizer, np_extractor=self.np_extractor, pos_tagger=self.pos_tagger, analyzer=self.analyzer, parser=self.parser, classifier=self.classifier) sentence_objects.append(s) return sentence_objects class Sentence(BaseBlob): """A sentence within a TextBlob. Inherits from :class:`BaseBlob <BaseBlob>`. :param sentence: A string, the raw sentence. :param start_index: An int, the index where this sentence begins in a TextBlob. If not given, defaults to 0. :param end_index: An int, the index where this sentence ends in a TextBlob. If not given, defaults to the length of the sentence - 1. """ def __init__(self, sentence, start_index=0, end_index=None, *args, **kwargs): super(Sentence, self).__init__(sentence, *args, **kwargs) #: The start index within a TextBlob self.start = self.start_index = start_index #: The end index within a textBlob self.end = self.end_index = end_index or len(sentence) - 1 @property def dict(self): '''The dict representation of this sentence.''' return { 'raw': self.raw, 'start_index': self.start_index, 'end_index': self.end_index, 'stripped': self.stripped, 'noun_phrases': self.noun_phrases, 'polarity': self.polarity, 'subjectivity': self.subjectivity, } class Blobber(object): """A factory for TextBlobs that all share the same tagger, tokenizer, parser, classifier, and np_extractor. Usage: >>> from textblob import Blobber >>> from textblob.taggers import NLTKTagger >>> from textblob.tokenizers import SentenceTokenizer >>> tb = Blobber(pos_tagger=NLTKTagger(), tokenizer=SentenceTokenizer()) >>> blob1 = tb("This is one blob.") >>> blob2 = tb("This blob has the same tagger and tokenizer.") >>> blob1.pos_tagger is blob2.pos_tagger True :param tokenizer: (optional) A tokenizer instance. If ``None``, defaults to :class:`WordTokenizer() <textblob.tokenizers.WordTokenizer>`. :param np_extractor: (optional) An NPExtractor instance. If ``None``, defaults to :class:`FastNPExtractor() <textblob.en.np_extractors.FastNPExtractor>`. :param pos_tagger: (optional) A Tagger instance. If ``None``, defaults to :class:`NLTKTagger <textblob.en.taggers.NLTKTagger>`. :param analyzer: (optional) A sentiment analyzer. If ``None``, defaults to :class:`PatternAnalyzer <textblob.en.sentiments.PatternAnalyzer>`. :param parser: A parser. If ``None``, defaults to :class:`PatternParser <textblob.en.parsers.PatternParser>`. :param classifier: A classifier. .. versionadded:: 0.4.0 """ np_extractor = FastNPExtractor() pos_tagger = NLTKTagger() tokenizer = WordTokenizer() analyzer = PatternAnalyzer() parser = PatternParser() def __init__(self, tokenizer=None, pos_tagger=None, np_extractor=None, analyzer=None, parser=None, classifier=None): _initialize_models(self, tokenizer, pos_tagger, np_extractor, analyzer, parser, classifier) def __call__(self, text): """Return a new TextBlob object with this Blobber's ``np_extractor``, ``pos_tagger``, ``tokenizer``, ``analyzer``, and ``classifier``. :returns: A new :class:`TextBlob <TextBlob>`. 
""" return TextBlob(text, tokenizer=self.tokenizer, pos_tagger=self.pos_tagger, np_extractor=self.np_extractor, analyzer=self.analyzer, parser=self.parser, classifier=self.classifier) def __repr__(self): classifier_name = self.classifier.__class__.__name__ + "()" if self.classifier else "None" return ("Blobber(tokenizer={0}(), pos_tagger={1}(), " "np_extractor={2}(), analyzer={3}(), parser={4}(), classifier={5})")\ .format(self.tokenizer.__class__.__name__, self.pos_tagger.__class__.__name__, self.np_extractor.__class__.__name__, self.analyzer.__class__.__name__, self.parser.__class__.__name__, classifier_name) __str__ = __repr__
bdc878fa3955dd1fde5a6902a51dd5a0b6813972
4804802271994dc54b8524ab6b22e0ef1dd68dec
/demo/__init__.py
ebc62bbce25fdbfe61be14e55801f8e954806625
[]
no_license
addron/Django_study
3393185dd7d6c65445670553e63fe600a36fdfa6
885f6e49b4c068090ab443b61723bf9f3e65040c
refs/heads/master
2020-04-16T09:15:25.774771
2019-01-15T12:50:42
2019-01-15T12:50:42
165,454,869
0
0
null
null
null
null
UTF-8
Python
false
false
116
py
# Let Django's ORM call PyMySQL as if it were MySQLdb from pymysql import install_as_MySQLdb install_as_MySQLdb()
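This shim matters because Django's MySQL backend imports MySQLdb by name; registering PyMySQL under that name lets an unmodified settings block work. A sketch of the corresponding settings.py entry, with placeholder credentials:

# settings.py - standard Django MySQL config; PyMySQL serves as the driver
# once install_as_MySQLdb() has run in the project package's __init__.py.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'demo_db',       # placeholder
        'USER': 'demo_user',     # placeholder
        'PASSWORD': 'secret',    # placeholder
        'HOST': '127.0.0.1',
        'PORT': '3306',
    }
}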
516e94a8bd08fc5bf28a46f5f4ec6ff8a5dce10d
e7164d44058a06331c034cc17eefe1521d6c95a2
/tools/tieba/urate/python-wrapper/melt_predict.py
3805c92f412dc70ce42b65f942925d6170288160
[]
no_license
chenghuige/gezi
fbc1e655396fbc365fffacc10409d35d20e3952c
4fc8f9a3c5837e8add720bf6954a4f52abfff8b5
refs/heads/master
2021-01-20T01:57:18.362413
2016-11-08T15:34:07
2016-11-08T15:34:07
101,304,774
0
3
null
null
null
null
UTF-8
Python
false
false
2,988
py
import os import sys import glob from pyplusplus import module_builder root = '/home/users/chenghuige/rsc/' name = 'melt_predict' #define_symbols = ['GCCXML','PYTHON_WRAPPER','NO_BAIDU_DEP'] define_symbols = ['GCCXML','PYTHON_WRAPPER'] files = [ './gezi.include.python/common_util.h', './include.python/MLCore/Predictor.h', './include.python/MLCore/TextPredictor.h', './gezi.include.python/Identifer.h', './include.python/MLCore/PredictorFactory.h', './gezi.include.python/Numeric/Vector/Vector.h', './gezi.include.python/Numeric/Vector/WeightVector.h', './gezi.include.python/Numeric/Vector/vector_util.h', './gezi.include.python/feature/FeatureVector.h', './gezi.include.python/feature/features_util.h', './gezi.include.python/feature/FeatureExtractor.h', './gezi.include.python/feature/FeaturesExtractorMgr.h', ] paths = [ #'./gezi.include.python/Numeric/Vector/', #'./include.python/MLCore/', #'./include.python/Prediction/Instances/', ] #import gezi #for path in paths: # files += [f for f in gezi.get_filepaths(path) if f.endswith('.h')] include_paths=[ 'third-64/glog', 'third-64/gflags', 'third-64/gtest', 'third-64/boost.1.53', 'lib2-64/bsl', 'lib2-64/postag', 'lib2-64/dict', 'lib2-64/libcrf', 'lib2-64/others-ex', 'lib2-64/ullib', 'lib2-64/ccode', 'public/odict/output', 'public/uconv/output', 'public/configure/output', 'app/search/sep/anti-spam/gezi/third/rabit', ] include_paths_python = [ 'app/search/sep/anti-spam/melt/python-wrapper', ] include_paths_obsolute = [ 'app/search/sep/anti-spam/melt/python-wrapper/gezi.include.python', 'lib2-64/wordseg', 'public/comlog-plugin', 'app/search/sep/anti-spam/gezi/third', ] mb = module_builder.module_builder_t( gccxml_path = '~/.jumbo/bin/gccxml', define_symbols = define_symbols, files = files, include_paths = [root + f + '/include' for f in include_paths] + [root + f + '/include.python' for f in include_paths_python] + [root + f for f in include_paths_obsolute] ) mb.build_code_creator( module_name='lib%s'%name ) mb.code_creator.user_defined_directories.append( os.path.abspath('.') ) mb.write_module( os.path.join( os.path.abspath('./'), '%s_py.cc'%name) )
483e5aadac9f2d40958e1167b76c220a451edcaf
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p02403/s798093890.py
f1e8cb17072b4b550e16ce6c68a1df20760dc0e6
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
173
py
import sys

x = y = 1
while True:
    x, y = map(int, input().split())
    if x == 0:
        break
    for i in range(1, x + 1):
        for j in range(1, y + 1):
            sys.stdout.write('#')
        print('')
    print('')
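The nested loops above print, for each 'x y' dataset, an x-row by y-column block of '#' followed by a blank line, stopping at the terminating '0 0' line. An equivalent per-dataset helper (new code, added only for clarity):

def draw_rectangle(height, width):
    # same output as one pass of the loops above: height rows of width '#'
    for _ in range(height):
        print('#' * width)
    print('')  # blank separator line between datasets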
38f127795cd68bfa908c221ad7402f05e9488ba3
bcdb7d8ca25854eaa7f1177504b2f9531872daef
/mvpenv/bin/gunicorn
0a06c55ea79d0ecb76a532945306cb06a92244bb
[]
no_license
rajinder-esfera/fashion-circle
ba9d8d1612821d9076958a9699a9f9d794ac4f67
e7876589a19f7f92f95bdb5cb20f07a2b272c2cd
refs/heads/master
2022-12-12T02:42:59.714131
2018-01-29T12:23:05
2018-01-29T12:23:05
117,091,007
0
2
null
2022-11-18T10:15:01
2018-01-11T11:25:06
Python
UTF-8
Python
false
false
241
#!/var/www/html/mvp/mvpenv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys

from gunicorn.app.wsgiapp import run

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(run())
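The re.sub in the entry point above strips a trailing '-script.py', '-script.pyw', or '.exe' (setuptools wrapper suffixes) from argv[0] so gunicorn reports a clean program name. A standalone check of that regex (new example, not part of the original script):

import re

for name in ['gunicorn-script.py', 'gunicorn-script.pyw', 'gunicorn.exe', 'gunicorn']:
    print(re.sub(r'(-script\.pyw?|\.exe)?$', '', name))  # every line prints 'gunicorn'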
acaa717cc2fa9068d3df78207efae985e278cb90
ab3d5ea4bf0e48914ed14fcf16e5b1d752f199ba
/pcg_libraries/src/pcg_gazebo/parsers/sdf/type.py
cb1a1eed44a58d4c93e52ce2c604b9f40f8ec665
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference", "CC0-1.0", "MIT", "BSD-3-Clause", "LicenseRef-scancode-public-domain" ]
permissive
boschresearch/pcg_gazebo_pkgs
5f1004d0de874d4d1abc4eb695777013027158b2
1c112d01847ca4f8da61ce9b273e13d13bc7eb73
refs/heads/master
2020-06-11T06:28:36.228431
2020-02-07T13:05:28
2020-02-07T13:05:28
193,876,180
44
3
NOASSERTION
2020-02-07T12:00:55
2019-06-26T09:45:05
Python
UTF-8
Python
false
false
862
py
# Copyright (c) 2019 - The Procedural Generation for Gazebo authors
# For information on the respective copyright owner see the NOTICE file
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..types import XMLString


class Type(XMLString):
    _NAME = 'type'
    _TYPE = 'sdf'

    def __init__(self, default=''):
        XMLString.__init__(self, default)
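A minimal construction sketch for the element class above (new code; it uses only what the snippet itself defines, since XMLString's wider API is not shown here, and it assumes the package import context is available):

t = Type(default='revolute')
print(t._NAME, t._TYPE)  # -> type sdf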
0e29b7eea6f44cbedda24a54fc2477b33a4ff5fb
62166473c5b237bbe97c7abcafd2623a1bfc3a77
/linked_list_queue.py
e2b18155e138b3ebc3ba7bd14cca4538c8bf0f96
[]
no_license
ananyajana/practice_problems
bc70bd55b9012fa7fafc2037ea606bc34bd63cad
0071a8a3753c8a9135c21fecf6b175ee3774c177
refs/heads/master
2023-05-15T01:23:35.146811
2021-06-11T21:27:40
2021-06-11T21:27:40
109,902,077
1
1
null
null
null
null
UTF-8
Python
false
false
975
py
# problems from geeksforgeeks must do coding questions
T = int(input())
st_list = []
N_list = []
for t in range(T):
    N_list.append(int(input()))
    st_list.append(input())


class Node:
    # fn to initialize the node object
    def __init__(self, data):
        self.data = data
        self.next = None


class MyQueue:
    def __init__(self):
        self.front = None

    # method to add an item to the queue
    def push(self, item):
        node = Node(item)
        if self.front is None:
            self.front = node
        else:
            temp = self.front
            while temp.next is not None:
                temp = temp.next
            temp.next = node

    # method to remove an item from the queue
    def pop(self):
        if self.front is None:
            return -1
        else:
            temp = self.front
            self.front = self.front.next
            return temp.data


for t in range(T):
    s1 = st_list[t]
    n = N_list[t]
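A short usage sketch for MyQueue (new code, not in the original file, whose final per-testcase loop is left unfinished): push walks to the tail, so enqueue costs O(n) per call, while pop removes from the front in O(1); -1 is the empty-queue sentinel.

q = MyQueue()
for item in [1, 2, 3]:
    q.push(item)
print(q.pop())  # 1 (FIFO order)
print(q.pop())  # 2
print(q.pop())  # 3
print(q.pop())  # -1 (queue is empty)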
280d995579a3904e0a74306a09310df360636dd4
163bbb4e0920dedd5941e3edfb2d8706ba75627d
/Code/CodeRecords/2182/61406/303898.py
0fcb1dffc2380878a68d6135d58d8e3a3cb4ebba
[]
no_license
AdamZhouSE/pythonHomework
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
ffc5606817a666aa6241cfab27364326f5c066ff
refs/heads/master
2022-11-24T08:05:22.122011
2020-07-28T16:21:24
2020-07-28T16:21:24
259,576,640
2
1
null
null
null
null
UTF-8
Python
false
false
569
py
T = int(input())
for a in range(0, T):
    nk = input().split(' ')
    n = int(nk[0])
    k = int(nk[1])
    circle = []
    for b in range(0, n):
        circle.append(1)
    ptr = 0
    count = 0
    m = 0
    while count < n - 1:
        if ptr > n - 1:
            ptr = ptr - n
        while circle[ptr] == -1:
            ptr += 1
            if ptr > n - 1:
                ptr = ptr - n
        if circle[ptr] == 1:
            m += 1
            if m == k:
                circle[ptr] = -1
                count += 1
                m = 0
        ptr += 1
    print(circle.index(1) + 1)
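The loop above simulates Josephus elimination directly, marking every k-th survivor as dead until one remains. The same 1-based survivor position follows from the standard recurrence J(1) = 0, J(m) = (J(m-1) + k) mod m; a minimal O(n) alternative sketch (new code, not from the original submission):

def josephus(n, k):
    # 0-based survivor index, built up from the 1-person base case
    pos = 0
    for size in range(2, n + 1):
        pos = (pos + k) % size
    return pos + 1  # convert to the 1-based answer printed above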
9eacff23f758535c87a7687002c988a9cad9bb7d
db575f3401a5e25494e30d98ec915158dd7e529b
/BIO_Stocks/RZLT.py
79c2560d453f46f7fd8351357550d6986f2de632
[]
no_license
andisc/StockWebScraping
b10453295b4b16f065064db6a1e3bbcba0d62bad
41db75e941cfccaa7043a53b0e23ba6e5daa958a
refs/heads/main
2023-08-08T01:33:33.495541
2023-07-22T21:41:08
2023-07-22T21:41:08
355,332,230
0
0
null
null
null
null
UTF-8
Python
false
false
2,097
py
import requests
from lxml import html
from bs4 import BeautifulSoup
import os
from datetime import date, datetime
from ValidationTools import validateday
from Database_Connections import InsertData, Insert_Logging


def main(id_control):
    try:
        url = 'https://www.rezolutebio.com/news/press-releases'
        headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) '
                                 'AppleWebKit/537.36 (KHTML, like Gecko) '
                                 'Chrome/50.0.2661.102 Safari/537.36'}
        result = requests.get(url, headers=headers)
        #print(result.content.decode())
        html_content = result.content.decode()
        soup = BeautifulSoup(html_content, 'html.parser')
        #print(soup)

        articles_panel = soup.find('ul', attrs={'class': 'news'})
        articles = articles_panel.findAll('li')

        # get first article
        FIRST_ARTICLE = articles[0]
        article_date = FIRST_ARTICLE.find('time')
        article_desc = FIRST_ARTICLE.find('a')
        v_article_date = article_date.text.lstrip().rstrip()

        # if the process finds any article dated today
        istoday, v_art_date = validateday(v_article_date)
        if istoday == True:
            v_ticker = os.path.basename(__file__).replace(".py", "")
            v_url = article_desc.get('href')
            v_description = article_desc.text.lstrip().rstrip()
            now = datetime.now()
            print("URL: " + v_url)
            print("DESCRIPTION: " + v_description)
            print("ARTICLE_DATE: " + str(now))

            # Insert articles
            if "https://" in v_url:
                InsertData(v_ticker, v_description, v_url, v_art_date)
            else:
                InsertData(v_ticker, v_description, url, v_art_date)
    except Exception:
        # translated from Portuguese: "Entrou na excepção ao tratar ..."
        error_message = "Caught an exception while handling " + os.path.basename(__file__) + "..."
        print(error_message)
        Insert_Logging(id_control, 'Detail', error_message)
        pass


#InsertData()
if __name__ == "__main__":
    # NOTE: the original called main() with no argument, which raises TypeError
    # because id_control is required; None is a placeholder control id.
    main(None)
75ad1cf37bc5ac16b62c19cd737a317a998c3347
f47ac8d59fe1c0f807d699fe5b5991ed3662bfdb
/binary23.py
6f1107467849f636bfca3fa944aa05d18d2fb699
[]
no_license
YanglanWang/jianzhi_offer
5561d8a29881d8504b23446353e9f969c01ed0c5
1c568f399ed6ac1017671c40c765e609c1b6d178
refs/heads/master
2020-06-16T10:41:44.979558
2019-08-03T09:07:37
2019-08-03T09:07:37
195,543,754
0
0
null
null
null
null
UTF-8
Python
false
false
1,290
py
class Solution:
    def VerifySquenceOfBST(self, sequence):
        # write code here
        if len(sequence) == 0:
            return False
        if len(sequence) == 1:
            return True
        root = sequence[-1]
        for i in range(len(sequence)):
            if sequence[i] > sequence[-1]:
                break
        k = i
        for j in range(i, len(sequence) - 1):
            if sequence[j] < sequence[-1]:
                return False
        # if k == len(sequence) - 1:
        #     if sequence[k - 1] > sequence[-1]:
        #         return False
        #     else:
        #         return self.VerifySquenceOfBST(sequence[:k])
        # elif k == 0:
        #     if sequence[-2] < sequence[-1]:
        #         return False
        #     else:
        #         return self.VerifySquenceOfBST(sequence[:-1])
        # else:
        #     if sequence[k - 1] > sequence[-1] or sequence[-2] < sequence[-1]:
        #         return False
        left = True
        right = True
        if k > 0:
            left = self.VerifySquenceOfBST(sequence[:k])
        if k < len(sequence) - 1:
            right = self.VerifySquenceOfBST(sequence[k:-1])
        return left and right


a = Solution()
# b = a.VerifySquenceOfBST([4, 6, 7, 5])
b = a.VerifySquenceOfBST([4, 3, 5, 8, 10, 9, 7])
print(b)
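A quick sanity check of the verifier above (new example): [1, 3, 2] is a valid postorder sequence of a BST rooted at 2, while [7, 4, 6, 5] is not, since 7 exceeds the root 5, which forces the whole prefix into the right partition, where 4 then violates the BST property.

s = Solution()
print(s.VerifySquenceOfBST([1, 3, 2]))     # True
print(s.VerifySquenceOfBST([7, 4, 6, 5]))  # False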
f288784a0437bf94488c1a422eb9e0f0ca36f3e1
b5a9d42f7ea5e26cd82b3be2b26c324d5da79ba1
/tensorflow/python/data/kernel_tests/repeat_test.py
88e83da5bc27a9aeaa9b63ef9e314e6f97cb074f
[ "Apache-2.0" ]
permissive
uve/tensorflow
e48cb29f39ed24ee27e81afd1687960682e1fbef
e08079463bf43e5963acc41da1f57e95603f8080
refs/heads/master
2020-11-29T11:30:40.391232
2020-01-11T13:43:10
2020-01-11T13:43:10
230,088,347
0
0
Apache-2.0
2019-12-25T10:49:15
2019-12-25T10:49:14
null
UTF-8
Python
false
false
3,445
py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.Dataset.repeat()`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test


@test_util.run_all_in_graph_and_eager_modes
class RepeatTest(test_base.DatasetTestBase):

  def testRepeatTensorDataset(self):
    """Test a dataset that repeats its input multiple times."""
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0))
    # This placeholder can be fed when dataset-definition subgraph
    # runs (i.e. `init_op` below) to configure the number of
    # repetitions used in a particular iterator.

    def do_test(count):
      dataset = dataset_ops.Dataset.from_tensors(components).repeat(count)
      self.assertEqual(
          [c.shape for c in components],
          [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
      self.assertDatasetProduces(dataset, [components] * count)

    # Test a finite repetition.
    do_test(3)

    # test a different finite repetition.
    do_test(7)

    # Test an empty repetition.
    do_test(0)

    # Test an infinite repetition.
    # NOTE(mrry): There's not a good way to test that the sequence
    # actually is infinite.
    dataset = dataset_ops.Dataset.from_tensors(components).repeat(-1)
    self.assertEqual(
        [c.shape for c in components],
        [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    get_next = self.getNext(dataset)
    for _ in range(17):
      results = self.evaluate(get_next())
      for component, result_component in zip(components, results):
        self.assertAllEqual(component, result_component)

  def testRepeatRepeatTensorDataset(self):
    """Test the composition of repeat datasets."""
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0))
    inner_count, outer_count = 7, 14

    dataset = dataset_ops.Dataset.from_tensors(components).repeat(
        inner_count).repeat(outer_count)
    self.assertEqual(
        [c.shape for c in components],
        [shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
    self.assertDatasetProduces(dataset,
                               [components] * (inner_count * outer_count))

  def testRepeatEmptyDataset(self):
    """Test that repeating an empty dataset does not hang."""
    dataset = dataset_ops.Dataset.from_tensors(0).repeat(10).skip(10).repeat(-1)
    self.assertDatasetProduces(dataset, [])


if __name__ == "__main__":
  test.main()
b3e56b32c4b8350754af374e43a7c8207d17fd73
f0f56524d54b924eda0bc1abcc386589ccf2a026
/dittodemo/settings/defaults.py
40f9289fd1b893ec30d6ee20799ba3c67e0154ac
[ "MIT" ]
permissive
vanderwal/django-ditto-demo
052d116419b8f67db40cd7a71b793bd702672c0f
abb251d44de48191b32ef54768f638920d39d081
refs/heads/master
2022-02-03T21:07:14.860800
2017-02-09T13:50:36
2017-02-09T13:50:36
66,157,322
0
0
null
2016-08-20T16:43:14
2016-08-20T16:43:13
null
UTF-8
Python
false
false
5,235
py
""" Django settings for dittodemo project. Generated by 'django-admin startproject' using Django 1.9.7. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os from os import environ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Make this unique, and don't share it with anybody. # http://www.miniwebtool.com/django-secret-key-generator/ SECRET_KEY = environ.get('SECRET_KEY', '') # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False ADMINS = [ ('Phil Gyford', '[email protected]'), ] MANAGERS = ADMINS # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', # Our Django app. 'demo', # For Django Ditto. 'imagekit', 'sortedm2m', 'taggit', 'ditto.core', 'ditto.flickr', 'ditto.lastfm', 'ditto.pinboard', 'ditto.twitter', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'dittodemo.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', 'ditto.core.context_processors.ditto', ], }, }, ] WSGI_APPLICATION = 'dittodemo.wsgi.application' # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': environ.get('DB_NAME'), 'USER': environ.get('DB_USERNAME'), 'PASSWORD': environ.get('DB_PASSWORD'), 'HOST': environ.get('DB_HOST'), 'PORT': '', } } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-gb' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True USE_THOUSAND_SEPARATOR = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, 'static_collected/') # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. # Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = '/media/' # Absolute filesystem path to the directory that will hold user-uploaded files. 
# Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = os.path.join(BASE_DIR, 'media/') ALLOWED_HOSTS = environ.get('ALLOWED_HOSTS', '*').split(',') CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache', 'LOCATION': '127.0.0.1:11211', 'TIMEOUT': 500, # milliseconds } } # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } # DJANGO DITTO SETTINGS DITTO_FLICKR_DIR_BASE = 'flickr' DITTO_FLICKR_DIR_PHOTOS_FORMAT = '%Y/%m/%d' DITTO_FLICKR_USE_LOCAL_MEDIA = False DITTO_TWITTER_DIR_BASE = 'twitter' DITTO_TWITTER_USE_LOCAL_MEDIA = False
916e183c70b1243b4a91f925cfb582f468642add
c8cd63041471e7a20bf3a15c3ca96b7573f5f727
/load_test_parser.py
2aa9ecfacbaac77a83ac5488be6c85d639cefd4b
[]
no_license
ansonmiu0214/airbus_prototype
a992b9a04ba35da54088ff399975aac4efc7046a
0647ec3cee330e4c58a40e10a946e57478e316ad
refs/heads/master
2020-04-26T09:24:12.565650
2019-03-09T11:42:39
2019-03-09T11:42:39
173,453,998
0
1
null
null
null
null
UTF-8
Python
false
false
541
py
#!/bin/python3

if __name__ == "__main__":
    with open('output_2.txt', 'r') as f:
        lines = f.readlines()

    relevant = [line.strip().split('Current latency')[1]
                for line in lines if line.startswith('Current latency')]
    relevant = list(map(float, map(lambda x: x.strip(), relevant)))

    max_latency = max(relevant)
    min_latency = min(relevant)
    avg_latency = sum(relevant) / len(relevant)

    print("Max latency (ms):", max_latency)
    print("Min latency (ms):", min_latency)
    print("Avg latency (ms):", avg_latency)
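For clarity, the parser above expects log lines of the form 'Current latency <value>'. A self-contained check of the same extraction (new code, with hypothetical sample lines):

sample = ['Current latency 12.5\n', 'worker spawned\n', 'Current latency 9.8\n']
values = [float(line.strip().split('Current latency')[1].strip())
          for line in sample if line.startswith('Current latency')]
print(values)  # [12.5, 9.8]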
56d0e51585b52f3fa20e1d49f5c8d541621a07ec
85a9ffeccb64f6159adbd164ff98edf4ac315e33
/pysnmp-with-texts/HUAWEI-PWE3-MIB.py
3e123a0d7702a64554c396c962cbbcdad8e83ff7
[ "Apache-2.0", "LicenseRef-scancode-warranty-disclaimer", "LicenseRef-scancode-proprietary-license", "LicenseRef-scancode-unknown-license-reference" ]
permissive
agustinhenze/mibs.snmplabs.com
5d7d5d4da84424c5f5a1ed2752f5043ae00019fb
1fc5c07860542b89212f4c8ab807057d9a9206c7
refs/heads/master
2020-12-26T12:41:41.132395
2019-08-16T15:51:41
2019-08-16T15:53:57
237,512,469
0
0
Apache-2.0
2020-01-31T20:41:36
2020-01-31T20:41:35
null
UTF-8
Python
false
false
126,657
py
# # PySNMP MIB module HUAWEI-PWE3-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-PWE3-MIB # Produced by pysmi-0.3.4 at Wed May 1 13:45:54 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "ValueRangeConstraint") hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm") HWL2VpnVcEncapsType, HWEnableValue, HWL2VpnStateChangeReason = mibBuilder.importSymbols("HUAWEI-VPLS-EXT-MIB", "HWL2VpnVcEncapsType", "HWEnableValue", "HWL2VpnStateChangeReason") InterfaceIndexOrZero, ifName = mibBuilder.importSymbols("IF-MIB", "InterfaceIndexOrZero", "ifName") InetAddressType, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType") EnabledStatus, = mibBuilder.importSymbols("P-BRIDGE-MIB", "EnabledStatus") NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance") sysUpTime, = mibBuilder.importSymbols("SNMPv2-MIB", "sysUpTime") iso, NotificationType, Integer32, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, ModuleIdentity, Counter64, Bits, TimeTicks, Gauge32, Bits, ObjectIdentity, MibIdentifier, Unsigned32, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "NotificationType", "Integer32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "ModuleIdentity", "Counter64", "Bits", "TimeTicks", "Gauge32", "Bits", "ObjectIdentity", "MibIdentifier", "Unsigned32", "Counter32") DisplayString, TextualConvention, RowStatus, TruthValue = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "RowStatus", "TruthValue") hwL2VpnPwe3 = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4)) if mibBuilder.loadTexts: hwL2VpnPwe3.setLastUpdated('200704120900Z') if mibBuilder.loadTexts: hwL2VpnPwe3.setOrganization('Huawei Technologies Co., Ltd.') if mibBuilder.loadTexts: hwL2VpnPwe3.setContactInfo('R&D BeiJing, Huawei Technologies co.,Ltd. Huawei Bld.,NO.3 Xinxi Rd., Shang-Di Information Industry Base, Hai-Dian District Beijing P.R. 
China Zip:100085 Http://www.huawei.com E-mail:[email protected]') if mibBuilder.loadTexts: hwL2VpnPwe3.setDescription('The HUAWEI-PWE3-MIB contains objects to manage PWE3.') class HWLdpPwStateChangeReason(TextualConvention, Integer32): description = "The type indicates the reason of LDP PW VC's status change: LDP session down (1) AC interface down (2) PSN tunnel state down (3) Mapping message not received (4) PW interface parameter not match (5) Notification not forwarding (6) " status = 'current' subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6)) namedValues = NamedValues(("ldpSessionDown", 1), ("interfaceDown", 2), ("tunnelDown", 3), ("receivedNoMapping", 4), ("paraUnMatched", 5), ("notifiNotForward", 6)) hwL2Vpn = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119)) hwPwe3MIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1)) hwPwe3Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1)) hwPWVcTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1), ) if mibBuilder.loadTexts: hwPWVcTable.setStatus('current') if mibBuilder.loadTexts: hwPWVcTable.setDescription('This table is the VC configuration table. Users can create or delete a VC by it.') hwPWVcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwPWVcID"), (0, "HUAWEI-PWE3-MIB", "hwPWVcType")) if mibBuilder.loadTexts: hwPWVcEntry.setStatus('current') if mibBuilder.loadTexts: hwPWVcEntry.setDescription('Provides the information of a VC entry.') hwPWVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 1), Unsigned32()) if mibBuilder.loadTexts: hwPWVcID.setStatus('current') if mibBuilder.loadTexts: hwPWVcID.setDescription("Index for the conceptual row identifying a PW within this PW Emulation table.Used in the outgoing PW ID field within the 'Virtual Circuit FEC Element'.") hwPWVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 2), HWL2VpnVcEncapsType()) if mibBuilder.loadTexts: hwPWVcType.setStatus('current') if mibBuilder.loadTexts: hwPWVcType.setDescription('The type of the Virtual Circuit.This value indicate the service to be carried over this PW.') hwPWVcPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 3), InetAddressType().clone('ipv4')).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcPeerAddrType.setStatus('current') if mibBuilder.loadTexts: hwPWVcPeerAddrType.setDescription("Denotes the address type of the peer node. It should be set to 'unknown' if PE/PW maintenance protocol is not used and the address is unknown. Currently, support 'ipv4' only.") hwPWVcPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 4), IpAddress()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcPeerAddr.setStatus('current') if mibBuilder.loadTexts: hwPWVcPeerAddr.setDescription("This object contain the value of the peer node address of the PW/PE maintenance protocol entity. 
This object SHOULD contain a value of all zeroes if not applicable (hwPWVcPeerAddrType is 'unknown').") hwPWVcStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("plugout", 3), ("backup", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcStatus.setStatus('current') if mibBuilder.loadTexts: hwPWVcStatus.setDescription("Indicates the status of the PW in the local node. Currently, can't support 'plugout'.") hwPWVcInboundLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 6), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcInboundLabel.setStatus('current') if mibBuilder.loadTexts: hwPWVcInboundLabel.setDescription('For ldp vc, the value will be created by system automatically.') hwPWVcOutboundLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 7), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcOutboundLabel.setStatus('current') if mibBuilder.loadTexts: hwPWVcOutboundLabel.setDescription('For ldp vc, the value will be created by system automatically.') hwPWVcSwitchSign = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("staticTostatic", 1), ("ldpTostatic", 2), ("ldpToldp", 3), ("upe", 4)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcSwitchSign.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchSign.setDescription('The sign of switch.') hwPWVcSwitchID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 9), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchID.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchID.setDescription("Used in the outgoing PW ID field within the 'Virtual Circuit FEC Element' of the switch PW.") hwPWVcSwitchPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 10), InetAddressType().clone('ipv4')).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchPeerAddrType.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchPeerAddrType.setDescription("Denotes the address type of the peer node of the switch PW. It should be set to 'unknown' if PE/PW maintenance protocol is not used and the address is unknown. Currently, support 'ipv4' only.") hwPWVcSwitchPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 11), IpAddress()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchPeerAddr.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchPeerAddr.setDescription("This object contain the value of the peer node address of the switch PW of the PW/PE maintenance protocol entity. 
This object SHOULD contain a value of all zeroes if not applicable (hwPWVcSwitchPeerAddrType is 'unknown').") hwPWVcSwitchInboundLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 12), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchInboundLabel.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchInboundLabel.setDescription('For ldp vc, the value will be created by system automatically.') hwPWVcSwitchOutboundLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 13), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchOutboundLabel.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchOutboundLabel.setDescription('For ldp vc, the value will be created by system automatically.') hwPWVcGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 14), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcGroupID.setStatus('current') if mibBuilder.loadTexts: hwPWVcGroupID.setDescription("Used in the Group ID field sent to the peer PWES within the maintenance protocol used for PW setup. Applicable if pwVcOwner equal 'pwIdFecSignaling' or 'l2tpControlProtocol', should be set to zero otherwise. Currently, this value always be zero.") hwPWVcIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 15), InterfaceIndexOrZero()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcIfIndex.setStatus('current') if mibBuilder.loadTexts: hwPWVcIfIndex.setDescription('Index of the interface (or the virtual interface) associated with the PW.') hwPWVcAcStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("plugout", 3), ("notify", 4), ("notifyDown", 5), ("downNotify", 6)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcAcStatus.setStatus('current') if mibBuilder.loadTexts: hwPWVcAcStatus.setDescription("Local AC status. 
Currently, can't support 'plugout'.") hwPWVcACOAMStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcACOAMStatus.setStatus('current') if mibBuilder.loadTexts: hwPWVcACOAMStatus.setDescription("Denotes the AC's protocol is operational or not.") hwPWVcMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(46, 9600), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcMtu.setStatus('current') if mibBuilder.loadTexts: hwPWVcMtu.setDescription('If not equal zero, the optional Mtu object in the signaling protocol will be sent with this value, representing the locally supported MTU size over the interface (or the virtual interface) associated with the PW.') hwPWVcCtrlWord = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 19), HWEnableValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcCtrlWord.setStatus('current') if mibBuilder.loadTexts: hwPWVcCtrlWord.setDescription('If signaling is used for PW establishment, this object indicates the status of the control word negotiation, and in both signaling or manual configuration indicates if CW is to be present or not for this PW.') hwPWVcVCCV = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 20), Bits().clone(namedValues=NamedValues(("ccCw", 0), ("ccAlert", 1), ("ccLabel", 2), ("cvIcmpping", 3), ("cvLspping", 4), ("cvBfd", 5), ("ccTtl", 6)))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcVCCV.setStatus('current') if mibBuilder.loadTexts: hwPWVcVCCV.setDescription('Indicates the optional VCCV capabilities of the PW. According to whether the control word is enabled, the value can be ccCw(0)|ccAlert(1)|ccTtl(6)|cvLspping(4)|cvBfd(5) or ccAlert(1)|ccTtl(6)|cvLspping(4)|cvBfd(5). The default value is ccAlert(1)|ccTtl(6)|cvLspping(4)|cvBfd(5).') hwPWVcBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 21), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 32000000))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcBandWidth.setStatus('current') if mibBuilder.loadTexts: hwPWVcBandWidth.setDescription("This object indicates the bandwidth. '0' is the default value.") hwPWVcMaxAtmCells = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 22), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 28))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcMaxAtmCells.setStatus('current') if mibBuilder.loadTexts: hwPWVcMaxAtmCells.setDescription('Indicates the max cell supported when vc type is atm.') hwPWVcTnlPolicyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 23), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 39))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcTnlPolicyName.setStatus('current') if mibBuilder.loadTexts: hwPWVcTnlPolicyName.setDescription('Indicates the tunnel policy name used.') hwPWVcQoSBehaviorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 24), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcQoSBehaviorIndex.setStatus('current') if mibBuilder.loadTexts: hwPWVcQoSBehaviorIndex.setDescription("Indicates the traffic behavior Index when QOS is implemented. 
Currently,can't support.Return the default value is '0'.") hwPWVcExplicitPathName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 25), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcExplicitPathName.setStatus('current') if mibBuilder.loadTexts: hwPWVcExplicitPathName.setDescription("Indicates the explicit path name set by the operator.Currently, can't support.") hwPWVcTemplateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 26), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 19))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcTemplateName.setStatus('current') if mibBuilder.loadTexts: hwPWVcTemplateName.setDescription('Indicates the PW template index referenced.') hwPWVcSecondary = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 27), TruthValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSecondary.setStatus('current') if mibBuilder.loadTexts: hwPWVcSecondary.setDescription('Indicates whether or not the secondary PW is used.') hwPWVcUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 28), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcUpTime.setStatus('current') if mibBuilder.loadTexts: hwPWVcUpTime.setDescription('Indicates the duration when the PW keeps Up for the last time, in seconds.') hwPWOAMSync = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 29), TruthValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWOAMSync.setStatus('current') if mibBuilder.loadTexts: hwPWOAMSync.setDescription('Denotes the AC and PSN are enable or not.') hwPWVCForBfdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 30), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVCForBfdIndex.setStatus('current') if mibBuilder.loadTexts: hwPWVCForBfdIndex.setDescription('The index of PW for BFD.') hwPWVcDelayTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 31), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcDelayTime.setStatus('current') if mibBuilder.loadTexts: hwPWVcDelayTime.setDescription('The reroute delay time.') hwPWVcReroutePolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("delay", 1), ("immediately", 2), ("never", 3), ("none", 4), ("err", 5), ("invalid", 6), ("immediatelySwitch", 7)))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcReroutePolicy.setStatus('current') if mibBuilder.loadTexts: hwPWVcReroutePolicy.setDescription('Reroute policy.') hwPWVcResumeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 33), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcResumeTime.setStatus('current') if mibBuilder.loadTexts: hwPWVcResumeTime.setDescription('The reroute resume time.') hwPWVcRerouteReason = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 34), HWL2VpnStateChangeReason()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcRerouteReason.setStatus('current') if mibBuilder.loadTexts: hwPWVcRerouteReason.setDescription('Last reroute reason.') hwPWVcLastRerouteTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 35), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: 
hwPWVcLastRerouteTime.setStatus('current') if mibBuilder.loadTexts: hwPWVcLastRerouteTime.setDescription('Last reroute time.') hwPWVcManualSetFault = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 36), TruthValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcManualSetFault.setStatus('current') if mibBuilder.loadTexts: hwPWVcManualSetFault.setDescription('Denotes the manual has been set fault or not.') hwPWVcActive = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 37), TruthValue()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcActive.setStatus('current') if mibBuilder.loadTexts: hwPWVcActive.setDescription('Denotes the current vc is active or not.') hwPWVcVrIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 38), InterfaceIndexOrZero()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcVrIfIndex.setStatus('current') if mibBuilder.loadTexts: hwPWVcVrIfIndex.setDescription('Denotes the VRRP interface this PW binding to.') hwPWVcVrID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 39), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcVrID.setStatus('current') if mibBuilder.loadTexts: hwPWVcVrID.setDescription('Denotes the VrID this PW binding to.') hwPWBFDDetectMultiplier = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 40), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 50), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWBFDDetectMultiplier.setStatus('current') if mibBuilder.loadTexts: hwPWBFDDetectMultiplier.setDescription('The multiple of detection time.') hwPWBFDMinReceiveInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 41), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 1000), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWBFDMinReceiveInterval.setStatus('current') if mibBuilder.loadTexts: hwPWBFDMinReceiveInterval.setDescription('The interval of bfd messages to be received.') hwPWBFDMinTransmitInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 42), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 1000), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWBFDMinTransmitInterval.setStatus('current') if mibBuilder.loadTexts: hwPWBFDMinTransmitInterval.setDescription('The interval of bfd messages to be sent.') hwPWDynamicBFDDetect = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 43), TruthValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWDynamicBFDDetect.setStatus('current') if mibBuilder.loadTexts: hwPWDynamicBFDDetect.setDescription('This value indicates the capacitability to support dynamic BFD detect.') hwPWBFDRemoteVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 44), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWBFDRemoteVcID.setStatus('current') if mibBuilder.loadTexts: hwPWBFDRemoteVcID.setDescription('In the multiple-hop model, the value of remote VC id.') hwPWEthOamType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 45), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ethOam1ag", 1), ("ethOam3ah", 2), 
("noEthOamCfg", 3)))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWEthOamType.setStatus('current') if mibBuilder.loadTexts: hwPWEthOamType.setDescription('This value indicates the type of ETH OAM.') hwPWCfmMaIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 46), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 4095), ValueRangeConstraint(4294967295, 4294967295), ))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWCfmMaIndex.setStatus('current') if mibBuilder.loadTexts: hwPWCfmMaIndex.setDescription('This value indicates the current CFM MA index.') hwPWVcUpStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 47), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcUpStartTime.setStatus('current') if mibBuilder.loadTexts: hwPWVcUpStartTime.setDescription('Specifies the time this PW status was Up(1).') hwPWVcUpSumTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 48), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcUpSumTime.setStatus('current') if mibBuilder.loadTexts: hwPWVcUpSumTime.setDescription('Indicates the accumulated time when the VC is Up, in seconds.') hwPWVcIfName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 49), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcIfName.setStatus('current') if mibBuilder.loadTexts: hwPWVcIfName.setDescription('Name of the interface (or the virtual interface) associated with the PW.') hwPWVcRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 51), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcRowStatus.setStatus('current') if mibBuilder.loadTexts: hwPWVcRowStatus.setDescription("RowStatus for this Table. Restriction: The row must be created by 'createAndGo' handle only. Handle 'createAndWait' is forbidden. 
Not support modifying configuration.") hwPWVcAtmPackOvertime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 52), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(100, 50000), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcAtmPackOvertime.setStatus('current') if mibBuilder.loadTexts: hwPWVcAtmPackOvertime.setDescription('Specifies the AtmPackOvertime.') hwPWVcPwJitterBufferDepth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 53), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcPwJitterBufferDepth.setStatus('current') if mibBuilder.loadTexts: hwPWVcPwJitterBufferDepth.setDescription('Specifies the PwJitterBufferDepth.') hwPWVcPwTdmEncapsulationNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 54), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 40))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcPwTdmEncapsulationNum.setStatus('current') if mibBuilder.loadTexts: hwPWVcPwTdmEncapsulationNum.setDescription('Specifies the PwTdmEncapsulationNum.') hwPWVcPwIdleCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 55), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 255), ValueRangeConstraint(65535, 65535), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcPwIdleCode.setStatus('current') if mibBuilder.loadTexts: hwPWVcPwIdleCode.setDescription('Specifies the PwIdleCode.') hwPWVcPwRtpHeader = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 56), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcPwRtpHeader.setStatus('current') if mibBuilder.loadTexts: hwPWVcPwRtpHeader.setDescription('Specifies the PwRtpHeader.') hwPWVcSwitchTnlPolicyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 57), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 39))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchTnlPolicyName.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchTnlPolicyName.setDescription('Indicates the switch tunnel policy name used.') hwPWVcCfmMdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 58), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 4095), ValueRangeConstraint(4294967295, 4294967295), ))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcCfmMdIndex.setStatus('current') if mibBuilder.loadTexts: hwPWVcCfmMdIndex.setDescription('This value indicates the current CFM MD index.') hwPWVcCfmMaName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 59), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 43))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcCfmMaName.setStatus('current') if mibBuilder.loadTexts: hwPWVcCfmMaName.setDescription('This value indicates the current CFM MA name used.') hwPWVcCfmMdName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 60), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 43))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcCfmMdName.setStatus('current') if mibBuilder.loadTexts: hwPWVcCfmMdName.setDescription('This value indicates the current CFM MD name used.') hwPWVcRawOrTagged = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 61), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 
3))).clone(namedValues=NamedValues(("raw", 1), ("tagged", 2), ("rawTagNotConfiged", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcRawOrTagged.setStatus('current') if mibBuilder.loadTexts: hwPWVcRawOrTagged.setDescription('Specifies whether the raw or tagged is configured.') hwPWVcInterworkingType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 62), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ipInterWorking", 1), ("ipLayer2", 2), ("ipUnknown", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcInterworkingType.setStatus('current') if mibBuilder.loadTexts: hwPWVcInterworkingType.setDescription('Specifies the interworking type of the VC entry.') hwPWVcCir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 63), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcCir.setStatus('current') if mibBuilder.loadTexts: hwPWVcCir.setDescription('Specifies the committed information rate, based on the VC entry.') hwPWVcPir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 64), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcPir.setStatus('current') if mibBuilder.loadTexts: hwPWVcPir.setDescription('Specifies the peak information rate, based on the VC entry.') hwPWVcQosProfile = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 65), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcQosProfile.setStatus('current') if mibBuilder.loadTexts: hwPWVcQosProfile.setDescription("Specifies the QoS profile's name, based on the VC entry.") hwPWVcSwitchCir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 66), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchCir.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchCir.setDescription('Specifies the committed information rate, based on the switch VC entry.') hwPWVcSwitchPir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 67), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchPir.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchPir.setDescription('Specifies the peak information rate, based on the switch VC entry.') hwPWVcSwitchQosProfile = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 68), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchQosProfile.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchQosProfile.setDescription("Specifies the QoS profile's name, based on the switch VC entry.") hwPWVcTrigger = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 69), TruthValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcTrigger.setStatus('current') if mibBuilder.loadTexts: hwPWVcTrigger.setDescription('Specifies whether the PW remote interface shutdown or not.') hwPWVcEnableACOAM = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 70), EnabledStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcEnableACOAM.setStatus('current') if mibBuilder.loadTexts: hwPWVcEnableACOAM.setDescription('Specifies whether ACOAM detection and notification are all enabled or not.') hwPWVcSwitchVrIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 71), InterfaceIndexOrZero()).setMaxAccess("readcreate") if 
mibBuilder.loadTexts: hwPWVcSwitchVrIfIndex.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchVrIfIndex.setDescription('Denotes the VRRP interface the switch PW binding to.') hwPWVcSwitchVrID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 72), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWVcSwitchVrID.setStatus('current') if mibBuilder.loadTexts: hwPWVcSwitchVrID.setDescription('Denotes the VrID the switch PW binding to.') hwPWVcQosParaFromPWT = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 73), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("cliOrMib", 1), ("pwTemplate", 2), ("unknown", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcQosParaFromPWT.setStatus('current') if mibBuilder.loadTexts: hwPWVcQosParaFromPWT.setDescription('This object indicates the configuration of the Qos parameters managed through command line or PW template.') hwPWVcBfdParaFromPWT = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 74), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("cliOrMib", 1), ("pwTemplate", 2), ("unknown", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWVcBfdParaFromPWT.setStatus('current') if mibBuilder.loadTexts: hwPWVcBfdParaFromPWT.setDescription('This object indicates the configuration of the Bfd parameters managed through command line or PW template.') hwPwVcNegotiateMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 75), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("slaveOrMaster", 1), ("independent", 2), ("unknown", 3), ("frr", 4)))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPwVcNegotiateMode.setStatus('current') if mibBuilder.loadTexts: hwPwVcNegotiateMode.setDescription('This object indicates the negotiation mode of the PW on the local node.') hwPwVcIsBypass = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 76), TruthValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPwVcIsBypass.setStatus('current') if mibBuilder.loadTexts: hwPwVcIsBypass.setDescription('This object indicates whether the PW is the bypass PW.') hwPwVcIsAdmin = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 77), TruthValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPwVcIsAdmin.setStatus('current') if mibBuilder.loadTexts: hwPwVcIsAdmin.setDescription('This object indicates whether the PW is the administrator PW.') hwPwVcAdminPwIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 78), InterfaceIndexOrZero()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPwVcAdminPwIfIndex.setStatus('current') if mibBuilder.loadTexts: hwPwVcAdminPwIfIndex.setDescription('This object indicates the index of the interface on which the administrator PW resides after it is being tracked by the service PW.') hwPwVcAdminPwLinkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 79), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("unknown", 3)))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPwVcAdminPwLinkStatus.setStatus('current') if mibBuilder.loadTexts: hwPwVcAdminPwLinkStatus.setDescription('This object indicates the status 
of the administrator PW when it is tracked by the service PW.')
hwPwVcSwitchAdminPwIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 80), InterfaceIndexOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchAdminPwIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchAdminPwIfIndex.setDescription('This object indicates the index of the interface on which the administrator PW resides when it is tracked by the switch PW.')
hwPwVcSwitchAdminPwLinkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 81), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("unknown", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSwitchAdminPwLinkStatus.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchAdminPwLinkStatus.setDescription('This object indicates the status of the administrator PW when it is tracked by the switch PW.')
hwPwVcSwitchBackupAdminPwIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 82), InterfaceIndexOrZero()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchBackupAdminPwIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupAdminPwIfIndex.setDescription('This object indicates the index of the interface on which the administrator PW resides when it is tracked by the switch backup PW.')
hwPwVcSwitchBackupAdminPwLinkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 83), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("unknown", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSwitchBackupAdminPwLinkStatus.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupAdminPwLinkStatus.setDescription('This object indicates the status of the administrator PW when it is tracked by the switch backup PW.')
hwPwVcSwitchBackupVcId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 84), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcId.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcId.setDescription('This object indicates the VC ID of the switch backup PW.')
hwPwVcSwitchBackupVcPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 85), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcPeerAddrType.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcPeerAddrType.setDescription('This object indicates the type of the IP address of the peer on the switch backup PW. Currently, only IPv4 addresses are supported.')
hwPwVcSwitchBackupVcPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 86), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcPeerAddr.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcPeerAddr.setDescription('This object indicates the IP address of the peer on the switch backup PW.')
hwPwVcSwitchBackupVcReceiveLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 87), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcReceiveLabel.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcReceiveLabel.setDescription('This object indicates the inbound label of the switch backup VC. For a static VC, the value of the inbound label ranges from 16 to 1023. For a dynamic VC, the inbound label is automatically generated by the system.')
hwPwVcSwitchBackupVcSendLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 88), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcSendLabel.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcSendLabel.setDescription('This object indicates the outbound label of the switch backup VC. For a static VC, the value of the outbound label ranges from 0 to 1048575. For a dynamic VC, the outbound label is automatically generated by the system.')
hwPwVcSwitchBackupVcTnlPolicyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 89), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 19))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcTnlPolicyName.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcTnlPolicyName.setDescription('This object indicates the name of the tunnel policy of the switch backup VC.')
hwPwVcSwitchBackupVcCir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 90), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcCir.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcCir.setDescription('This object indicates the CIR of the switch backup VC.')
hwPwVcSwitchBackupVcPir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 91), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcPir.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcPir.setDescription('This object indicates the PIR of the switch backup VC.')
hwPwVcSwitchBackupVcQosProfile = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 92), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcQosProfile.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcQosProfile.setDescription('This object indicates the name of the QoS profile of the switch backup VC.')
hwPwVcSlaveMasterMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 93), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("slave", 1), ("master", 2), ("unknown", 3), ("bypass", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSlaveMasterMode.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSlaveMasterMode.setDescription('This object indicates whether the status of the VC is master or slave.')
hwPwVcSwitchVcSlaveMasterMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 94), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("slave", 1), ("master", 2), ("unknown", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSwitchVcSlaveMasterMode.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchVcSlaveMasterMode.setDescription('This object indicates whether the status of the switch VC is master or slave.')
hwPwVcSwitchBackupVcSlaveMasterMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 95), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("slave", 1), ("master", 2), ("unknown", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcSlaveMasterMode.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcSlaveMasterMode.setDescription('This object indicates whether the status of the switch backup VC is master or slave.')
hwPwVcSwitchVcActive = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 96), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSwitchVcActive.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchVcActive.setDescription('This object indicates whether the status of the switch VC is active or not.')
hwPwVcSwitchBackupVcActive = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 97), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcActive.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcActive.setDescription('This object indicates whether the status of the switch backup VC is active or not.')
hwPwVcSwitchCwTrans = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 98), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchCwTrans.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchCwTrans.setDescription('This object indicates whether the SPE supports control word transparency. The default is false.')
hwPwVcSwitchVcServiceName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 99), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 100))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchVcServiceName.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchVcServiceName.setDescription('This object indicates the service name of the switch VC.')
hwPwVcSwitchBackupVcServiceName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 1, 1, 100), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 100))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcServiceName.setStatus('current')
if mibBuilder.loadTexts: hwPwVcSwitchBackupVcServiceName.setDescription('This object indicates the service name of the switch backup VC.')
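# Usage sketch (illustrative comment, not part of the generated module): the
# master/slave and active flags above can be read with the pysnmp high-level
# API. The agent address '192.0.2.1', community 'public', and the row index
# values (VC ID 100, VC type 5) are assumptions for illustration only; the
# sketch also assumes this compiled HUAWEI-PWE3-MIB is available to pysnmp's
# MIB resolver, and that hwPWVcEntry rows are indexed by hwPWVcID.hwPWVcType
# as the statistics and remote-VC tables below are.
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, getCmd)
#
#   errInd, errStat, errIdx, varBinds = next(getCmd(
#       SnmpEngine(), CommunityData('public'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwPwVcSlaveMasterMode', 100, 5)),
#       ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwPwVcSwitchVcActive', 100, 5))))
#   if not (errInd or errStat):
#       for name, value in varBinds:
#           print('%s = %s' % (name.prettyPrint(), value.prettyPrint()))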
hwPWVcTnlTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 2), )
if mibBuilder.loadTexts: hwPWVcTnlTable.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTnlTable.setDescription('This table is used to search the tunnel index of a VC.')
hwPWVcTnlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 2, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwPWVcID"), (0, "HUAWEI-PWE3-MIB", "hwPWVcType"), (0, "HUAWEI-PWE3-MIB", "hwPWVcTnlIndex"))
if mibBuilder.loadTexts: hwPWVcTnlEntry.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTnlEntry.setDescription('Provides the information of a VC tunnel entry.')
hwPWVcTnlIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 2, 1, 1), Unsigned32())
if mibBuilder.loadTexts: hwPWVcTnlIndex.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTnlIndex.setDescription('This object indicates the tunnel index of the VC.')
hwPWVcTnlType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("lsp", 1), ("gre", 2), ("ipsec", 3), ("crLsp", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcTnlType.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTnlType.setDescription('This object indicates the tunnel type.')
hwPWTnlForBfdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 2, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWTnlForBfdIndex.setStatus('current')
if mibBuilder.loadTexts: hwPWTnlForBfdIndex.setDescription('This object indicates the index of the LSP for BFD.')
hwPWVcStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 3), )
if mibBuilder.loadTexts: hwPWVcStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwPWVcStatisticsTable.setDescription("This table contains the PWE3 VC packet statistics.")
hwPWVcStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 3, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwPWVcID"), (0, "HUAWEI-PWE3-MIB", "hwPWVcType"))
if mibBuilder.loadTexts: hwPWVcStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwPWVcStatisticsEntry.setDescription("Provides the PWE3 VC packet statistics.")
hwPWVcStatisticsRcvPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 3, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcStatisticsRcvPkts.setStatus('current')
if mibBuilder.loadTexts: hwPWVcStatisticsRcvPkts.setDescription('The total number of packets received on this VC.')
hwPWVcStatisticsRcvBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 3, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcStatisticsRcvBytes.setStatus('current')
if mibBuilder.loadTexts: hwPWVcStatisticsRcvBytes.setDescription('The total number of bytes received on this VC.')
hwPWVcStatisticsSndPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 3, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcStatisticsSndPkts.setStatus('current')
if mibBuilder.loadTexts: hwPWVcStatisticsSndPkts.setDescription('The total number of packets sent on this VC.')
hwPWVcStatisticsSndBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 3, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcStatisticsSndBytes.setStatus('current')
if mibBuilder.loadTexts: hwPWVcStatisticsSndBytes.setDescription('The total number of bytes sent on this VC.')
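# Usage sketch (illustrative comment): the per-VC packet and byte counters in
# hwPWVcStatisticsTable can be collected with a GETNEXT walk. The agent
# address and community are placeholders, and the compiled MIB is assumed to
# be available to the resolver; lexicographicMode=False stops the walk at the
# end of each column's subtree.
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, nextCmd)
#
#   for errInd, errStat, errIdx, varBinds in nextCmd(
#           SnmpEngine(), CommunityData('public'),
#           UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#           ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwPWVcStatisticsRcvPkts')),
#           ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwPWVcStatisticsSndPkts')),
#           lexicographicMode=False):
#       if errInd or errStat:
#           break
#       for name, value in varBinds:
#           print('%s = %s' % (name.prettyPrint(), value.prettyPrint()))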
hwPWRemoteVcTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4), )
if mibBuilder.loadTexts: hwPWRemoteVcTable.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcTable.setDescription('This table provides remote PW information for each local PW.')
hwPWRemoteVcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwPWVcID"), (0, "HUAWEI-PWE3-MIB", "hwPWVcType"))
if mibBuilder.loadTexts: hwPWRemoteVcEntry.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcEntry.setDescription('An entry in this table is created by the agent for every PW.')
hwPWRemoteVcID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWRemoteVcID.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcID.setDescription("Used in the outgoing PW ID field within the 'Virtual Circuit FEC Element' of the remote PW.")
hwPWRemoteVcType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4, 1, 2), HWL2VpnVcEncapsType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWRemoteVcType.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcType.setDescription('This value indicates the service to be carried over the remote PW.')
hwPWRemoteVcStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("plugout", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWRemoteVcStatus.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcStatus.setDescription('Indicates the forwarding status of the remote VC.')
hwPWRemoteVcGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWRemoteVcGroupID.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcGroupID.setDescription('Indicates the Group ID field of the remote PW. Currently, this value is always zero.')
hwPWRemoteVcMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4, 1, 5), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(46, 9600), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWRemoteVcMtu.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcMtu.setDescription('Indicates the supported MTU size of the remote PW.')
hwPWRemoteVcCtrlword = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4, 1, 6), HWEnableValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWRemoteVcCtrlword.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcCtrlword.setDescription('Indicates the control word capability of the remote PW.')
hwPWRemoteVcMaxAtmCells = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWRemoteVcMaxAtmCells.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcMaxAtmCells.setDescription('Indicates the maximum number of cells supported by the remote PW when the VC type is ATM.')
hwPWRemoteVcNotif = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 4, 1, 8), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWRemoteVcNotif.setStatus('current')
if mibBuilder.loadTexts: hwPWRemoteVcNotif.setDescription('Indicates whether notification is supported by the remote PW.')
hwPWVcSwitchNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 5), HWEnableValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwPWVcSwitchNotifEnable.setStatus('current')
if mibBuilder.loadTexts: hwPWVcSwitchNotifEnable.setDescription('If this object is set to enable(1), it enables the emission of hwPWVcSwitchWtoP and hwPWVcSwitchPtoW notifications; otherwise these notifications are not emitted. The default value is disable(2).')
hwPWVcUpDownNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 6), HWEnableValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwPWVcUpDownNotifEnable.setStatus('current')
if mibBuilder.loadTexts: hwPWVcUpDownNotifEnable.setDescription('This object indicates whether the PW VC state change notification is enabled. The default value is disable(2).')
hwPWVcDeletedNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 7), HWEnableValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwPWVcDeletedNotifEnable.setStatus('current')
if mibBuilder.loadTexts: hwPWVcDeletedNotifEnable.setDescription('This object indicates whether the PW VC deletion notification is enabled. The default value is disable(2).')
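# Usage sketch (illustrative comment): the three read-write scalars above
# gate trap emission, so writing enable(1) to hwPWVcUpDownNotifEnable turns
# on the hwPWVcUp/hwPWVcDown notifications. The address and community are
# placeholders; the '0' appended to the object name selects the scalar
# instance, and the integer 1 corresponds to enable in HWEnableValue.
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, setCmd)
#
#   errInd, errStat, errIdx, varBinds = next(setCmd(
#       SnmpEngine(), CommunityData('private'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwPWVcUpDownNotifEnable', 0), 1)))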
hwPWVcStateChangeReason = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 8), HWL2VpnStateChangeReason()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwPWVcStateChangeReason.setStatus('current')
if mibBuilder.loadTexts: hwPWVcStateChangeReason.setDescription('This object indicates the reason for the PE VC state change.')
hwPWVcSwitchRmtID = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 9), Unsigned32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwPWVcSwitchRmtID.setStatus('current')
if mibBuilder.loadTexts: hwPWVcSwitchRmtID.setDescription('This object indicates the VC ID of the PW switchover between the working PW and the protection PW.')
hwLdpPWStateChangeReason = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 10), HWLdpPwStateChangeReason()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwLdpPWStateChangeReason.setStatus('current')
if mibBuilder.loadTexts: hwLdpPWStateChangeReason.setDescription("This object indicates the reason for the LDP PW VC's state change.")
hwPWVcTDMPerfCurrentTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11), )
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentTable.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentTable.setDescription('This table provides per-TDM-PW performance information. The contents of a table entry are reset to zero and refreshed every 15 minutes.')
hwPWVcTDMPerfCurrentEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwPWVcID"), (0, "HUAWEI-PWE3-MIB", "hwPWVcType"))
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentEntry.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentEntry.setDescription('An entry in this table is created by the agent for every TDM PW entry.')
hwPWVcTDMPerfCurrentMissingPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11, 1, 1), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentMissingPkts.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentMissingPkts.setDescription('Number of missing packets (as detected via control word sequence number gaps).')
hwPWVcTDMPerfCurrentJtrBfrOverruns = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentJtrBfrOverruns.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentJtrBfrOverruns.setDescription('Number of times the jitter buffer was overrun.')
hwPWVcTDMPerfCurrentJtrBfrUnderruns = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentJtrBfrUnderruns.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentJtrBfrUnderruns.setDescription('Number of times a packet needed to be played out and the jitter buffer was empty.')
hwPWVcTDMPerfCurrentMisOrderDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentMisOrderDropped.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentMisOrderDropped.setDescription('Number of packets detected out of order (via control word sequence numbers) that could not be re-ordered or could not fit in the jitter buffer.')
hwPWVcTDMPerfCurrentMalformedPkt = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11, 1, 5), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentMalformedPkt.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentMalformedPkt.setDescription('Number of packets detected with an unexpected size or a bad header stack.')
hwPWVcTDMPerfCurrentESs = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11, 1, 6), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentESs.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentESs.setDescription('The counter associated with the number of Errored Seconds encountered. Any malformed packet, sequence error, LOPS, and the like are counted as Errored Seconds.')
hwPWVcTDMPerfCurrentSESs = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11, 1, 7), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentSESs.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentSESs.setDescription('The counter associated with the number of Severely Errored Seconds encountered.')
hwPWVcTDMPerfCurrentUASs = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 1, 11, 1, 8), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentUASs.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentUASs.setDescription('The counter associated with the number of Unavailable Seconds encountered. Any ten consecutive seconds of SES are counted as Unavailable Seconds (UAS).')
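# Worked sketch (illustrative comment): the ES/SES/UAS counters above follow
# the usual errored-second accounting, where any errored second counts as an
# ES and ten consecutive SESs begin an unavailable period. A deliberately
# simplified classifier over per-second error ratios (real accounting also
# reclassifies the seconds entering and leaving unavailability) could look
# like this; the 0.3 SES threshold is an assumption for illustration:
#
#   def classify(seconds, ses_threshold=0.3):
#       # seconds: iterable of per-second error ratios in [0, 1]
#       es = ses = uas = run = 0
#       for ratio in seconds:
#           if ratio > 0:
#               es += 1
#           if ratio >= ses_threshold:
#               ses += 1
#               run += 1
#               if run == 10:        # ten consecutive SESs: count all ten as UAS
#                   uas += 10
#               elif run > 10:
#                   uas += 1
#           else:
#               run = 0
#       return es, ses, uas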
"hwPWVcIfName"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchID"), ("HUAWEI-PWE3-MIB", "hwPWVcTnlPolicyName")) if mibBuilder.loadTexts: hwPWVcUp.setStatus('current') if mibBuilder.loadTexts: hwPWVcUp.setDescription("This notification indicates the VC's state changes to up.") hwPWVcDeleted = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 2, 5)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwPWVcIfIndex"), ("HUAWEI-PWE3-MIB", "hwPWVcInboundLabel"), ("HUAWEI-PWE3-MIB", "hwPWVcOutboundLabel"), ("HUAWEI-PWE3-MIB", "hwPWVcSecondary"), ("HUAWEI-PWE3-MIB", "hwPWVcIfName"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchID")) if mibBuilder.loadTexts: hwPWVcDeleted.setStatus('current') if mibBuilder.loadTexts: hwPWVcDeleted.setDescription('This notification indicates the VC is deleted.') hwPWVcBackup = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 2, 6)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwPWVcIfIndex"), ("HUAWEI-PWE3-MIB", "hwPWVcInboundLabel"), ("HUAWEI-PWE3-MIB", "hwPWVcOutboundLabel"), ("HUAWEI-PWE3-MIB", "hwPWVcSecondary"), ("HUAWEI-PWE3-MIB", "hwPWVcStateChangeReason"), ("SNMPv2-MIB", "sysUpTime"), ("HUAWEI-PWE3-MIB", "hwPWVcIfName"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchID")) if mibBuilder.loadTexts: hwPWVcBackup.setStatus('current') if mibBuilder.loadTexts: hwPWVcBackup.setDescription("This notification indicates the VC's state changes to backup.") hwLdpPWVcDown = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 2, 7)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwLdpPWStateChangeReason")) if mibBuilder.loadTexts: hwLdpPWVcDown.setStatus('current') if mibBuilder.loadTexts: hwLdpPWVcDown.setDescription("This notification indicates the LDP PW VC's state changes to down.") hwLdpPWVcUp = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 2, 8)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwLdpPWStateChangeReason")) if mibBuilder.loadTexts: hwLdpPWVcUp.setStatus('current') if mibBuilder.loadTexts: hwLdpPWVcUp.setDescription("This notification indicates the Ldp PW VC's state changes to up.") hwSvcObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3)) hwSvcTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1), ) if mibBuilder.loadTexts: hwSvcTable.setStatus('current') if mibBuilder.loadTexts: hwSvcTable.setDescription('This table is the SVC configuration table. 
hwSvcObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3))
hwSvcTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1), )
if mibBuilder.loadTexts: hwSvcTable.setStatus('current')
if mibBuilder.loadTexts: hwSvcTable.setDescription('This table is the SVC configuration table. Users can create or delete an SVC through it.')
hwSvcEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwSvcIfIndex"))
if mibBuilder.loadTexts: hwSvcEntry.setStatus('current')
if mibBuilder.loadTexts: hwSvcEntry.setDescription('Provides the information of an SVC entry.')
hwSvcIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 1), InterfaceIndexOrZero())
if mibBuilder.loadTexts: hwSvcIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwSvcIfIndex.setDescription('Index of the interface (or the virtual interface) associated with the PW.')
hwSvcID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 2), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcID.setStatus('current')
if mibBuilder.loadTexts: hwSvcID.setDescription("Index for the conceptual row identifying a PW within this PW Emulation table. Used in the outgoing PW ID field within the 'Virtual Circuit FEC Element'.")
hwSvcType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 3), HWL2VpnVcEncapsType()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcType.setStatus('current')
if mibBuilder.loadTexts: hwSvcType.setDescription('Index for the conceptual row identifying a PW within this PW Emulation table. This value indicates the service to be carried over this PW.')
hwSvcPeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 4), InetAddressType().clone('ipv4')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcPeerAddrType.setStatus('current')
if mibBuilder.loadTexts: hwSvcPeerAddrType.setDescription("Denotes the address type of the peer node. It should be set to 'unknown' if the PE/PW maintenance protocol is not used and the address is unknown. Currently, only 'ipv4' is supported.")
hwSvcPeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 5), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcPeerAddr.setStatus('current')
if mibBuilder.loadTexts: hwSvcPeerAddr.setDescription("This object contains the value of the peer node address of the PW/PE maintenance protocol entity. This object SHOULD contain a value of all zeroes if not applicable (hwSvcPeerAddrType is 'unknown').")
hwSvcStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("plugout", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcStatus.setStatus('current')
if mibBuilder.loadTexts: hwSvcStatus.setDescription("Indicates the status of the PW in the local node. Currently, 'plugout' is not supported.")
hwSvcInboundLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 7), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcInboundLabel.setStatus('current')
if mibBuilder.loadTexts: hwSvcInboundLabel.setDescription('This object indicates the inbound label.')
hwSvcOutboundLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 8), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcOutboundLabel.setStatus('current')
if mibBuilder.loadTexts: hwSvcOutboundLabel.setDescription('This object indicates the outbound label.')
hwSvcGroupID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 9), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcGroupID.setStatus('current')
if mibBuilder.loadTexts: hwSvcGroupID.setDescription("Used in the Group ID field sent to the peer PWES within the maintenance protocol used for PW setup. Applicable if SvcOwner equals 'pwIdFecSignaling' or 'l2tpControlProtocol'; it should be set to zero otherwise. Currently, this value is always zero.")
hwSvcAcStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("plugout", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcAcStatus.setStatus('current')
if mibBuilder.loadTexts: hwSvcAcStatus.setDescription("Local AC status. Currently, 'plugout' is not supported.")
hwSvcACOAMStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcACOAMStatus.setStatus('current')
if mibBuilder.loadTexts: hwSvcACOAMStatus.setDescription("Denotes whether the AC's protocol is operational.")
hwSvcMtu = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(46, 9600), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcMtu.setStatus('current')
if mibBuilder.loadTexts: hwSvcMtu.setDescription("If not equal to zero, the optional MTU object in the signaling protocol will be sent with this value, representing the locally supported MTU size over the interface (or the virtual interface) associated with the PW. Currently not supported; '0' is the default value.")
hwSvcCtrlWord = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 13), HWEnableValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcCtrlWord.setStatus('current')
if mibBuilder.loadTexts: hwSvcCtrlWord.setDescription('If signaling is used for PW establishment, this object indicates the status of the control word negotiation; in both signaling and manual configuration it indicates whether the CW is to be present for this PW.')
hwSvcVCCV = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 14), Bits().clone(namedValues=NamedValues(("ccCw", 0), ("ccAlert", 1), ("ccLabel", 2), ("cvIcmpping", 3), ("cvLspping", 4), ("cvBfd", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcVCCV.setStatus('current')
if mibBuilder.loadTexts: hwSvcVCCV.setDescription('Indicates the optional VCCV capabilities of the SVC. According to whether the control word is enabled, the value can be ccCw(0)|ccAlert(1)|cvLspping(4)|cvBfd(5) or ccAlert(1)|cvLspping(4)|cvBfd(5). The default value is ccAlert(1)|cvLspping(4)|cvBfd(5).')
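# Worked sketch (illustrative comment): hwSvcVCCV is a BITS construct, so its
# value arrives as an octet string in which bit 0 is the high-order bit of
# the first octet. A small decoder mapping set bits back to the names
# declared above:
#
#   VCCV_BITS = ('ccCw', 'ccAlert', 'ccLabel', 'cvIcmpping', 'cvLspping', 'cvBfd')
#
#   def decode_vccv(octets):
#       names = []
#       for i, name in enumerate(VCCV_BITS):
#           if octets[i // 8] & (0x80 >> (i % 8)):
#               names.append(name)
#       return names
#
#   # The default ccAlert(1)|cvLspping(4)|cvBfd(5) encodes as 0x4C:
#   # decode_vccv(b'\x4c') -> ['ccAlert', 'cvLspping', 'cvBfd']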
hwSvcBandWidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 15), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 32000000))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcBandWidth.setStatus('current')
if mibBuilder.loadTexts: hwSvcBandWidth.setDescription("This object indicates the bandwidth. Currently not supported; '0' is the default value.")
hwSvcMaxAtmCells = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 16), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 28))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcMaxAtmCells.setStatus('current')
if mibBuilder.loadTexts: hwSvcMaxAtmCells.setDescription('Indicates the maximum number of cells supported when the VC type is ATM.')
hwSvcTnlPolicyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 17), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 39))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcTnlPolicyName.setStatus('current')
if mibBuilder.loadTexts: hwSvcTnlPolicyName.setDescription('Indicates the name of the tunnel policy used.')
hwSvcQoSBehaviorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 18), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcQoSBehaviorIndex.setStatus('current')
if mibBuilder.loadTexts: hwSvcQoSBehaviorIndex.setDescription("Indicates the traffic behavior index when QoS is implemented. Currently not supported; '0' is the default value.")
hwSvcPWTemplateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 19))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcPWTemplateName.setStatus('current')
if mibBuilder.loadTexts: hwSvcPWTemplateName.setDescription('Indicates the PW template referenced.')
hwSvcUpTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 20), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcUpTime.setStatus('current')
if mibBuilder.loadTexts: hwSvcUpTime.setDescription('Indicates how long the SVC has been Up since it last came Up, in seconds.')
hwSvcOAMSync = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 21), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcOAMSync.setStatus('current')
if mibBuilder.loadTexts: hwSvcOAMSync.setDescription('Denotes whether OAM state synchronization between the AC and the PSN is enabled.')
hwSvcForBfdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 22), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcForBfdIndex.setStatus('current')
if mibBuilder.loadTexts: hwSvcForBfdIndex.setDescription("The index of the PW for BFD. Currently not supported; the default value '0' is returned.")
hwSvcSecondary = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 23), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcSecondary.setStatus('current')
if mibBuilder.loadTexts: hwSvcSecondary.setDescription("Indicates whether or not the secondary PW is used. Currently not supported; the default value 'false' is returned.")
hwSvcDelayTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 24), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcDelayTime.setStatus('current')
if mibBuilder.loadTexts: hwSvcDelayTime.setDescription("The reroute delay time. Currently not supported; the default value '0' is returned.")
hwSvcReroutePolicy = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("delay", 1), ("immediately", 2), ("never", 3), ("none", 4), ("err", 5), ("invalid", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcReroutePolicy.setStatus('current')
if mibBuilder.loadTexts: hwSvcReroutePolicy.setDescription("Reroute policy. Currently not supported; the default value 'invalid(6)' is returned.")
hwSvcResumeTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 26), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcResumeTime.setStatus('current')
if mibBuilder.loadTexts: hwSvcResumeTime.setDescription("The reroute resume time. Currently not supported; the default value '0' is returned.")
hwSvcRerouteReason = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 27), HWL2VpnStateChangeReason()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcRerouteReason.setStatus('current')
if mibBuilder.loadTexts: hwSvcRerouteReason.setDescription("Last reroute reason. Currently not supported; the default value 'invalidReason(1)' is returned.")
hwSvcLastRerouteTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 28), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcLastRerouteTime.setStatus('current')
if mibBuilder.loadTexts: hwSvcLastRerouteTime.setDescription("Last reroute time. Currently not supported; the default value '0' is returned.")
hwSvcManualSetFault = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 29), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcManualSetFault.setStatus('current')
if mibBuilder.loadTexts: hwSvcManualSetFault.setDescription("Denotes whether a fault has been set manually. Currently not supported; the default value 'false' is returned.")
hwSvcActive = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 30), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcActive.setStatus('current')
if mibBuilder.loadTexts: hwSvcActive.setDescription("Denotes whether the current VC is active. Currently not supported; the default value 'false' is returned.")
hwSvcUpStartTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 31), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcUpStartTime.setStatus('current')
if mibBuilder.loadTexts: hwSvcUpStartTime.setDescription('Specifies the time at which this PW status last became Up(1).')
hwSvcUpSumTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 32), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcUpSumTime.setStatus('current')
if mibBuilder.loadTexts: hwSvcUpSumTime.setDescription('Indicates the accumulated time during which the SVC has been Up, in seconds.')
hwSvcAtmPackOvertime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 33), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(100, 50000), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcAtmPackOvertime.setStatus('current')
if mibBuilder.loadTexts: hwSvcAtmPackOvertime.setDescription('Specifies the AtmPackOvertime.')
hwSvcPwJitterBufferDepth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 34), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcPwJitterBufferDepth.setStatus('current')
if mibBuilder.loadTexts: hwSvcPwJitterBufferDepth.setDescription('Specifies the PwJitterBufferDepth.')
hwSvcPwTdmEncapsulationNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 35), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 40))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcPwTdmEncapsulationNum.setStatus('current')
if mibBuilder.loadTexts: hwSvcPwTdmEncapsulationNum.setDescription('Specifies the PwTdmEncapsulationNum.')
hwSvcPwIdleCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 36), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 255), ValueRangeConstraint(65535, 65535), ))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcPwIdleCode.setStatus('current')
if mibBuilder.loadTexts: hwSvcPwIdleCode.setDescription('Specifies the PwIdleCode.')
hwSvcPwRtpHeader = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 37), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcPwRtpHeader.setStatus('current')
if mibBuilder.loadTexts: hwSvcPwRtpHeader.setDescription('Specifies the PwRtpHeader.')
hwSvcRawOrTagged = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 38), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("raw", 1), ("tagged", 2), ("rawTagNotConfiged", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcRawOrTagged.setStatus('current')
if mibBuilder.loadTexts: hwSvcRawOrTagged.setDescription('Specifies whether the VLAN tag of the SVC entry is attached or stripped.')
hwSvcInterworkingType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 39), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ipInterWorking", 1), ("ipLayer2", 2), ("ipUnknown", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcInterworkingType.setStatus('current')
if mibBuilder.loadTexts: hwSvcInterworkingType.setDescription('Specifies the interworking type of the SVC entry.')
hwSvcCir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 40), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcCir.setStatus('current')
if mibBuilder.loadTexts: hwSvcCir.setDescription('Specifies the committed information rate, based on the SVC entry.')
hwSvcPir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 41), Unsigned32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcPir.setStatus('current')
if mibBuilder.loadTexts: hwSvcPir.setDescription('Specifies the peak information rate, based on the SVC entry.')
hwSvcQosProfile = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 42), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcQosProfile.setStatus('current')
if mibBuilder.loadTexts: hwSvcQosProfile.setDescription("Specifies the QoS profile's name, based on the SVC entry.")
hwSvcRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 1, 1, 51), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwSvcRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwSvcRowStatus.setDescription("RowStatus for this table. Restriction: a row must be created with 'createAndGo' only; 'createAndWait' is forbidden. Modifying the configuration of an existing row is not supported.")
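# Usage sketch (illustrative comment): per the hwSvcRowStatus description, an
# SVC row must be created with createAndGo(4), supplying the writable columns
# in the same SET request. The interface index 17, VC ID 200, peer address,
# and labels below are assumptions for illustration only.
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, setCmd)
#
#   ifIndex = 17   # hwSvcEntry is indexed by hwSvcIfIndex
#   errInd, errStat, errIdx, varBinds = next(setCmd(
#       SnmpEngine(), CommunityData('private'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwSvcID', ifIndex), 200),
#       ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwSvcPeerAddr', ifIndex), '10.0.0.2'),
#       ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwSvcInboundLabel', ifIndex), 100),
#       ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwSvcOutboundLabel', ifIndex), 200),
#       ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwSvcRowStatus', ifIndex), 4)))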
hwSvcTnlTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 2), )
if mibBuilder.loadTexts: hwSvcTnlTable.setStatus('current')
if mibBuilder.loadTexts: hwSvcTnlTable.setDescription('This table is used to search the tunnel index of an SVC.')
hwSvcTnlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 2, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwSvcIfIndex"), (0, "HUAWEI-PWE3-MIB", "hwSvcTnlIndex"))
if mibBuilder.loadTexts: hwSvcTnlEntry.setStatus('current')
if mibBuilder.loadTexts: hwSvcTnlEntry.setDescription('Provides the information of an SVC tunnel entry.')
hwSvcTnlIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 2, 1, 1), Unsigned32())
if mibBuilder.loadTexts: hwSvcTnlIndex.setStatus('current')
if mibBuilder.loadTexts: hwSvcTnlIndex.setDescription('This object indicates the tunnel index of the SVC.')
hwSvcTnlType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("lsp", 1), ("gre", 2), ("ipsec", 3), ("crLsp", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcTnlType.setStatus('current')
if mibBuilder.loadTexts: hwSvcTnlType.setDescription('This object indicates the tunnel type.')
hwSvcTnlForBfdIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 2, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcTnlForBfdIndex.setStatus('current')
if mibBuilder.loadTexts: hwSvcTnlForBfdIndex.setDescription("This object indicates the index of the LSP for BFD. Currently not supported; the default value '0' is returned.")
hwSvcStatisticsTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 3), )
if mibBuilder.loadTexts: hwSvcStatisticsTable.setStatus('current')
if mibBuilder.loadTexts: hwSvcStatisticsTable.setDescription("This table contains the L2VPN SVC packet statistics.")
hwSvcStatisticsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 3, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwSvcIfIndex"))
if mibBuilder.loadTexts: hwSvcStatisticsEntry.setStatus('current')
if mibBuilder.loadTexts: hwSvcStatisticsEntry.setDescription("Provides the L2VPN SVC packet statistics.")
hwSvcStatisticsRcvPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 3, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcStatisticsRcvPkts.setStatus('current')
if mibBuilder.loadTexts: hwSvcStatisticsRcvPkts.setDescription('The total number of packets received on this SVC.')
hwSvcStatisticsRcvBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 3, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcStatisticsRcvBytes.setStatus('current')
if mibBuilder.loadTexts: hwSvcStatisticsRcvBytes.setDescription('The total number of bytes received on this SVC.')
hwSvcStatisticsSndPkts = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 3, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcStatisticsSndPkts.setStatus('current')
if mibBuilder.loadTexts: hwSvcStatisticsSndPkts.setDescription('The total number of packets sent on this SVC.')
hwSvcStatisticsSndBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 3, 1, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwSvcStatisticsSndBytes.setStatus('current')
if mibBuilder.loadTexts: hwSvcStatisticsSndBytes.setDescription('The total number of bytes sent on this SVC.')
hwSvcSwitchNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 4), HWEnableValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwSvcSwitchNotifEnable.setStatus('current')
if mibBuilder.loadTexts: hwSvcSwitchNotifEnable.setDescription("If this object is set to enable(1), it enables the emission of hwSvcSwitchWtoP and hwSvcSwitchPtoW notifications; otherwise these notifications are not emitted. Currently not supported. The default value is disable(2).")
hwSvcUpDownNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 5), HWEnableValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwSvcUpDownNotifEnable.setStatus('current')
if mibBuilder.loadTexts: hwSvcUpDownNotifEnable.setDescription('This object indicates whether the SVC state change notification is enabled. The default value is disable(2).')
hwSvcDeletedNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 6), HWEnableValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwSvcDeletedNotifEnable.setStatus('current')
if mibBuilder.loadTexts: hwSvcDeletedNotifEnable.setDescription('This object indicates whether the SVC deletion notification is enabled. The default value is disable(2).')
hwSvcStateChangeReason = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 3, 7), HWL2VpnStateChangeReason()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwSvcStateChangeReason.setStatus('current')
if mibBuilder.loadTexts: hwSvcStateChangeReason.setDescription('This object indicates the reason for the PE VC state change.')
hwL2vpnSvcMIBTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 4))
hwSvcSwitchWtoP = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 4, 1)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcID"), ("HUAWEI-PWE3-MIB", "hwSvcType"), ("HUAWEI-PWE3-MIB", "hwSvcCtrlWord"), ("HUAWEI-PWE3-MIB", "hwSvcStateChangeReason"), ("IF-MIB", "ifName"))
if mibBuilder.loadTexts: hwSvcSwitchWtoP.setStatus('current')
if mibBuilder.loadTexts: hwSvcSwitchWtoP.setDescription("This notification is generated when a switchover from the working PW to the protection PW occurs. Currently not supported.")
hwSvcSwitchPtoW = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 4, 2)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcID"), ("HUAWEI-PWE3-MIB", "hwSvcType"), ("HUAWEI-PWE3-MIB", "hwSvcCtrlWord"), ("HUAWEI-PWE3-MIB", "hwSvcStateChangeReason"), ("IF-MIB", "ifName"))
if mibBuilder.loadTexts: hwSvcSwitchPtoW.setStatus('current')
if mibBuilder.loadTexts: hwSvcSwitchPtoW.setDescription("This notification is generated when a switchover from the protection PW to the working PW occurs. Currently not supported.")
hwSvcDown = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 4, 3)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcID"), ("HUAWEI-PWE3-MIB", "hwSvcType"), ("HUAWEI-PWE3-MIB", "hwSvcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwSvcInboundLabel"), ("HUAWEI-PWE3-MIB", "hwSvcOutboundLabel"), ("HUAWEI-PWE3-MIB", "hwSvcStateChangeReason"), ("IF-MIB", "ifName"), ("HUAWEI-PWE3-MIB", "hwSvcTnlPolicyName"))
if mibBuilder.loadTexts: hwSvcDown.setStatus('current')
if mibBuilder.loadTexts: hwSvcDown.setDescription("This notification indicates that the SVC's state changes to down.")
hwSvcUp = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 4, 4)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcID"), ("HUAWEI-PWE3-MIB", "hwSvcType"), ("HUAWEI-PWE3-MIB", "hwSvcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwSvcInboundLabel"), ("HUAWEI-PWE3-MIB", "hwSvcOutboundLabel"), ("HUAWEI-PWE3-MIB", "hwSvcStateChangeReason"), ("IF-MIB", "ifName"), ("HUAWEI-PWE3-MIB", "hwSvcTnlPolicyName"))
if mibBuilder.loadTexts: hwSvcUp.setStatus('current')
if mibBuilder.loadTexts: hwSvcUp.setDescription("This notification indicates that the SVC's state changes to up.")
"hwSvcOutboundLabel"), ("HUAWEI-PWE3-MIB", "hwSvcStateChangeReason"), ("IF-MIB", "ifName"), ("HUAWEI-PWE3-MIB", "hwSvcTnlPolicyName")) if mibBuilder.loadTexts: hwSvcUp.setStatus('current') if mibBuilder.loadTexts: hwSvcUp.setDescription("This notification indicates the SVC's state changes to up.") hwSvcDeleted = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 4, 5)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcID"), ("HUAWEI-PWE3-MIB", "hwSvcType"), ("HUAWEI-PWE3-MIB", "hwSvcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwSvcInboundLabel"), ("HUAWEI-PWE3-MIB", "hwSvcOutboundLabel")) if mibBuilder.loadTexts: hwSvcDeleted.setStatus('current') if mibBuilder.loadTexts: hwSvcDeleted.setDescription('This notification indicates the SVC is deleted.') hwPWTemplateTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5), ) if mibBuilder.loadTexts: hwPWTemplateTable.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateTable.setDescription('This table specifies information for configuring and status monitoring to PW tempalte.') hwPWTemplateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwPWTemplateName")) if mibBuilder.loadTexts: hwPWTemplateEntry.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateEntry.setDescription('A row in this table represents a pseudo wire (PW) template. It is indexed by hwPWCmdTemplateIndex, which uniquely identifying a singular tempalte.') hwPWTemplateName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 19))) if mibBuilder.loadTexts: hwPWTemplateName.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateName.setDescription("The name of the PW template. Set by the operator to indicate the protocol responsible for establishing this PW. The value 'static' is used in all cases where no maintenance protocol (PW signaling) is used to set-up the PW, i.e. require configuration of entries in the PW tables including PW labels, etc. The value 'ldp' is used in case of signaling with the PWid FEC element with LDP signaling. The value 'rsvp' indicate the use of rsvp control protocol.") hwPWTemplatePeerAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 2), InetAddressType().clone('ipv4')).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplatePeerAddrType.setStatus('current') if mibBuilder.loadTexts: hwPWTemplatePeerAddrType.setDescription("Denotes the address type of the peer node. It should be set to 'unknown' if PE/PW maintenance protocol is not used and the address is unknown. Currently, support 'ipv4' only.") hwPWTemplatePeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 3), IpAddress()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplatePeerAddr.setStatus('current') if mibBuilder.loadTexts: hwPWTemplatePeerAddr.setDescription('This object contain the value of the peer node address of the PW/PE maintenance protocol entity. 
') hwPWTemplateCtrlword = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 4), HWEnableValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateCtrlword.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateCtrlword.setDescription('Indicates the control word capability of the switch PW.') hwPWTemplateVCCV = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 5), Bits().clone(namedValues=NamedValues(("ccCw", 0), ("ccAlert", 1), ("ccLabel", 2), ("cvIcmpping", 3), ("cvLspping", 4), ("cvBfd", 5), ("ccTtl", 6)))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateVCCV.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateVCCV.setDescription('Indicates the optional VCCV capabilities of the PW template. According to whether the control word is enabled, the value can be ccCw(0)|ccAlert(1)|ccTtl(6)|cvLspping(4)|cvBfd(5) or ccAlert(1)|ccTtl(6)|cvLspping(4)|cvBfd(5). The default value is ccAlert(1)|ccTtl(6)|cvLspping(4)|cvBfd(5).') hwPWTemplateFrag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 6), TruthValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateFrag.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateFrag.setDescription('Indicates whether or not fragmentaion is supported.') hwPWTemplateBandwidth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32000000))).setMaxAccess("readonly") if mibBuilder.loadTexts: hwPWTemplateBandwidth.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateBandwidth.setDescription("Indicates the bandwitdh when signaling protocol is rsvp. Currently, can't support.'0' is the default value.") hwPWTemplateTnlPolicyName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 39))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateTnlPolicyName.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateTnlPolicyName.setDescription('Indicates the tunnel policy name used.') hwPWTemplateQoSBehaviorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 9), Integer32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateQoSBehaviorIndex.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateQoSBehaviorIndex.setDescription("Indicates the traffic behavior Index when QOS is implemented.Currently, can't support.'0' is the default value.") hwPWTemplateExplicitPathName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateExplicitPathName.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateExplicitPathName.setDescription("Indicates the explicit path name set by the operator.Currently, can't support.") hwPWTemplateBFDDetectMultiplier = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 11), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 50), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateBFDDetectMultiplier.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateBFDDetectMultiplier.setDescription('The multiple of detection time.') hwPWTemplateBFDMinReceiveInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 12), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 1000), 
))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateBFDMinReceiveInterval.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateBFDMinReceiveInterval.setDescription('The interval of bfd messages to be received.') hwPWTemplateBFDMinTransmitInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 13), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(3, 1000), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateBFDMinTransmitInterval.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateBFDMinTransmitInterval.setDescription('The interval of bfd messages to be sent.') hwPWTemplateDynamicBFDDetect = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 14), TruthValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateDynamicBFDDetect.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateDynamicBFDDetect.setDescription('This value indicates the capacitability to support dynamic BFD detect.') hwPWTemplateMaxAtmCells = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 15), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 28))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateMaxAtmCells.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateMaxAtmCells.setDescription('Specifies the MaxAtmCells.') hwPWTemplateAtmPackOvertime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 16), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 0), ValueRangeConstraint(100, 50000), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateAtmPackOvertime.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateAtmPackOvertime.setDescription('Specifies the AtmPackOvertime.') hwPWTemplatePwJitterBufferDepth = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 17), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 64))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplatePwJitterBufferDepth.setStatus('current') if mibBuilder.loadTexts: hwPWTemplatePwJitterBufferDepth.setDescription('Specifies the PwJitterBufferDepth.') hwPWTemplatePwTdmEncapsulationNum = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 18), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 40))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplatePwTdmEncapsulationNum.setStatus('current') if mibBuilder.loadTexts: hwPWTemplatePwTdmEncapsulationNum.setDescription('Specifies the PwTdmEncapsulationNum.') hwPWTemplatePwIdleCode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 19), Unsigned32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(0, 255), ValueRangeConstraint(65535, 65535), ))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplatePwIdleCode.setStatus('current') if mibBuilder.loadTexts: hwPWTemplatePwIdleCode.setDescription('Specifies the PwIdleCode.') hwPWTemplatePwRtpHeader = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 20), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplatePwRtpHeader.setStatus('current') if mibBuilder.loadTexts: hwPWTemplatePwRtpHeader.setDescription('Specifies the PwRtpHeader.') hwPWTemplatePwCCSeqEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 21), HWEnableValue()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplatePwCCSeqEnable.setStatus('current') if mibBuilder.loadTexts: 
hwPWTemplatePwCCSeqEnable.setDescription('Specifies the CC Sequence is enable or not.') hwPWTemplateCir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 22), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateCir.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateCir.setDescription('Specifies the committed information rate, based on the PW template entry.') hwPWTemplatePir = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 23), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplatePir.setStatus('current') if mibBuilder.loadTexts: hwPWTemplatePir.setDescription('Specifies the peak information rate, based on the PW template entry.') hwPWTemplateQosProfile = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 24), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 31))).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateQosProfile.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateQosProfile.setDescription("Specifies the QoS profile's name, based on the PW template entry.") hwPWTemplateFlowLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 25), EnabledStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateFlowLabel.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateFlowLabel.setDescription('The value of this object identifies whether the PW FlowLabel is enabled.') hwPWTemplateRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 51), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: hwPWTemplateRowStatus.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateRowStatus.setDescription("RowStatus for this Table. Restriction: The row must be created by 'createAndGo' handle only. 
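# Usage sketch (illustrative comment): the BFD timers configured through a PW
# template can be audited with a GETNEXT walk over hwPWTemplateTable, which
# is indexed by hwPWTemplateName. Address and community are placeholders, and
# the compiled MIB is assumed to be available to the resolver.
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, nextCmd)
#
#   for errInd, errStat, errIdx, varBinds in nextCmd(
#           SnmpEngine(), CommunityData('public'),
#           UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#           ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwPWTemplateBFDDetectMultiplier')),
#           ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwPWTemplateBFDMinReceiveInterval')),
#           ObjectType(ObjectIdentity('HUAWEI-PWE3-MIB', 'hwPWTemplateBFDMinTransmitInterval')),
#           lexicographicMode=False):
#       if errInd or errStat:
#           break
#       print(' / '.join('%s = %s' % (n.prettyPrint(), v.prettyPrint())
#                        for n, v in varBinds))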
hwPWTemplateMIBTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 6))
hwPWTemplateCannotDeleted = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 6, 1)).setObjects(("HUAWEI-PWE3-MIB", "hwPWTemplateName"))
if mibBuilder.loadTexts: hwPWTemplateCannotDeleted.setStatus('current')
if mibBuilder.loadTexts: hwPWTemplateCannotDeleted.setDescription('This notification indicates that the PW template cannot be deleted.')
hwPWTableObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 7))
hwPWTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 7, 1), )
if mibBuilder.loadTexts: hwPWTable.setStatus('current')
if mibBuilder.loadTexts: hwPWTable.setDescription('This table indicates a PW, that is, a static PW or an LDP PW.')
hwPWEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 7, 1, 1), ).setIndexNames((0, "HUAWEI-PWE3-MIB", "hwPWId"), (0, "HUAWEI-PWE3-MIB", "hwPWType"), (0, "HUAWEI-PWE3-MIB", "hwPWPeerIp"))
if mibBuilder.loadTexts: hwPWEntry.setStatus('current')
if mibBuilder.loadTexts: hwPWEntry.setDescription('Provides the information of a VC key entry.')
hwPWId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 7, 1, 1, 1), Unsigned32())
if mibBuilder.loadTexts: hwPWId.setStatus('current')
if mibBuilder.loadTexts: hwPWId.setDescription("Index for the conceptual row identifying a PW within this PW Emulation table. Used in the outgoing PW ID field within the 'Virtual Circuit FEC Element'.")
hwPWType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 7, 1, 1, 2), HWL2VpnVcEncapsType())
if mibBuilder.loadTexts: hwPWType.setStatus('current')
if mibBuilder.loadTexts: hwPWType.setDescription('Index for the conceptual row identifying a PW within this PW Emulation table. This value indicates the service to be carried over this PW.')
hwPWPeerIp = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 7, 1, 1, 3), IpAddress())
if mibBuilder.loadTexts: hwPWPeerIp.setStatus('current')
if mibBuilder.loadTexts: hwPWPeerIp.setDescription('This object contains the value of the peer node address of the PW/PE maintenance protocol entity. This object SHOULD contain a value of all zeroes if not applicable.')
hwPWInterfaceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 7, 1, 1, 4), InterfaceIndexOrZero()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwPWInterfaceIndex.setStatus('current')
if mibBuilder.loadTexts: hwPWInterfaceIndex.setDescription('Index of the interface (or the virtual interface) associated with the PW.')
hwPwe3MIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3))
hwPwe3MIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 1))
hwPwe3MIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 1, 1)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcGroup"), ("HUAWEI-PWE3-MIB", "hwPWVcTnlGroup"), ("HUAWEI-PWE3-MIB", "hwPWVcStatisticsGroup"), ("HUAWEI-PWE3-MIB", "hwPWRemoteVcGroup"), ("HUAWEI-PWE3-MIB", "hwPWTemplateGroup"), ("HUAWEI-PWE3-MIB", "hwPWNotificationControlGroup"), ("HUAWEI-PWE3-MIB", "hwPWVcStateChangeReasonGroup"), ("HUAWEI-PWE3-MIB", "hwPWVcNotificationGroup"), ("HUAWEI-PWE3-MIB", "hwPWTableGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPwe3MIBCompliance = hwPwe3MIBCompliance.setStatus('current')
if mibBuilder.loadTexts: hwPwe3MIBCompliance.setDescription('The compliance statement for systems supporting the HUAWEI-PWE3-MIB.')
hwPwe3MIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2))
hwPWVcGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 1)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcPeerAddrType"), ("HUAWEI-PWE3-MIB", "hwPWVcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwPWVcStatus"), ("HUAWEI-PWE3-MIB", "hwPWVcInboundLabel"), ("HUAWEI-PWE3-MIB", "hwPWVcOutboundLabel"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchSign"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchID"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchPeerAddrType"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchPeerAddr"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchInboundLabel"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchOutboundLabel"), ("HUAWEI-PWE3-MIB", "hwPWVcGroupID"), ("HUAWEI-PWE3-MIB", "hwPWVcIfIndex"), ("HUAWEI-PWE3-MIB", "hwPWVcAcStatus"), ("HUAWEI-PWE3-MIB", "hwPWVcACOAMStatus"), ("HUAWEI-PWE3-MIB", "hwPWVcMtu"), ("HUAWEI-PWE3-MIB", "hwPWVcCtrlWord"), ("HUAWEI-PWE3-MIB", "hwPWVcVCCV"), ("HUAWEI-PWE3-MIB", "hwPWVcBandWidth"), ("HUAWEI-PWE3-MIB", "hwPWVcMaxAtmCells"), ("HUAWEI-PWE3-MIB", "hwPWVcTnlPolicyName"), ("HUAWEI-PWE3-MIB", "hwPWVcQoSBehaviorIndex"), ("HUAWEI-PWE3-MIB", "hwPWVcExplicitPathName"), ("HUAWEI-PWE3-MIB", "hwPWVcTemplateName"), ("HUAWEI-PWE3-MIB", "hwPWVcSecondary"), ("HUAWEI-PWE3-MIB", "hwPWVcUpTime"), ("HUAWEI-PWE3-MIB", "hwPWOAMSync"), ("HUAWEI-PWE3-MIB", "hwPWVCForBfdIndex"), ("HUAWEI-PWE3-MIB", "hwPWVcDelayTime"), ("HUAWEI-PWE3-MIB", "hwPWVcReroutePolicy"), ("HUAWEI-PWE3-MIB", "hwPWVcResumeTime"), ("HUAWEI-PWE3-MIB", "hwPWVcRerouteReason"), ("HUAWEI-PWE3-MIB", "hwPWVcLastRerouteTime"), ("HUAWEI-PWE3-MIB", "hwPWVcManualSetFault"), ("HUAWEI-PWE3-MIB", "hwPWVcActive"), ("HUAWEI-PWE3-MIB", "hwPWVcVrIfIndex"), ("HUAWEI-PWE3-MIB", "hwPWVcVrID"), ("HUAWEI-PWE3-MIB", "hwPWBFDDetectMultiplier"), ("HUAWEI-PWE3-MIB", "hwPWBFDMinReceiveInterval"), ("HUAWEI-PWE3-MIB", "hwPWBFDMinTransmitInterval"), ("HUAWEI-PWE3-MIB", "hwPWDynamicBFDDetect"), ("HUAWEI-PWE3-MIB", "hwPWBFDRemoteVcID"), ("HUAWEI-PWE3-MIB", "hwPWEthOamType"), ("HUAWEI-PWE3-MIB", "hwPWCfmMaIndex"), ("HUAWEI-PWE3-MIB", "hwPWVcUpStartTime"), ("HUAWEI-PWE3-MIB", "hwPWVcUpSumTime"), ("HUAWEI-PWE3-MIB", "hwPWVcIfName"), ("HUAWEI-PWE3-MIB", "hwPWVcRowStatus"), ("HUAWEI-PWE3-MIB", "hwPWVcAtmPackOvertime"), ("HUAWEI-PWE3-MIB", "hwPWVcPwJitterBufferDepth"), ("HUAWEI-PWE3-MIB", "hwPWVcPwTdmEncapsulationNum"), ("HUAWEI-PWE3-MIB", "hwPWVcPwIdleCode"), ("HUAWEI-PWE3-MIB", "hwPWVcPwRtpHeader"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchTnlPolicyName"), ("HUAWEI-PWE3-MIB", "hwPWVcCfmMdIndex"), ("HUAWEI-PWE3-MIB", "hwPWVcCfmMaName"), ("HUAWEI-PWE3-MIB", "hwPWVcCfmMdName"), ("HUAWEI-PWE3-MIB", "hwPWVcRawOrTagged"), ("HUAWEI-PWE3-MIB", "hwPWVcInterworkingType"), ("HUAWEI-PWE3-MIB", "hwPWVcCir"), ("HUAWEI-PWE3-MIB", "hwPWVcPir"), ("HUAWEI-PWE3-MIB", "hwPWVcQosProfile"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchCir"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchPir"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchQosProfile"), ("HUAWEI-PWE3-MIB", "hwPWVcTrigger"), ("HUAWEI-PWE3-MIB", "hwPWVcEnableACOAM"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchVrIfIndex"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchVrID"), ("HUAWEI-PWE3-MIB", "hwPWVcQosParaFromPWT"), ("HUAWEI-PWE3-MIB", "hwPWVcBfdParaFromPWT"), ("HUAWEI-PWE3-MIB", "hwPwVcNegotiateMode"), ("HUAWEI-PWE3-MIB", "hwPwVcIsBypass"), ("HUAWEI-PWE3-MIB", "hwPwVcIsAdmin"), ("HUAWEI-PWE3-MIB", "hwPwVcAdminPwIfIndex"), ("HUAWEI-PWE3-MIB", "hwPwVcAdminPwLinkStatus"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchAdminPwIfIndex"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchAdminPwLinkStatus"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupAdminPwIfIndex"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupAdminPwLinkStatus"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcId"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcPeerAddrType"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcReceiveLabel"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcSendLabel"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcTnlPolicyName"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcCir"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcPir"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcQosProfile"), ("HUAWEI-PWE3-MIB", "hwPwVcSlaveMasterMode"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchVcSlaveMasterMode"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcSlaveMasterMode"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchVcActive"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcActive"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchCwTrans"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchVcServiceName"), ("HUAWEI-PWE3-MIB", "hwPwVcSwitchBackupVcServiceName"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWVcGroup = hwPWVcGroup.setStatus('current')
if mibBuilder.loadTexts: hwPWVcGroup.setDescription("The PWE3 VC group.")
hwPWVcTnlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 2)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcTnlType"), ("HUAWEI-PWE3-MIB", "hwPWTnlForBfdIndex"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWVcTnlGroup = hwPWVcTnlGroup.setStatus('current')
if mibBuilder.loadTexts: hwPWVcTnlGroup.setDescription("The PWE3 VC tunnel group.")
hwPWVcStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 3)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcStatisticsRcvPkts"), ("HUAWEI-PWE3-MIB", "hwPWVcStatisticsRcvBytes"), ("HUAWEI-PWE3-MIB", "hwPWVcStatisticsSndPkts"), ("HUAWEI-PWE3-MIB", "hwPWVcStatisticsSndBytes"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWVcStatisticsGroup = hwPWVcStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: hwPWVcStatisticsGroup.setDescription("The PWE3 VC statistics group.")
hwPWRemoteVcGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 4)).setObjects(("HUAWEI-PWE3-MIB", "hwPWRemoteVcID"), ("HUAWEI-PWE3-MIB", "hwPWRemoteVcType"), ("HUAWEI-PWE3-MIB", "hwPWRemoteVcStatus"), ("HUAWEI-PWE3-MIB",
"hwPWRemoteVcGroupID"), ("HUAWEI-PWE3-MIB", "hwPWRemoteVcMtu"), ("HUAWEI-PWE3-MIB", "hwPWRemoteVcCtrlword"), ("HUAWEI-PWE3-MIB", "hwPWRemoteVcMaxAtmCells"), ("HUAWEI-PWE3-MIB", "hwPWRemoteVcNotif")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWRemoteVcGroup = hwPWRemoteVcGroup.setStatus('current') if mibBuilder.loadTexts: hwPWRemoteVcGroup.setDescription("The PWE3's Remote VC group.") hwPWTemplateGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 5)).setObjects(("HUAWEI-PWE3-MIB", "hwPWTemplatePeerAddrType"), ("HUAWEI-PWE3-MIB", "hwPWTemplatePeerAddr"), ("HUAWEI-PWE3-MIB", "hwPWTemplateCtrlword"), ("HUAWEI-PWE3-MIB", "hwPWTemplateVCCV"), ("HUAWEI-PWE3-MIB", "hwPWTemplateFrag"), ("HUAWEI-PWE3-MIB", "hwPWTemplateBandwidth"), ("HUAWEI-PWE3-MIB", "hwPWTemplateTnlPolicyName"), ("HUAWEI-PWE3-MIB", "hwPWTemplateQoSBehaviorIndex"), ("HUAWEI-PWE3-MIB", "hwPWTemplateExplicitPathName"), ("HUAWEI-PWE3-MIB", "hwPWTemplateBFDDetectMultiplier"), ("HUAWEI-PWE3-MIB", "hwPWTemplateBFDMinReceiveInterval"), ("HUAWEI-PWE3-MIB", "hwPWTemplateBFDMinTransmitInterval"), ("HUAWEI-PWE3-MIB", "hwPWTemplateDynamicBFDDetect"), ("HUAWEI-PWE3-MIB", "hwPWTemplateMaxAtmCells"), ("HUAWEI-PWE3-MIB", "hwPWTemplateAtmPackOvertime"), ("HUAWEI-PWE3-MIB", "hwPWTemplatePwJitterBufferDepth"), ("HUAWEI-PWE3-MIB", "hwPWTemplatePwTdmEncapsulationNum"), ("HUAWEI-PWE3-MIB", "hwPWTemplatePwIdleCode"), ("HUAWEI-PWE3-MIB", "hwPWTemplatePwRtpHeader"), ("HUAWEI-PWE3-MIB", "hwPWTemplatePwCCSeqEnable"), ("HUAWEI-PWE3-MIB", "hwPWTemplateCir"), ("HUAWEI-PWE3-MIB", "hwPWTemplatePir"), ("HUAWEI-PWE3-MIB", "hwPWTemplateQosProfile"), ("HUAWEI-PWE3-MIB", "hwPWTemplateFlowLabel"), ("HUAWEI-PWE3-MIB", "hwPWTemplateRowStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWTemplateGroup = hwPWTemplateGroup.setStatus('current') if mibBuilder.loadTexts: hwPWTemplateGroup.setDescription("The PWE3's Template group.") hwPWNotificationControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 6)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcSwitchNotifEnable"), ("HUAWEI-PWE3-MIB", "hwPWVcUpDownNotifEnable"), ("HUAWEI-PWE3-MIB", "hwPWVcDeletedNotifEnable")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWNotificationControlGroup = hwPWNotificationControlGroup.setStatus('current') if mibBuilder.loadTexts: hwPWNotificationControlGroup.setDescription("The PWE3's Notification Control group.") hwPWVcStateChangeReasonGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 7)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcStateChangeReason"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchRmtID")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWVcStateChangeReasonGroup = hwPWVcStateChangeReasonGroup.setStatus('current') if mibBuilder.loadTexts: hwPWVcStateChangeReasonGroup.setDescription("The PWE3's Vc State Reason group.") hwPWVcNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 8)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcSwitchWtoP"), ("HUAWEI-PWE3-MIB", "hwPWVcSwitchPtoW"), ("HUAWEI-PWE3-MIB", "hwPWVcDown"), ("HUAWEI-PWE3-MIB", "hwPWVcUp"), ("HUAWEI-PWE3-MIB", "hwPWVcDeleted"), ("HUAWEI-PWE3-MIB", "hwPWVcBackup"), ("HUAWEI-PWE3-MIB", "hwLdpPWVcDown"), ("HUAWEI-PWE3-MIB", "hwLdpPWVcUp")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWVcNotificationGroup = hwPWVcNotificationGroup.setStatus('current') if mibBuilder.loadTexts: hwPWVcNotificationGroup.setDescription("The PWE3's VC Notification group.") hwLdpPWStateChangeReasonGroup = ObjectGroup((1, 3, 
6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 9)).setObjects(("HUAWEI-PWE3-MIB", "hwLdpPWStateChangeReason")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwLdpPWStateChangeReasonGroup = hwLdpPWStateChangeReasonGroup.setStatus('current') if mibBuilder.loadTexts: hwLdpPWStateChangeReasonGroup.setDescription('The LDP PW VC State Reason group.') hwPWVcTDMPerfCurrentGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 2, 10)).setObjects(("HUAWEI-PWE3-MIB", "hwPWVcTDMPerfCurrentMissingPkts"), ("HUAWEI-PWE3-MIB", "hwPWVcTDMPerfCurrentJtrBfrOverruns"), ("HUAWEI-PWE3-MIB", "hwPWVcTDMPerfCurrentJtrBfrUnderruns"), ("HUAWEI-PWE3-MIB", "hwPWVcTDMPerfCurrentMisOrderDropped"), ("HUAWEI-PWE3-MIB", "hwPWVcTDMPerfCurrentMalformedPkt"), ("HUAWEI-PWE3-MIB", "hwPWVcTDMPerfCurrentESs"), ("HUAWEI-PWE3-MIB", "hwPWVcTDMPerfCurrentSESs"), ("HUAWEI-PWE3-MIB", "hwPWVcTDMPerfCurrentUASs")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWVcTDMPerfCurrentGroup = hwPWVcTDMPerfCurrentGroup.setStatus('current') if mibBuilder.loadTexts: hwPWVcTDMPerfCurrentGroup.setDescription("The PWE3's VC TDM performance information group.") hwL2vpnSvcMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 3)) hwSvcGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 3, 1)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcID"), ("HUAWEI-PWE3-MIB", "hwSvcType"), ("HUAWEI-PWE3-MIB", "hwSvcPeerAddrType"), ("HUAWEI-PWE3-MIB", "hwSvcPeerAddr"), ("HUAWEI-PWE3-MIB", "hwSvcStatus"), ("HUAWEI-PWE3-MIB", "hwSvcInboundLabel"), ("HUAWEI-PWE3-MIB", "hwSvcOutboundLabel"), ("HUAWEI-PWE3-MIB", "hwSvcGroupID"), ("HUAWEI-PWE3-MIB", "hwSvcAcStatus"), ("HUAWEI-PWE3-MIB", "hwSvcACOAMStatus"), ("HUAWEI-PWE3-MIB", "hwSvcMtu"), ("HUAWEI-PWE3-MIB", "hwSvcCtrlWord"), ("HUAWEI-PWE3-MIB", "hwSvcVCCV"), ("HUAWEI-PWE3-MIB", "hwSvcBandWidth"), ("HUAWEI-PWE3-MIB", "hwSvcMaxAtmCells"), ("HUAWEI-PWE3-MIB", "hwSvcTnlPolicyName"), ("HUAWEI-PWE3-MIB", "hwSvcQoSBehaviorIndex"), ("HUAWEI-PWE3-MIB", "hwSvcPWTemplateName"), ("HUAWEI-PWE3-MIB", "hwSvcUpTime"), ("HUAWEI-PWE3-MIB", "hwSvcOAMSync"), ("HUAWEI-PWE3-MIB", "hwSvcForBfdIndex"), ("HUAWEI-PWE3-MIB", "hwSvcSecondary"), ("HUAWEI-PWE3-MIB", "hwSvcDelayTime"), ("HUAWEI-PWE3-MIB", "hwSvcReroutePolicy"), ("HUAWEI-PWE3-MIB", "hwSvcResumeTime"), ("HUAWEI-PWE3-MIB", "hwSvcRerouteReason"), ("HUAWEI-PWE3-MIB", "hwSvcLastRerouteTime"), ("HUAWEI-PWE3-MIB", "hwSvcManualSetFault"), ("HUAWEI-PWE3-MIB", "hwSvcActive"), ("HUAWEI-PWE3-MIB", "hwSvcUpStartTime"), ("HUAWEI-PWE3-MIB", "hwSvcUpSumTime"), ("HUAWEI-PWE3-MIB", "hwSvcAtmPackOvertime"), ("HUAWEI-PWE3-MIB", "hwSvcPwJitterBufferDepth"), ("HUAWEI-PWE3-MIB", "hwSvcPwTdmEncapsulationNum"), ("HUAWEI-PWE3-MIB", "hwSvcPwIdleCode"), ("HUAWEI-PWE3-MIB", "hwSvcPwRtpHeader"), ("HUAWEI-PWE3-MIB", "hwSvcRawOrTagged"), ("HUAWEI-PWE3-MIB", "hwSvcInterworkingType"), ("HUAWEI-PWE3-MIB", "hwSvcCir"), ("HUAWEI-PWE3-MIB", "hwSvcPir"), ("HUAWEI-PWE3-MIB", "hwSvcQosProfile"), ("HUAWEI-PWE3-MIB", "hwSvcRowStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwSvcGroup = hwSvcGroup.setStatus('current') if mibBuilder.loadTexts: hwSvcGroup.setDescription("The L2vpn's SVC group.") hwSvcTnlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 3, 2)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcTnlType"), ("HUAWEI-PWE3-MIB", "hwSvcTnlForBfdIndex")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwSvcTnlGroup = hwSvcTnlGroup.setStatus('current') if mibBuilder.loadTexts: hwSvcTnlGroup.setDescription("The L2vpn's SVC Tunnel group.") 
hwSvcStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 3, 3)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcStatisticsRcvPkts"), ("HUAWEI-PWE3-MIB", "hwSvcStatisticsRcvBytes"), ("HUAWEI-PWE3-MIB", "hwSvcStatisticsSndPkts"), ("HUAWEI-PWE3-MIB", "hwSvcStatisticsSndBytes")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwSvcStatisticsGroup = hwSvcStatisticsGroup.setStatus('current') if mibBuilder.loadTexts: hwSvcStatisticsGroup.setDescription("The L2vpn's SVC Statistics group.") hwSvcNotificationControlGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 3, 4)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcSwitchNotifEnable"), ("HUAWEI-PWE3-MIB", "hwSvcUpDownNotifEnable"), ("HUAWEI-PWE3-MIB", "hwSvcDeletedNotifEnable")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwSvcNotificationControlGroup = hwSvcNotificationControlGroup.setStatus('current') if mibBuilder.loadTexts: hwSvcNotificationControlGroup.setDescription("The L2vpn SVC's Notification Control group.") hwSvcStateChangeReasonGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 3, 5)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcStateChangeReason")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwSvcStateChangeReasonGroup = hwSvcStateChangeReasonGroup.setStatus('current') if mibBuilder.loadTexts: hwSvcStateChangeReasonGroup.setDescription("The L2vpn's SVc State Reason group.") hwSvcNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 3, 6)).setObjects(("HUAWEI-PWE3-MIB", "hwSvcSwitchWtoP"), ("HUAWEI-PWE3-MIB", "hwSvcSwitchPtoW"), ("HUAWEI-PWE3-MIB", "hwSvcDown"), ("HUAWEI-PWE3-MIB", "hwSvcUp"), ("HUAWEI-PWE3-MIB", "hwSvcDeleted")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwSvcNotificationGroup = hwSvcNotificationGroup.setStatus('current') if mibBuilder.loadTexts: hwSvcNotificationGroup.setDescription("The L2vpn's SVC Notification group.") hwL2vpnPWTableMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 4)) hwPWTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 3, 4, 1)).setObjects(("HUAWEI-PWE3-MIB", "hwPWInterfaceIndex")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): hwPWTableGroup = hwPWTableGroup.setStatus('current') if mibBuilder.loadTexts: hwPWTableGroup.setDescription('The PW Table Group.') mibBuilder.exportSymbols("HUAWEI-PWE3-MIB", hwPWTemplateVCCV=hwPWTemplateVCCV, hwPWVcVCCV=hwPWVcVCCV, hwPWVcUp=hwPWVcUp, hwL2VpnPwe3=hwL2VpnPwe3, hwSvcInboundLabel=hwSvcInboundLabel, hwPwVcSwitchAdminPwIfIndex=hwPwVcSwitchAdminPwIfIndex, hwPWVcQosProfile=hwPWVcQosProfile, hwSvcInterworkingType=hwSvcInterworkingType, hwSvcTnlEntry=hwSvcTnlEntry, hwPWEntry=hwPWEntry, HWLdpPwStateChangeReason=HWLdpPwStateChangeReason, hwPWVcTDMPerfCurrentMissingPkts=hwPWVcTDMPerfCurrentMissingPkts, hwPWVcBackup=hwPWVcBackup, hwSvcStateChangeReason=hwSvcStateChangeReason, hwPWTemplatePwTdmEncapsulationNum=hwPWTemplatePwTdmEncapsulationNum, hwPWVcActive=hwPWVcActive, hwPWVcExplicitPathName=hwPWVcExplicitPathName, hwPWVcIfName=hwPWVcIfName, hwPWTemplateAtmPackOvertime=hwPWTemplateAtmPackOvertime, hwPWTemplatePwCCSeqEnable=hwPWTemplatePwCCSeqEnable, hwSvcEntry=hwSvcEntry, hwPWTemplateTnlPolicyName=hwPWTemplateTnlPolicyName, hwPWVCForBfdIndex=hwPWVCForBfdIndex, hwPWVcTnlEntry=hwPWVcTnlEntry, hwSvcStatisticsTable=hwSvcStatisticsTable, hwPWTemplateBFDDetectMultiplier=hwPWTemplateBFDDetectMultiplier, hwSvcStatisticsRcvBytes=hwSvcStatisticsRcvBytes, hwPWVcRerouteReason=hwPWVcRerouteReason, hwSvcSwitchWtoP=hwSvcSwitchWtoP, 
hwPWVcQoSBehaviorIndex=hwPWVcQoSBehaviorIndex, hwSvcTnlType=hwSvcTnlType, hwL2vpnSvcMIBTraps=hwL2vpnSvcMIBTraps, hwPWVcUpStartTime=hwPWVcUpStartTime, hwPwVcSwitchVcActive=hwPwVcSwitchVcActive, hwSvcOAMSync=hwSvcOAMSync, hwSvcLastRerouteTime=hwSvcLastRerouteTime, hwPWVcCfmMdIndex=hwPWVcCfmMdIndex, hwPWVcPwIdleCode=hwPWVcPwIdleCode, hwPWVcSwitchVrID=hwPWVcSwitchVrID, hwPWTemplatePwJitterBufferDepth=hwPWTemplatePwJitterBufferDepth, hwPWVcPwTdmEncapsulationNum=hwPWVcPwTdmEncapsulationNum, hwLdpPWVcUp=hwLdpPWVcUp, hwPWVcGroup=hwPWVcGroup, hwPWRemoteVcMtu=hwPWRemoteVcMtu, hwSvcStatisticsSndBytes=hwSvcStatisticsSndBytes, hwSvcTnlIndex=hwSvcTnlIndex, hwPwVcSwitchBackupVcActive=hwPwVcSwitchBackupVcActive, hwPWRemoteVcGroup=hwPWRemoteVcGroup, hwSvcGroup=hwSvcGroup, hwPWVcAcStatus=hwPWVcAcStatus, hwPWRemoteVcCtrlword=hwPWRemoteVcCtrlword, hwPWVcUpDownNotifEnable=hwPWVcUpDownNotifEnable, hwSvcNotificationGroup=hwSvcNotificationGroup, hwPWVcType=hwPWVcType, hwPWVcSwitchInboundLabel=hwPWVcSwitchInboundLabel, hwPWVcStatisticsRcvPkts=hwPWVcStatisticsRcvPkts, hwSvcStatus=hwSvcStatus, hwSvcDeletedNotifEnable=hwSvcDeletedNotifEnable, hwSvcStatisticsSndPkts=hwSvcStatisticsSndPkts, hwPWBFDDetectMultiplier=hwPWBFDDetectMultiplier, hwPWVcCfmMdName=hwPWVcCfmMdName, hwPWVcManualSetFault=hwPWVcManualSetFault, hwPwVcSlaveMasterMode=hwPwVcSlaveMasterMode, hwPWTemplateFrag=hwPWTemplateFrag, hwPWVcTDMPerfCurrentMalformedPkt=hwPWVcTDMPerfCurrentMalformedPkt, hwSvcRowStatus=hwSvcRowStatus, hwSvcNotificationControlGroup=hwSvcNotificationControlGroup, hwPwVcSwitchBackupVcCir=hwPwVcSwitchBackupVcCir, hwPWRemoteVcTable=hwPWRemoteVcTable, hwSvcAcStatus=hwSvcAcStatus, hwPWVcTDMPerfCurrentUASs=hwPWVcTDMPerfCurrentUASs, PYSNMP_MODULE_ID=hwL2VpnPwe3, hwSvcStatisticsEntry=hwSvcStatisticsEntry, hwPWVcSwitchID=hwPWVcSwitchID, hwSvcPWTemplateName=hwSvcPWTemplateName, hwPWVcSwitchPir=hwPWVcSwitchPir, hwPwVcSwitchCwTrans=hwPwVcSwitchCwTrans, hwPWBFDMinReceiveInterval=hwPWBFDMinReceiveInterval, hwPWVcSwitchPeerAddrType=hwPWVcSwitchPeerAddrType, hwPWVcSwitchTnlPolicyName=hwPWVcSwitchTnlPolicyName, hwPWVcPir=hwPWVcPir, hwPwVcAdminPwIfIndex=hwPwVcAdminPwIfIndex, hwSvcQoSBehaviorIndex=hwSvcQoSBehaviorIndex, hwPWVcEntry=hwPWVcEntry, hwPwVcIsBypass=hwPwVcIsBypass, hwPWVcStatus=hwPWVcStatus, hwPWVcReroutePolicy=hwPWVcReroutePolicy, hwPWVcMtu=hwPWVcMtu, hwPWVcLastRerouteTime=hwPWVcLastRerouteTime, hwPwVcSwitchBackupVcPeerAddrType=hwPwVcSwitchBackupVcPeerAddrType, hwSvcCtrlWord=hwSvcCtrlWord, hwSvcUpSumTime=hwSvcUpSumTime, hwSvcPwTdmEncapsulationNum=hwSvcPwTdmEncapsulationNum, hwSvcRawOrTagged=hwSvcRawOrTagged, hwPWTemplateExplicitPathName=hwPWTemplateExplicitPathName, hwPWVcNotificationGroup=hwPWVcNotificationGroup, hwPWTemplatePir=hwPWTemplatePir, hwPWTemplateMaxAtmCells=hwPWTemplateMaxAtmCells, hwPWRemoteVcMaxAtmCells=hwPWRemoteVcMaxAtmCells, hwPwVcAdminPwLinkStatus=hwPwVcAdminPwLinkStatus, hwPWTemplatePeerAddrType=hwPWTemplatePeerAddrType, hwPWInterfaceIndex=hwPWInterfaceIndex, hwPWVcPeerAddrType=hwPWVcPeerAddrType, hwPWVcVrIfIndex=hwPWVcVrIfIndex, hwPWTemplateDynamicBFDDetect=hwPWTemplateDynamicBFDDetect, hwPWVcEnableACOAM=hwPWVcEnableACOAM, hwPwVcSwitchBackupVcReceiveLabel=hwPwVcSwitchBackupVcReceiveLabel, hwPWTemplateBandwidth=hwPWTemplateBandwidth, hwPWVcUpTime=hwPWVcUpTime, hwPwVcSwitchBackupAdminPwLinkStatus=hwPwVcSwitchBackupAdminPwLinkStatus, hwPWVcStatisticsTable=hwPWVcStatisticsTable, hwPWVcTnlIndex=hwPWVcTnlIndex, hwPWVcStatisticsSndPkts=hwPWVcStatisticsSndPkts, hwPWVcCtrlWord=hwPWVcCtrlWord, 
hwLdpPWStateChangeReasonGroup=hwLdpPWStateChangeReasonGroup, hwPWTemplateQosProfile=hwPWTemplateQosProfile, hwPwVcSwitchBackupVcId=hwPwVcSwitchBackupVcId, hwSvcActive=hwSvcActive, hwPWVcTDMPerfCurrentEntry=hwPWVcTDMPerfCurrentEntry, hwSvcPwRtpHeader=hwSvcPwRtpHeader, hwPwe3MIBObjects=hwPwe3MIBObjects, hwSvcUpDownNotifEnable=hwSvcUpDownNotifEnable, hwPWTemplateGroup=hwPWTemplateGroup, hwSvcReroutePolicy=hwSvcReroutePolicy, hwSvcPwJitterBufferDepth=hwSvcPwJitterBufferDepth, hwLdpPWStateChangeReason=hwLdpPWStateChangeReason, hwPWTemplatePwIdleCode=hwPWTemplatePwIdleCode, hwPWVcID=hwPWVcID, hwPWVcTnlPolicyName=hwPWVcTnlPolicyName, hwPWVcTDMPerfCurrentESs=hwPWVcTDMPerfCurrentESs, hwPWTemplateBFDMinTransmitInterval=hwPWTemplateBFDMinTransmitInterval, hwSvcStateChangeReasonGroup=hwSvcStateChangeReasonGroup, hwPwVcSwitchBackupVcSendLabel=hwPwVcSwitchBackupVcSendLabel, hwPWTnlForBfdIndex=hwPWTnlForBfdIndex, hwSvcBandWidth=hwSvcBandWidth, hwPWVcTDMPerfCurrentMisOrderDropped=hwPWVcTDMPerfCurrentMisOrderDropped, hwPWTemplateName=hwPWTemplateName, hwPWVcCir=hwPWVcCir, hwPWTableGroup=hwPWTableGroup, hwSvcTnlTable=hwSvcTnlTable, hwPWVcOutboundLabel=hwPWVcOutboundLabel, hwPWTableObjects=hwPWTableObjects, hwPWRemoteVcGroupID=hwPWRemoteVcGroupID, hwPWVcStateChangeReason=hwPWVcStateChangeReason, hwSvcQosProfile=hwSvcQosProfile, hwSvcUpTime=hwSvcUpTime, hwSvcPwIdleCode=hwSvcPwIdleCode, hwPWVcSwitchRmtID=hwPWVcSwitchRmtID, hwPWVcInboundLabel=hwPWVcInboundLabel, hwPWVcBfdParaFromPWT=hwPWVcBfdParaFromPWT, hwPWVcSwitchNotifEnable=hwPWVcSwitchNotifEnable, hwSvcDeleted=hwSvcDeleted, hwSvcRerouteReason=hwSvcRerouteReason, hwPWVcQosParaFromPWT=hwPWVcQosParaFromPWT, hwPWRemoteVcType=hwPWRemoteVcType, hwPWVcRowStatus=hwPWVcRowStatus, hwPWVcRawOrTagged=hwPWVcRawOrTagged, hwPWTemplateMIBTraps=hwPWTemplateMIBTraps, hwPWVcACOAMStatus=hwPWVcACOAMStatus, hwPWBFDMinTransmitInterval=hwPWBFDMinTransmitInterval, hwPWVcSwitchQosProfile=hwPWVcSwitchQosProfile, hwPWVcSwitchVrIfIndex=hwPWVcSwitchVrIfIndex, hwPWType=hwPWType, hwPwVcSwitchBackupVcPir=hwPwVcSwitchBackupVcPir, hwPWVcSwitchSign=hwPWVcSwitchSign, hwPwVcSwitchBackupVcQosProfile=hwPwVcSwitchBackupVcQosProfile, hwPWTemplateFlowLabel=hwPWTemplateFlowLabel, hwPWVcTDMPerfCurrentJtrBfrOverruns=hwPWVcTDMPerfCurrentJtrBfrOverruns, hwPWVcGroupID=hwPWVcGroupID, hwPWVcUpSumTime=hwPWVcUpSumTime, hwPwe3MIBTraps=hwPwe3MIBTraps, hwPWVcTDMPerfCurrentTable=hwPWVcTDMPerfCurrentTable, hwSvcResumeTime=hwSvcResumeTime, hwPwVcSwitchBackupAdminPwIfIndex=hwPwVcSwitchBackupAdminPwIfIndex, hwPWTemplateRowStatus=hwPWTemplateRowStatus, hwPwe3MIBGroups=hwPwe3MIBGroups, hwSvcType=hwSvcType, hwSvcVCCV=hwSvcVCCV, hwPWVcStatisticsSndBytes=hwPWVcStatisticsSndBytes, hwSvcDelayTime=hwSvcDelayTime, hwSvcPir=hwSvcPir, hwPWVcPwRtpHeader=hwPWVcPwRtpHeader, hwPWRemoteVcNotif=hwPWRemoteVcNotif, hwPWTemplateQoSBehaviorIndex=hwPWTemplateQoSBehaviorIndex, hwL2vpnPWTableMIBGroups=hwL2vpnPWTableMIBGroups, hwPWVcTable=hwPWVcTable, hwPwVcSwitchAdminPwLinkStatus=hwPwVcSwitchAdminPwLinkStatus, hwPWRemoteVcEntry=hwPWRemoteVcEntry, hwLdpPWVcDown=hwLdpPWVcDown, hwSvcAtmPackOvertime=hwSvcAtmPackOvertime, hwPwe3MIBCompliance=hwPwe3MIBCompliance, hwPWVcPwJitterBufferDepth=hwPWVcPwJitterBufferDepth, hwPWVcSwitchCir=hwPWVcSwitchCir, hwPwVcSwitchVcServiceName=hwPwVcSwitchVcServiceName, hwPWTemplateBFDMinReceiveInterval=hwPWTemplateBFDMinReceiveInterval, hwPWVcDelayTime=hwPWVcDelayTime, hwSvcStatisticsGroup=hwSvcStatisticsGroup, hwPWVcTnlGroup=hwPWVcTnlGroup, hwSvcUp=hwSvcUp, hwSvcPeerAddrType=hwSvcPeerAddrType, 
hwPWVcStatisticsGroup=hwPWVcStatisticsGroup, hwPwe3MIBCompliances=hwPwe3MIBCompliances, hwL2Vpn=hwL2Vpn, hwPWVcInterworkingType=hwPWVcInterworkingType, hwPwe3Objects=hwPwe3Objects, hwPWVcSwitchPeerAddr=hwPWVcSwitchPeerAddr, hwPwVcIsAdmin=hwPwVcIsAdmin, hwPWOAMSync=hwPWOAMSync, hwPWVcTnlTable=hwPWVcTnlTable, hwSvcOutboundLabel=hwSvcOutboundLabel, hwPWTemplatePeerAddr=hwPWTemplatePeerAddr, hwPWVcMaxAtmCells=hwPWVcMaxAtmCells, hwPWVcCfmMaName=hwPWVcCfmMaName, hwPwVcSwitchBackupVcServiceName=hwPwVcSwitchBackupVcServiceName, hwPWVcStatisticsRcvBytes=hwPWVcStatisticsRcvBytes, hwSvcMtu=hwSvcMtu, hwSvcTnlForBfdIndex=hwSvcTnlForBfdIndex, hwPWVcTDMPerfCurrentGroup=hwPWVcTDMPerfCurrentGroup, hwPWBFDRemoteVcID=hwPWBFDRemoteVcID, hwPWVcTrigger=hwPWVcTrigger, hwSvcSwitchPtoW=hwSvcSwitchPtoW, hwPwVcSwitchBackupVcSlaveMasterMode=hwPwVcSwitchBackupVcSlaveMasterMode, hwPwe3MIBConformance=hwPwe3MIBConformance, hwSvcID=hwSvcID, hwPWTemplatePwRtpHeader=hwPWTemplatePwRtpHeader, hwSvcMaxAtmCells=hwSvcMaxAtmCells, hwSvcIfIndex=hwSvcIfIndex, hwPWEthOamType=hwPWEthOamType, hwPWVcIfIndex=hwPWVcIfIndex, hwSvcTnlPolicyName=hwSvcTnlPolicyName, hwPWTemplateCir=hwPWTemplateCir, hwPWVcStatisticsEntry=hwPWVcStatisticsEntry, hwPWVcDeleted=hwPWVcDeleted, hwPWRemoteVcStatus=hwPWRemoteVcStatus, hwPWVcTnlType=hwPWVcTnlType, hwSvcTnlGroup=hwSvcTnlGroup, hwPWVcResumeTime=hwPWVcResumeTime, hwPWVcTemplateName=hwPWVcTemplateName, hwPWVcSwitchOutboundLabel=hwPWVcSwitchOutboundLabel, hwSvcPeerAddr=hwSvcPeerAddr, hwPWId=hwPWId, hwPwVcSwitchVcSlaveMasterMode=hwPwVcSwitchVcSlaveMasterMode, hwPWCfmMaIndex=hwPWCfmMaIndex, hwPWTemplateCannotDeleted=hwPWTemplateCannotDeleted, hwPWVcSwitchPtoW=hwPWVcSwitchPtoW, hwPWVcAtmPackOvertime=hwPWVcAtmPackOvertime, hwPWVcSwitchWtoP=hwPWVcSwitchWtoP, hwPWTemplateCtrlword=hwPWTemplateCtrlword, hwPWVcTDMPerfCurrentJtrBfrUnderruns=hwPWVcTDMPerfCurrentJtrBfrUnderruns, hwSvcGroupID=hwSvcGroupID, hwPWRemoteVcID=hwPWRemoteVcID, hwPWVcDeletedNotifEnable=hwPWVcDeletedNotifEnable, hwPWVcBandWidth=hwPWVcBandWidth, hwPwVcNegotiateMode=hwPwVcNegotiateMode) mibBuilder.exportSymbols("HUAWEI-PWE3-MIB", hwSvcSwitchNotifEnable=hwSvcSwitchNotifEnable, hwSvcStatisticsRcvPkts=hwSvcStatisticsRcvPkts, hwPWVcDown=hwPWVcDown, hwPWVcTDMPerfCurrentSESs=hwPWVcTDMPerfCurrentSESs, hwPWVcStateChangeReasonGroup=hwPWVcStateChangeReasonGroup, hwPWVcPeerAddr=hwPWVcPeerAddr, hwPWVcVrID=hwPWVcVrID, hwPWVcSecondary=hwPWVcSecondary, hwPwVcSwitchBackupVcPeerAddr=hwPwVcSwitchBackupVcPeerAddr, hwPWNotificationControlGroup=hwPWNotificationControlGroup, hwSvcManualSetFault=hwSvcManualSetFault, hwSvcObjects=hwSvcObjects, hwSvcACOAMStatus=hwSvcACOAMStatus, hwSvcUpStartTime=hwSvcUpStartTime, hwPwVcSwitchBackupVcTnlPolicyName=hwPwVcSwitchBackupVcTnlPolicyName, hwPWTable=hwPWTable, hwSvcTable=hwSvcTable, hwPWTemplateTable=hwPWTemplateTable, hwSvcSecondary=hwSvcSecondary, hwPWPeerIp=hwPWPeerIp, hwL2vpnSvcMIBGroups=hwL2vpnSvcMIBGroups, hwSvcForBfdIndex=hwSvcForBfdIndex, hwSvcCir=hwSvcCir, hwPWDynamicBFDDetect=hwPWDynamicBFDDetect, hwSvcDown=hwSvcDown, hwPWTemplateEntry=hwPWTemplateEntry)
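The module above only defines and exports MIB symbols; nothing in it talks to a device. A minimal sketch of loading it with pysnmp and resolving one exported object follows; the directory path is an assumption (the generated file must be saved as HUAWEI-PWE3-MIB.py somewhere on the builder's MIB search path):

from pysnmp.smi import builder

mib_builder = builder.MibBuilder()
# hypothetical directory holding the compiled module above
mib_builder.addMibSources(builder.DirMibSource('./compiled-mibs'))
mib_builder.loadModules('HUAWEI-PWE3-MIB')
# importSymbols returns a tuple of the requested symbols
(hwPWTemplateCir,) = mib_builder.importSymbols('HUAWEI-PWE3-MIB', 'hwPWTemplateCir')
print(hwPWTemplateCir.getName())  # full OID: (1, 3, 6, 1, 4, 1, 2011, 5, 25, 119, 4, 1, 5, 1, 22)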
31e9cd4c46e9ca7dcf3f0bccfa1cf4e7f7aa7945
27aaadf435779c29012233cb1dacf27bd9dd0d0f
/imagesearch-20201214/setup.py
158893f64912b1bb6ea8f0b27a99576256e3835a
[ "Apache-2.0" ]
permissive
aliyun/alibabacloud-python-sdk
afadedb09db5ba6c2bc6b046732b2a6dc215f004
e02f34e07a7f05e898a492c212598a348d903739
refs/heads/master
2023-08-22T20:26:44.695288
2023-08-22T12:27:39
2023-08-22T12:27:39
288,972,087
43
29
null
2022-09-26T09:21:19
2020-08-20T10:08:11
Python
UTF-8
Python
false
false
2,842
py
# -*- coding: utf-8 -*-
"""
 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
 distributed with this work for additional information
 regarding copyright ownership.  The ASF licenses this file
 to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at

     http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing,
 software distributed under the License is distributed on an
 "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 KIND, either express or implied.  See the License for the
 specific language governing permissions and limitations
 under the License.
"""
import os
from setuptools import setup, find_packages

"""
setup module for alibabacloud_imagesearch20201214.

Created on 16/12/2022

@author: Alibaba Cloud SDK
"""

PACKAGE = "alibabacloud_imagesearch20201214"
NAME = "alibabacloud_imagesearch20201214" or "alibabacloud-package"
DESCRIPTION = "Alibaba Cloud image search (20201214) SDK Library for Python"
AUTHOR = "Alibaba Cloud SDK"
AUTHOR_EMAIL = "[email protected]"
URL = "https://github.com/aliyun/alibabacloud-python-sdk"
VERSION = __import__(PACKAGE).__version__
REQUIRES = [
    "alibabacloud_tea_util>=0.3.8, <1.0.0",
    "alibabacloud_oss_sdk>=0.1.0, <1.0.0",
    "alibabacloud_openplatform20191219>=2.0.0, <3.0.0",
    "alibabacloud_oss_util>=0.0.5, <1.0.0",
    "alibabacloud_tea_fileform>=0.0.3, <1.0.0",
    "alibabacloud_tea_openapi>=0.3.6, <1.0.0",
    "alibabacloud_openapi_util>=0.2.0, <1.0.0",
    "alibabacloud_endpoint_util>=0.0.3, <1.0.0"
]

LONG_DESCRIPTION = ''
if os.path.exists('./README.md'):
    with open("README.md", encoding='utf-8') as fp:
        LONG_DESCRIPTION = fp.read()

setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    long_description_content_type='text/markdown',
    author=AUTHOR,
    author_email=AUTHOR_EMAIL,
    license="Apache License 2.0",
    url=URL,
    keywords=["alibabacloud", "imagesearch20201214"],
    packages=find_packages(exclude=["tests*"]),
    include_package_data=True,
    platforms="any",
    install_requires=REQUIRES,
    python_requires=">=3.6",
    classifiers=(
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        "Topic :: Software Development"
    )
)
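Note that VERSION is resolved by importing the package itself at build time, so the package directory must sit next to setup.py and define __version__. A minimal sketch of the __init__.py layout this assumes (the version string is a placeholder, not the SDK's real release number):

# alibabacloud_imagesearch20201214/__init__.py -- hypothetical minimal layout
__version__ = '1.0.0'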
2cd72276ffa96c0292769e9e855768fde32ae8b2
f28b2ec517ac93526ac66b0b4a4fccc8614739d0
/HRC_arlac_monitor/Save/zinfo.py
7b806e9f132141ae3ab8e3b52d11d788f490533d
[]
no_license
tisobe/HRC
8b8f7137e0804d7478da17d0796c0f25c19c04e6
aaba4e5512b14c92b8d413dd173e9e944041fa4d
refs/heads/master
2020-03-17T14:43:00.972966
2018-05-16T15:28:29
2018-05-16T15:28:29
133,683,862
0
0
null
null
null
null
UTF-8
Python
false
false
6,944
py
#!/usr/bin/env /proj/sot/ska/bin/python

#############################################################################################################
#
#   hrc_gain_find_ar_lac.py: find new AR Lac observations and put in a list
#
#   author: t. isobe ([email protected])
#
#   Last Update: Sep 24, 2014
#
#############################################################################################################

import os
import sys
import re
import string
import random
import operator
import numpy
import unittest

from astropy.table import Table
from Ska.DBI import DBI

#
#--- reading directory list
#
path = '/data/mta/Script/HRC/Gain/house_keeping/dir_list_py'

f    = open(path, 'r')
data = [line.strip() for line in f.readlines()]
f.close()

for ent in data:
    atemp = re.split(':', ent)
    var   = atemp[1].strip()
    line  = atemp[0].strip()
    exec "%s = %s" %(var, line)
#
#--- append a path to a private folder to python directory
#
sys.path.append(bin_dir)
sys.path.append(mta_dir)
#
#--- convertTimeFormat contains MTA time conversion routines
#
import convertTimeFormat as tcnv
import mta_common_functions as mcf
#
#--- sql related settings
#
db_user   = 'browser'
db_server = 'ocatsqlsrv'
file      = bdata_dir + '/.targpass'
db_passwd = mcf.get_val(file)

#---------------------------------------------------------------------------------------------------
#-- hrc_gain_find_ar_lac: find new AR Lac observations and put in a list                           --
#---------------------------------------------------------------------------------------------------

def hrc_gain_find_ar_lac():
    """
    find new AR Lac observations and put in a list
    Input:  none, but the data will be read from mp_reports
            and also hrc_obsid_list in <house_keeping>
    Output: "./candidate_list" which lists obsids of new AR Lac observations
            candidate_list  it also returns the same list
    """
    hrc_list = hrc_gain_find_hrc_obs()
    candidate_list = hrc_gain_test_obs(hrc_list)

    return candidate_list

#---------------------------------------------------------------------------------------------------
#-- hrc_gain_find_hrc_obs: select out the current hrc observations                                 --
#---------------------------------------------------------------------------------------------------

def hrc_gain_find_hrc_obs():
    """
    select out the current hrc observations and create test candidate list
    Input:  none, but the data will be read from mp_reports
            and also hrc_obsid_list in <house_keeping>
    Output: new_obs --- recently observed HRC obsid list
    """
#
#--- read obsid list of AR Lac we already checked
#
    file = house_keeping + '/hrc_obsid_list'
    f = open(file, 'r')
    obsid_list = [line.strip() for line in f.readlines()]
    f.close()
#
#--- find HRC events from a recent mp_reports
#
    page = '/data/mta_www/mp_reports/events/mta_events.html'
    f = open(page, 'r')
    data = [line.strip() for line in f.readlines()]
    f.close()

    data_list = []
    for ent in data:
        m1 = re.search('HRC', ent)
        m2 = re.search('Obsid', ent)
        if (m1 is not None) and (m2 is not None):
            atemp = re.split('\/', ent)
            data_list.append(atemp[1])
#
#--- select out obsids which we have not checked before
#
    new_obs = []
    for ent in data_list:
        chk = 0
        for comp in obsid_list:
            if ent == comp:
                chk = 1
                continue
        if chk > 0:
            continue
        new_obs.append(ent)

    return new_obs

#---------------------------------------------------------------------------------------------------
#-- hrc_gain_test_obs: find new AR Lac observations from a hrc obsid list                          --
#---------------------------------------------------------------------------------------------------

def hrc_gain_test_obs(new_obs, test=''):
    """
    find new AR Lac observations from a hrc obsid list
    Input:  new_obs --- a list of hrc obsids
            test    --- a test indicator. if it is other than "", test will run
    Output: "./candidate_list" which lists obsids of new AR Lac observations
            candidate_list  it also returns the same list
    """
    if test == "":
        f1 = open('./candidate_list', 'w')

        file  = house_keeping + 'hrc_obsid_list'
        file2 = house_keeping + 'hrc_obsid_list~'
        cmd   = 'cp -f ' + file + ' ' + file2
        os.system(cmd)
        f2 = open(file, 'a')

    candidate_list = []
    for obsid in new_obs:
#
#--- open sql database and extract data we need
#
        db = DBI(dbi='sybase', server=db_server, user=db_user, passwd=db_passwd, database='axafocat')
        cmd = 'select obsid,targid,seq_nbr,targname,grating,instrument from target where obsid=' + obsid
        query_results = db.fetchall(cmd)

        if len(query_results):
            query_results = Table(query_results)
            line = query_results['targname'].data
            targname = line[0]
#
#--- if the observation is AR Lac, write it down in candidate_list
#
            m1 = re.search('arlac', targname.lower())
            if m1 is not None:
                line = obsid + '\n'
                candidate_list.append(obsid)

                if test == '':
                    f1.write(line)
                    f2.write(line)

    if test == '':
        f1.close()
        f2.close()

    return candidate_list

#-----------------------------------------------------------------------------------------
#-- TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST TEST    ---
#-----------------------------------------------------------------------------------------

class TestFunctions(unittest.TestCase):
    """
    testing functions
    """
    def test_hrc_gain_test_obs(self):
        page = house_keeping + '/Test_prep/candidate'
        f = open(page, 'r')
        data_list = [line.strip() for line in f.readlines()]
        f.close()

        test_candidates = ['14313', '14314', '14315', '14316']
        candidates = hrc_gain_test_obs(data_list, test='test')
        self.assertEqual(candidates, test_candidates)

#--------------------------------------------------------------------

if __name__ == '__main__':
    unittest.main()
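The filtering step in hrc_gain_find_hrc_obs scans the whole checked-obsid list once per new obsid. A sketch of the same selection using a set, which preserves the order of data_list while making each membership test constant-time (drop-in only if obsid_list entries are exact-match strings, as they are here):

checked = set(obsid_list)
new_obs = [obsid for obsid in data_list if obsid not in checked]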
a5df9c3956ce741cb2eb6a9c0155d07349363a1d
7cb626363bbce2f66c09e509e562ff3d371c10c6
/multimodel_inference/py3_v1/olds/sc3emlsm.py
dd484cd9338ae7f71742e4f7d8a632754eab4a2b
[ "LicenseRef-scancode-warranty-disclaimer" ]
no_license
z0on/AFS-analysis-with-moments
76bfd6b0361ab7e9173144dbd21b6fa2c7bf1795
eea4735b3b6fbe31c4e396da3d798387884a1500
refs/heads/master
2023-07-31T20:49:20.865161
2023-07-19T06:57:32
2023-07-19T06:57:32
96,915,117
4
5
null
2020-09-02T17:39:08
2017-07-11T16:38:03
Python
UTF-8
Python
false
false
3,249
py
#!/usr/bin/env python
# split, three epochs in each pop, migration at a single rate m in both directions in all epochs
# n(para): 11
import matplotlib
matplotlib.use('PDF')
import moments
import pylab
import random
import matplotlib.pyplot as plt
import numpy as np
from numpy import array
from moments import Misc,Spectrum,Numerics,Manips,Integration,Demographics1D,Demographics2D
import sys

infile = sys.argv[1]
pop_ids = [sys.argv[2], sys.argv[3]]
projections = [int(sys.argv[4]), int(sys.argv[5])]
if len(sys.argv) == 9:
    params = np.loadtxt(sys.argv[8], delimiter=" ", unpack=False)
else:
    params = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0.01]

# mutation rate per sequenced portion of genome per generation: for A.millepora, 0.02
mu = float(sys.argv[6])
# generation time, in thousand years: 0.005 (5 years)
gtime = float(sys.argv[7])

# set Polarized=False below for folded AFS analysis
fs = moments.Spectrum.from_file(infile)
data = fs.project(projections)
ns = data.sample_sizes
np.set_printoptions(precision=3)

#-------------------
# split into unequal pop sizes, with migration at a single rate m in both directions
def sc3ei(params, ns):
    # p_misid: proportion of misidentified ancestral states
    nu1_1, nu2_1, nu1_2, nu2_2, nu1_3, nu2_3, T1, T2, T3, m, p_misid = params
    sts = moments.LinearSystem_1D.steady_state_1D(ns[0] + ns[1])
    fs = moments.Spectrum(sts)
    fs = moments.Manips.split_1D_to_2D(fs, ns[0], ns[1])
    fs.integrate([nu1_1, nu2_1], T1, m = np.array([[0, m], [m, 0]]))
    fs.integrate([nu1_2, nu2_2], T2, m = np.array([[0, m], [m, 0]]))
    fs.integrate([nu1_3, nu2_3], T3, m = np.array([[0, m], [m, 0]]))
    return (1-p_misid)*fs + p_misid*moments.Numerics.reverse_array(fs)

func = sc3ei
upper_bound = [100, 100, 100, 100, 100, 100, 100, 100, 100, 200, 0.25]
lower_bound = [1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-3, 1e-5, 1e-5]
params = moments.Misc.perturb_params(params, fold=2, upper_bound=upper_bound, lower_bound=lower_bound)

poptg = moments.Inference.optimize_log(params, data, func, lower_bound=lower_bound, upper_bound=upper_bound, verbose=False, maxiter=30)

# extracting model predictions, likelihood and theta
model = func(poptg, ns)
ll_model = moments.Inference.ll_multinom(model, data)
theta = moments.Inference.optimal_sfs_scaling(model, data)

# random index for this replicate
ind = str(random.randint(0, 999999))

# plotting demographic model
plot_mod = moments.ModelPlot.generate_model(func, poptg, ns)
moments.ModelPlot.plot_model(plot_mod, save_file="sc3emlsm_"+ind+".png", pop_labels=pop_ids, nref=theta/(4*mu), draw_scale=False, gen_time=gtime, gen_time_units="KY", reverse_timeline=True)

# printing replicate index, likelihood, parameters and theta
print( "RESULT","sc3emlsm",ind,len(params),ll_model,sys.argv[1],sys.argv[2],sys.argv[3],poptg,theta)

# plotting quad-panel figure with AFS, model, residuals:
moments.Plotting.plot_2d_comp_multinom(model, data, vmin=0.1, resid_range=3, pop_ids=pop_ids)
plt.savefig("sc3emlsm_"+ind+"_"+sys.argv[1]+"_"+sys.argv[2]+"_"+sys.argv[3]+"_"+sys.argv[4]+"_"+sys.argv[5]+'.pdf')
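For reference, the scaled optimum can be converted to physical units with the same nref = theta/(4*mu) convention passed to ModelPlot above; in moments, time parameters are in units of 2*Nref generations. A sketch continuing from the variables above, with illustrative numbers in place of real optimizer output:

# illustrative values; real ones come from poptg and theta after optimization
theta_example = 1200.0
T_scaled = 0.5                                # a time parameter from poptg, in 2*Nref generations
Nref = theta_example / (4 * mu)               # ancestral effective population size
T_ky = T_scaled * 2 * Nref * gtime            # elapsed time in thousand years (gtime is ky/generation)
print(Nref, T_ky)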
e8d46842c93e593d1abf393ca6aab62c76269e13
9f35bea3c50668a4205c04373da95195e20e5427
/third_party/blink/renderer/bindings/scripts/bind_gen/path_manager.py
84b01c38d33b1cf2e4c6f677ce0cd1442c1f52c8
[ "LGPL-2.0-or-later", "LicenseRef-scancode-warranty-disclaimer", "LGPL-2.1-only", "GPL-1.0-or-later", "GPL-2.0-only", "LGPL-2.0-only", "BSD-2-Clause", "LicenseRef-scancode-other-copyleft", "MIT", "Apache-2.0", "BSD-3-Clause" ]
permissive
foolcodemonkey/chromium
5958fb37df91f92235fa8cf2a6e4a834c88f44aa
c155654fdaeda578cebc218d47f036debd4d634f
refs/heads/master
2023-02-21T00:56:13.446660
2020-01-07T05:12:51
2020-01-07T05:12:51
232,250,603
1
0
BSD-3-Clause
2020-01-07T05:38:18
2020-01-07T05:38:18
null
UTF-8
Python
false
false
5,524
py
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import posixpath

import web_idl

from . import name_style
from .blink_v8_bridge import blink_class_name


class PathManager(object):
    """
    Provides a variety of paths such as Blink headers and output files.

    Unless explicitly specified, returned paths are relative to the project's
    root directory or the root directory of generated files.
    e.g. "third_party/blink/renderer/..."

    About output files, there are two cases.
    - cross-components case:
        APIs are generated in 'core' and implementations are generated in
        'modules'.
    - single component case:
        Everything is generated in a single component.
    """

    _REQUIRE_INIT_MESSAGE = ("PathManager.init must be called in advance.")
    _is_initialized = False

    @classmethod
    def init(cls, root_src_dir, root_gen_dir, component_reldirs):
        """
        Args:
            root_src_dir: Project's root directory, which corresponds to "//"
                in GN.
            root_gen_dir: Root directory of generated files, which corresponds
                to "//out/Default/gen" in GN.
            component_reldirs: Pairs of component and output directory relative
                to |root_gen_dir|.
        """
        assert not cls._is_initialized
        assert isinstance(root_src_dir, str)
        assert isinstance(root_gen_dir, str)
        assert isinstance(component_reldirs, dict)

        cls._blink_path_prefix = posixpath.sep + posixpath.join(
            "third_party", "blink", "renderer", "")

        cls._root_src_dir = posixpath.abspath(root_src_dir)
        cls._root_gen_dir = posixpath.abspath(root_gen_dir)
        cls._component_reldirs = {
            component: posixpath.normpath(rel_dir)
            for component, rel_dir in component_reldirs.iteritems()
        }
        cls._is_initialized = True

    @staticmethod
    def gen_path_to(path):
        """
        Returns the absolute path of |path| that must be relative to the root
        directory of generated files.
        """
        assert PathManager._is_initialized, PathManager._REQUIRE_INIT_MESSAGE
        return posixpath.abspath(
            posixpath.join(PathManager._root_gen_dir, path))

    @classmethod
    def relpath_to_project_root(cls, path):
        index = path.find(cls._blink_path_prefix)
        if index < 0:
            assert path.startswith(cls._blink_path_prefix[1:])
            return path
        return path[index + 1:]

    def __init__(self, idl_definition):
        assert self._is_initialized, self._REQUIRE_INIT_MESSAGE

        idl_path = PathManager.relpath_to_project_root(
            posixpath.normpath(idl_definition.debug_info.location.filepath))
        idl_basepath, _ = posixpath.splitext(idl_path)
        self._idl_dir, self._idl_basename = posixpath.split(idl_basepath)

        components = sorted(idl_definition.components)  # "core" < "modules"
        if len(components) == 1:
            component = components[0]
            self._is_cross_components = False
            self._api_component = component
            self._impl_component = component
        elif len(components) == 2:
            assert components[0] == "core"
            assert components[1] == "modules"
            self._is_cross_components = True
            self._api_component = components[0]
            self._impl_component = components[1]
        else:
            assert False

        self._api_dir = self._component_reldirs[self._api_component]
        self._impl_dir = self._component_reldirs[self._impl_component]
        self._v8_bind_basename = name_style.file("v8",
                                                 idl_definition.identifier)

        self._blink_dir = self._idl_dir
        self._blink_basename = name_style.file(
            blink_class_name(idl_definition))

    @property
    def idl_dir(self):
        return self._idl_dir

    def blink_path(self, filename=None, ext=None):
        """
        Returns a path to a Blink implementation file relative to the project
        root directory, e.g. "third_party/blink/renderer/..."
        """
        return self._join(
            dirpath=self._blink_dir,
            filename=(filename or self._blink_basename),
            ext=ext)

    @property
    def is_cross_components(self):
        return self._is_cross_components

    @property
    def api_component(self):
        return self._api_component

    @property
    def api_dir(self):
        return self._api_dir

    def api_path(self, filename=None, ext=None):
        return self._join(
            dirpath=self.api_dir,
            filename=(filename or self._v8_bind_basename),
            ext=ext)

    @property
    def impl_component(self):
        return self._impl_component

    @property
    def impl_dir(self):
        return self._impl_dir

    def impl_path(self, filename=None, ext=None):
        return self._join(
            dirpath=self.impl_dir,
            filename=(filename or self._v8_bind_basename),
            ext=ext)

    # TODO(crbug.com/1034398): Remove this API
    def dict_path(self, filename=None, ext=None):
        return self.blink_path(filename, ext)

    @staticmethod
    def _join(dirpath, filename, ext=None):
        if ext is not None:
            filename = posixpath.extsep.join([filename, ext])
        return posixpath.join(dirpath, filename)
002fe2c73ab39e05e6d94dade6da1b7d05d694a9
f38db79439185ab6062294e1d82f6e909d2be81e
/avazacli/models/currency.py
1afe7f4f49b83f587263b664dcf5fb614d8f72a3
[]
no_license
ContainerSolutions/avazacli
3a37f8500ad1f1843acbdbb413d4949e00ec6f91
49618314f15d8cb2bda36e6019670fdfbed1524f
refs/heads/master
2020-06-18T18:44:58.594385
2019-07-11T14:23:10
2019-07-11T14:23:10
196,406,206
3
2
null
null
null
null
UTF-8
Python
false
false
5,344
py
# coding: utf-8

"""
    Avaza API Documentation

    Welcome to the autogenerated documentation & test tool for Avaza's API. <br/><br/><strong>API Security & Authentication</strong><br/>Authentication options include OAuth2 Implicit and Authorization Code flows, and Personal Access Token. All connections should be encrypted over SSL/TLS <br/><br/>You can set up and manage your api authentication credentials from within your Avaza account. (requires Administrator permissions on your Avaza account).<br/><br/> OAuth2 Authorization endpoint: https://any.avaza.com/oauth2/authorize <br/>OAuth2 Token endpoint: https://any.avaza.com/oauth2/token<br/>Base URL for subsequent API Requests: https://api.avaza.com/ <br/><br/>Blogpost about authenticating with Avaza's API: https://www.avaza.com/avaza-api-oauth2-authentication/ <br/>Blogpost on using Avaza's webhooks: https://www.avaza.com/avaza-api-webhook-notifications/<br/>The OAuth flow currently issues Access Tokens that last 1 day, and Refresh tokens that last 180 days<br/>The Api respects the security Roles assigned to the authenticating Avaza user and filters the data return appropriately. <br/><br><strong>Support</strong><br/>For API Support, and to request access please contact Avaza Support Team via our support chat. <br/><br/><strong>User Contributed Libraries:</strong><br/>Graciously contributed by 3rd party users like you. <br/>Note these are not tested or endorsesd by Avaza. We encourage you to review before use, and use at own risk.<br/> <ul><li> - <a target='blank' href='https://packagist.org/packages/debiprasad/oauth2-avaza'>PHP OAuth Client Package for Azava API (by Debiprasad Sahoo)</a></li></ul>  # noqa: E501

    OpenAPI spec version: v1

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

import pprint
import re  # noqa: F401

import six


class Currency(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'currency_code': 'str',
        'decimal_places': 'int'
    }

    attribute_map = {
        'currency_code': 'CurrencyCode',
        'decimal_places': 'DecimalPlaces'
    }

    def __init__(self, currency_code=None, decimal_places=None):  # noqa: E501
        """Currency - a model defined in Swagger"""  # noqa: E501

        self._currency_code = None
        self._decimal_places = None
        self.discriminator = None

        if currency_code is not None:
            self.currency_code = currency_code
        if decimal_places is not None:
            self.decimal_places = decimal_places

    @property
    def currency_code(self):
        """Gets the currency_code of this Currency.  # noqa: E501

        :return: The currency_code of this Currency.  # noqa: E501
        :rtype: str
        """
        return self._currency_code

    @currency_code.setter
    def currency_code(self, currency_code):
        """Sets the currency_code of this Currency.

        :param currency_code: The currency_code of this Currency.  # noqa: E501
        :type: str
        """

        self._currency_code = currency_code

    @property
    def decimal_places(self):
        """Gets the decimal_places of this Currency.  # noqa: E501

        :return: The decimal_places of this Currency.  # noqa: E501
        :rtype: int
        """
        return self._decimal_places

    @decimal_places.setter
    def decimal_places(self, decimal_places):
        """Sets the decimal_places of this Currency.

        :param decimal_places: The decimal_places of this Currency.  # noqa: E501
        :type: int
        """

        self._decimal_places = decimal_places

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Currency):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
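A minimal usage sketch of the generated model, showing how the property setters, dict serialization, and equality behave:

c = Currency(currency_code='USD', decimal_places=2)
print(c.to_dict())   # {'currency_code': 'USD', 'decimal_places': 2}
print(c)             # pretty-printed via __repr__ -> to_str -> to_dict
print(c == Currency(currency_code='USD', decimal_places=2))  # True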
aadcbf0b94233b17dbea12ad397b0590f1241baf
bb33e6be8316f35decbb2b81badf2b6dcf7df515
/source/res/scripts/client/helpers/edgedetectcolorcontroller.py
65c2877cf2f2b4814e0f3a4c532a57a905ba4be7
[]
no_license
StranikS-Scan/WorldOfTanks-Decompiled
999c9567de38c32c760ab72c21c00ea7bc20990c
d2fe9c195825ececc728e87a02983908b7ea9199
refs/heads/1.18
2023-08-25T17:39:27.718097
2022-09-22T06:49:44
2022-09-22T06:49:44
148,696,315
103
39
null
2022-09-14T17:50:03
2018-09-13T20:49:11
Python
UTF-8
Python
false
false
3,793
py
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/helpers/EdgeDetectColorController.py
import BigWorld
import Math
from PlayerEvents import g_playerEvents
from Account import PlayerAccount
from helpers import dependency
from skeletons.account_helpers.settings_core import ISettingsCore
_DEFAULT_OVERLAY_COLOR = Math.Vector4(1, 1, 1, 1)
_OVERLAY_SOLID_KEYS = ('overlay', 'destructible')
_OVERLAY_PATTERN_KEYS = ('overlayForeground', 'overlay', 'destructibleForeground', 'destructible')
_OVERLAY_TARGET_INDEXES = {'enemy': 1, 'friend': 2}
g_instance = None


class EdgeDetectColorController(object):
    settingsCore = dependency.descriptor(ISettingsCore)

    def __init__(self, dataSec):
        self.__colors = {'common': dict(), 'colorBlind': dict()}
        self.__readColors(self.__colors, 'common', dataSec)
        self.__readColors(self.__colors, 'colorBlind', dataSec)

    def updateColors(self):
        self.__changeColor({'isColorBlind': self.settingsCore.getSetting('isColorBlind')})

    def create(self):
        self.settingsCore.onSettingsChanged += self.__changeColor
        g_playerEvents.onAccountShowGUI += self.__onAccountShowGUI

    def destroy(self):
        self.settingsCore.onSettingsChanged -= self.__changeColor
        g_playerEvents.onAccountShowGUI -= self.__onAccountShowGUI

    def __readColors(self, colors, cType, section):
        cName = '{}/'.format(cType)
        out, common = colors[cType], colors['common']
        out['self'] = section.readVector4(cName + 'self', common.get('self', Math.Vector4(0.2, 0.2, 0.2, 0.5)))
        out['enemy'] = section.readVector4(cName + 'enemy', common.get('enemy', Math.Vector4(1, 0, 0, 0.5)))
        out['friend'] = section.readVector4(cName + 'friend', common.get('friend', Math.Vector4(0, 1, 0, 0.5)))
        out['flag'] = section.readVector4(cName + 'flag', common.get('flag', Math.Vector4(1, 1, 1, 1)))
        out['hangar'] = section.readVector4(cName + 'hangar', common.get('hangar', Math.Vector4(1, 1, 0, 1)))
        self.__readOverlayColors(out, common, cType, 'overlaySolidColors', _OVERLAY_SOLID_KEYS, section)
        self.__readOverlayColors(out, common, cType, 'overlayPatternColors', _OVERLAY_PATTERN_KEYS, section)

    def __readOverlayColors(self, out, common, cType, overlayType, keys, section):
        targets = ['enemy', 'friend']
        common, out[overlayType] = common.get(overlayType) or {}, {}
        for target in targets:
            commonTarget, out[overlayType][target] = common.get(target) or {}, {}
            targetPath = '/'.join([cType, overlayType, target]) + '/'
            for key in keys:
                color = section.readVector4(targetPath + key, commonTarget.get(key, _DEFAULT_OVERLAY_COLOR))
                out[overlayType][target][key] = color
            out[overlayType][target]['packed'] = [ out[overlayType][target][key] for key in keys ]

    def __onAccountShowGUI(self, ctx):
        self.updateColors()

    def __changeColor(self, diff):
        if 'isColorBlind' not in diff:
            return
        isHangar = isinstance(BigWorld.player(), PlayerAccount)
        cType = 'colorBlind' if diff['isColorBlind'] else 'common'
        colors = self.__colors[cType]
        colorsSet = (colors['hangar'] if isHangar else colors['self'],
         colors['enemy'],
         colors['friend'],
         colors['flag'])
        i = 0
        for c in colorsSet:
            BigWorld.wgSetEdgeDetectEdgeColor(i, c)
            i += 1

        for target, idx in _OVERLAY_TARGET_INDEXES.iteritems():
            BigWorld.wgSetEdgeDetectSolidColors(idx, *colors['overlaySolidColors'][target]['packed'])
            BigWorld.wgSetEdgeDetectPatternColors(idx, *colors['overlayPatternColors'][target]['packed'])
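Every color read above resolves through a two-level fallback: the colorBlind section falls back to the already-parsed common section, which falls back to a hard-coded default. A pure-Python illustration of that lookup order (plain dicts stand in for the BigWorld data-section API; values are illustrative):

def resolve(color_blind, common, key, default):
    # colorBlind value wins; otherwise common; otherwise the built-in default
    return color_blind.get(key, common.get(key, default))

common = {'enemy': (1, 0, 0, 0.5)}
print(resolve({}, common, 'enemy', (0, 0, 0, 1)))  # -> (1, 0, 0, 0.5), from common
print(resolve({}, {}, 'enemy', (0, 0, 0, 1)))      # -> (0, 0, 0, 1), the default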
f9ce519233fd2afe6e44f4421dd950aae83bb057
9c718b8964d476db4728fc0cf18e24292dd8cf60
/mxshop/MxShop/apps/goods/migrations/0003_auto_20180623_1326.py
8a898faf7e1b7893e241d7d7688d7cf50c63bdd5
[]
no_license
1400720231/Django-Projects
960f9226e0f5c01628afd65b9a78e810fdeb1b83
72f96788163f7ffe76e7599966ddbfa1d2199926
refs/heads/master
2021-06-25T17:41:14.147011
2019-04-03T02:24:38
2019-04-03T02:24:38
114,955,012
1
0
null
null
null
null
UTF-8
Python
false
false
471
py
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-06-23 13:26
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('goods', '0002_goodscategorybrand_category'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='goodscategory',
            options={'verbose_name': '商品类目', 'verbose_name_plural': '商品类目'},
        ),
    ]
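AlterModelOptions only rewrites model state, so no SQL is emitted; after applying it, the model's Meta is equivalent to the sketch below (field definitions omitted; the verbose names are the migration's Chinese strings, meaning "goods category"):

from django.db import models

class GoodsCategory(models.Model):
    class Meta:
        verbose_name = '商品类目'          # "goods category"
        verbose_name_plural = '商品类目'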
40903430884cb63c47a7d28f94fa27d31ac66245
92adce9ebf87fc374f6cc093f68d14b7dc7a697f
/read.py
2a08a459948865f3bdc2e5d27e913a2e6bdb6128
[ "MIT" ]
permissive
nicolay-r/RuSentRel
1f4f5678fdcbe342bda82834d6192a70b5c8b94c
4fc0df1580d3da21f0be1e832e403652f73caed1
refs/heads/master
2021-08-27T16:49:34.342494
2021-08-03T06:48:39
2021-08-03T06:48:39
128,990,497
6
2
MIT
2021-08-03T06:48:39
2018-04-10T20:22:16
Python
UTF-8
Python
false
false
3,153
py
#!/usr/bin/python
from reader.common.bound import Bound
from reader.common.entities.collection import EntityCollection
from reader.common.lemmatization.mystem import MystemWrapper
from reader.entities.collection import RuSentRelDocumentEntityCollection
from reader.entities.entity import RuSentRelEntity
from reader.news import RuSentRelNews
from reader.opinions.collection import RuSentRelOpinionCollection
from reader.opinions.opinion import RuSentRelOpinion
from reader.sentence import RuSentRelSentence
from reader.synonyms import RuSentRelSynonymsCollection

import utils

# Initializing stemmer
stemmer = MystemWrapper()

# Reading synonyms collection.
synonyms = RuSentRelSynonymsCollection.from_file('synonyms.txt', stemmer=stemmer)

# Reading 'test' subfolder of collection.
train_root = 'test'
for news_id in utils.get_rusentrel_test_indices():
    print("NewsID: {}".format(news_id))

    # Init filepaths
    entities_filepath = utils.get_rusentrel_entity_filepath(news_id, root=train_root)
    news_filepath = utils.get_rusentrel_news_filepath(news_id, root=train_root)
    opinion_filepath = utils.get_rusentrel_format_sentiment_opin_filepath(news_id, root=train_root, is_etalon=True)

    # Read collections
    entities = RuSentRelDocumentEntityCollection.from_file(entities_filepath, stemmer=stemmer, synonyms=synonyms)
    news = RuSentRelNews.from_file(news_filepath, entities)
    opinions = RuSentRelOpinionCollection.from_file(opinion_filepath, synonyms=synonyms)

    #############
    # Application
    #############

    # Example: Access to the read OPINIONS collection.
    for opinion in opinions:
        assert(isinstance(opinion, RuSentRelOpinion))
        print("\t{}->{} ({}) [synonym groups opinion: {}->{}]".format(
            opinion.SourceValue,
            opinion.TargetValue,
            opinion.Sentiment.to_str(),
            # Considering synonyms.
            synonyms.get_synonym_group_index(opinion.SourceValue),
            synonyms.get_synonym_group_index(opinion.TargetValue)))

    # Example: Access to the read NEWS collection.
    for sentence in news.iter_sentences():
        assert(isinstance(sentence, RuSentRelSentence))

        # Access to text.
        print("\tSentence: '{}'".format(sentence.Text.strip()))

        # Access to inner entities.
        for entity, bound in sentence.iter_entity_with_local_bounds():
            assert(isinstance(entity, RuSentRelEntity))
            assert(isinstance(bound, Bound))
            print("\tEntity: {}, text position: ({}-{}), IdInDocument: {}".format(
                entity.Value,
                bound.Position,
                bound.Position + bound.Length,
                entity.IdInDocument))

    # Example: Access to the read ENTITIES collection.
    example_entity = entities.get_entity_by_index(10)
    entities_list = entities.try_get_entities(example_entity.Value, group_key=EntityCollection.KeyType.BY_SYNONYMS)
    print("\tText synonymous to: '{}'".format(example_entity.Value))
    print("\t[{}]".format(", ".join([str((e.Value, str(e.IdInDocument))) for e in entities_list])))
c7d4ab5c5775b06d53e956773414cb4210cd5023
c4a3eeabe660e5d6b42f704d0325a755331ab3c5
/hyperion/hyperion_sedcom.py
93f0c5a6dcaec34cae9ebd884d6942b366f84b70
[]
no_license
yaolun/misc
dfcfde2ac4a6429201644e1354912d3a064f9524
049b68ce826ddf638cec9a3b995d9ee84bf6075a
refs/heads/master
2021-01-21T23:54:08.953071
2018-06-02T19:46:18
2018-06-02T19:46:18
26,666,071
1
0
null
null
null
null
UTF-8
Python
false
false
4,970
py
def hyperion_sedcom(modellist, outdir, plotname, obs_data=None, labellist=None, lbol=False, legend=True, mag=1.5,\
                    obs_preset='sh', dstar=1, aper=[3.6, 4.5, 5.8, 8.0, 10, 20, 24, 70, 160, 250, 350, 500, 850]):
    """
    obs_data: dictionary which obs_data['spec'] is spectrum and obs_data['phot'] is photometry
              obs_data['label'] = (wave, Fv, err) in um and Jy by default
    """
    import numpy as np
    import os
    import matplotlib.pyplot as plt
    import astropy.constants as const
    from hyperion.model import ModelOutput
    from scipy.interpolate import interp1d
    from l_bol import l_bol
    import seaborn as sb
    # from seaborn import color_palette
    # from seaborn_color import seaborn_color

    # constant setup
    c = const.c.cgs.value
    pc = const.pc.cgs.value

    if labellist is None:
        if legend:
            print('Model labels are not provided. Use their filename instead.')
        labellist = []
        for i in range(0, len(modellist)):
            labellist.append(r'$\mathrm{'+os.path.splitext(os.path.basename(modellist[i]))[0]+'}$')

    # cm = seaborn_color('colorblind',len(modellist))
    sb.set(style="white")
    cm = sb.color_palette('husl', len(modellist))

    # create figure object
    fig = plt.figure(figsize=(8*mag,6*mag))
    ax = fig.add_subplot(111)
    # sb.set_style('ticks')

    print('plotting with aperture at', aper, 'um')

    # if the obs_data is provided then plot the observation first. In this way, models won't be blocked by data
    if obs_data is not None:
        if 'spec' in obs_data.keys():
            (wave, fv, err) = obs_data['spec']
            vfv = c/(wave*1e-4)*fv*1e-23
            l_bol_obs = l_bol(wave, fv, dstar)
            if legend:
                ax.text(0.75,0.9,r'$\mathrm{L_{bol}= %5.2f L_{\odot}}$' % l_bol_obs,fontsize=mag*16,transform=ax.transAxes)

            # general plotting scheme
            if obs_preset is None:
                spec, = ax.plot(np.log10(wave),np.log10(vfv),'-',color='k',linewidth=1.5*mag, label=r'$\mathrm{observations}$')
            # plot spitzer, Herschel pacs and spire in different colors
            elif obs_preset == 'sh':
                # spitzer
                spitz, = ax.plot(np.log10(wave[wave < 50]),np.log10(vfv[wave < 50]),'-',color='b',linewidth=1*mag,\
                                 label=r'$\mathrm{\it Spitzer}$')
                # herschel
                pacs, = ax.plot(np.log10(wave[(wave < 190.31) & (wave > 50)]),np.log10(vfv[(wave < 190.31) & (wave > 50)]),'-',\
                                color='Green',linewidth=1*mag, label=r'$\mathrm{{\it Herschel}-PACS}$')
                spire, = ax.plot(np.log10(wave[wave >= 190.31]),np.log10(vfv[wave >= 190.31]),'-',color='k',linewidth=1*mag,\
                                 label=r'$\mathrm{{\it Herschel}-SPIRE}$')
                spec = [spitz, pacs, spire]

        if 'phot' in obs_data.keys():
            (wave_p, fv_p, err_p) = obs_data['phot']
            vfv_p = c/(wave_p*1e-4)*fv_p*1e-23
            vfv_p_err = c/(wave_p*1e-4)*err_p*1e-23
            phot, = ax.plot(np.log10(wave_p),np.log10(vfv_p),'s',mfc='DimGray',mec='k',markersize=8)
            ax.errorbar(np.log10(wave_p),np.log10(vfv_p),yerr=[np.log10(vfv_p)-np.log10(vfv_p-vfv_p_err), np.log10(vfv_p+vfv_p_err)-np.log10(vfv_p)],\
                        fmt='s',mfc='DimGray',mec='k',markersize=8)

    modplot = dict()
    for imod in range(0, len(modellist)):
        m = ModelOutput(modellist[imod])
        # if not specified, distance of the star will be taken as 1 pc.
        if aper is None:
            sed_dum = m.get_sed(group=0, inclination=0, aperture=-1, distance=dstar * pc)
            # note: the original referenced an undefined 'ax_sed' here; 'ax' is the axes created above
            modplot['mod'+str(imod+1)], = ax.plot(np.log10(sed_dum.wav), np.log10(sed_dum.val), '-', color='GoldenRod', linewidth=1.5*mag)
        else:
            vfv_aper = np.empty_like(aper)
            for i in range(0, len(aper)):
                sed_dum = m.get_sed(group=i+1, inclination=0, aperture=-1, distance=dstar * pc)
                f = interp1d(sed_dum.wav, sed_dum.val)
                vfv_aper[i] = f(aper[i])
            modplot['mod'+str(imod+1)], = ax.plot(np.log10(aper),np.log10(vfv_aper),'o',mfc='None',mec=cm[imod],markersize=12,\
                                                  markeredgewidth=3, label=labellist[imod], linestyle='-',color=cm[imod],linewidth=1.5*mag)

    # plot fine tune
    ax.set_xlabel(r'$\mathrm{log~\lambda~({\mu}m)}$',fontsize=mag*20)
    ax.set_ylabel(r'$\mathrm{log~\nu S_{\nu}~(erg/cm^{2}/s)}$',fontsize=mag*20)
    [ax.spines[axis].set_linewidth(1.5*mag) for axis in ['top','bottom','left','right']]
    ax.minorticks_on()
    ax.tick_params('both',labelsize=mag*18,width=1.5*mag,which='major',pad=15,length=5*mag)
    ax.tick_params('both',labelsize=mag*18,width=1.5*mag,which='minor',pad=15,length=2.5*mag)

    if obs_preset == 'sh':
        ax.set_ylim([-14,-7])
        ax.set_xlim([0,3])

    if legend:
        lg = ax.legend(loc='best',fontsize=14*mag,numpoints=1,framealpha=0.3)

    # Write out the plot
    fig.savefig(outdir+plotname+'.pdf',format='pdf',dpi=300,bbox_inches='tight')
    fig.clf()

# import numpy as np
# from get_bhr71_obs import get_bhr71_obs
# obs_data = get_bhr71_obs('/Users/yaolun/bhr71/obs_for_radmc/')
# mod_num = [32,56]
# modellist = []
# modir = '/Users/yaolun/test/model'
# for mod in mod_num:
#     modellist.append(modir+str(mod)+'/model'+str(mod)+'.rtout')
# outdir = '/Users/yaolun/test/'
# hyperion_sedcom(modellist, outdir, 'test', obs_data=obs_data, lbol=True, dstar=178)
6eed42b73bb347a25e6a01089fb43ea62d05b786
282d0a84b45b12359b96bbf0b1d7ca9ee0cb5d19
/Malware1/venv/Lib/site-packages/pandas/tests/scalar/timedelta/test_arithmetic.py
9fd1780c8ab77d3e11c59fa5cff81d29267a95f1
[]
no_license
sameerakhtar/CyberSecurity
9cfe58df98495eac6e4e2708e34e70b7e4c055d3
594973df27b4e1a43f8faba0140ce7d6c6618f93
refs/heads/master
2022-12-11T11:53:40.875462
2020-09-07T23:13:22
2020-09-07T23:13:22
293,598,094
0
0
null
null
null
null
UTF-8
Python
false
false
130
py
version https://git-lfs.github.com/spec/v1 oid sha256:fa4d666d9e29f8945376652f7fb7084c3be064a66d0592c045742bd872cd8934 size 22242
5744415b18511a257ef945bcfa2df6f5ae1b04d1
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p02954/s798759253.py
36a83144ddfc3b7f136a3c3568e3fead3056d7a7
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
709
py
S = input() + 'R' numbers = [0] * (len(S) - 1) offset = 0 length = 0 right = 0 left = 0 for index, (s0, s1) in enumerate(zip(S, S[1:])): length += 1 if s0 == 'R' and s1 == 'L': right = length - 1 left = length elif s0 == 'L' and s1 == 'R': if length % 2 == 0: numbers[offset + right] = length // 2 numbers[offset + left] = length // 2 elif right % 2 == 0: numbers[offset + right] = length // 2 + 1 numbers[offset + left] = length // 2 else: numbers[offset + right] = length // 2 numbers[offset + left] = length // 2 + 1 length = 0 offset = index + 1 print(*numbers)
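A quick self-check of the solution above: wrapping the same logic in a function (the solve wrapper below is hypothetical; the algorithm is copied verbatim) reproduces the published sample answer 0 1 2 1 1 for the input RRLRL, as traced by hand:

def solve(s):
    # identical logic to the script above, parameterized for testing
    s = s + 'R'
    numbers = [0] * (len(s) - 1)
    offset = 0
    length = 0
    right = 0
    left = 0
    for index, (s0, s1) in enumerate(zip(s, s[1:])):
        length += 1
        if s0 == 'R' and s1 == 'L':
            right = length - 1
            left = length
        elif s0 == 'L' and s1 == 'R':
            if length % 2 == 0:
                numbers[offset + right] = length // 2
                numbers[offset + left] = length // 2
            elif right % 2 == 0:
                numbers[offset + right] = length // 2 + 1
                numbers[offset + left] = length // 2
            else:
                numbers[offset + right] = length // 2
                numbers[offset + left] = length // 2 + 1
            length = 0
            offset = index + 1
    return numbers

assert solve("RRLRL") == [0, 1, 2, 1, 1]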
30331b3f91ed037754998e3d702452ca29ec74bf
fca1edf135c49edf08a51d6758a18b0bffe69309
/todobackend/todobackend/views.py
47d0a4f5fcc9bf4b254736902e56b77beafda449
[]
no_license
BakdauletBolatE/myfirstTodo
9d1daa8ac08806524dff1cc975d165dca8fb1b1f
c8e9f29afca06ce2198847770c777f96c09eefbb
refs/heads/master
2023-04-18T04:07:27.684527
2021-04-30T20:04:15
2021-04-30T20:04:15
363,240,056
0
0
null
null
null
null
UTF-8
Python
false
false
106
py
from django.shortcuts import render def index(request): return render(request,'todoapp/index.html')
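For context, a view like index above still needs a route. A minimal wiring sketch follows; the module layout and route name are assumptions (not taken from this repo) and it presumes Django >= 2.0 for django.urls.path:

# Hypothetical todoapp/urls.py for the index view above (names assumed).
from django.urls import path

from . import views

urlpatterns = [
    path('', views.index, name='index'),
]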
c7c6601ede207fab5b186671af8d6270d38078ca
75dcb56e318688499bdab789262839e7f58bd4f6
/_algorithms_challenges/leetcode/LeetcodePythonProject/leetcode_0451_0500/LeetCode460_LFUCache.py
b209a542f7836f153654b987fa1ac33c01e19f6d
[]
no_license
syurskyi/Algorithms_and_Data_Structure
9a1f358577e51e89c862d0f93f373b7f20ddd261
929dde1723fb2f54870c8a9badc80fc23e8400d3
refs/heads/master
2023-02-22T17:55:55.453535
2022-12-23T03:15:00
2022-12-23T03:15:00
226,243,987
4
1
null
2023-02-07T21:01:45
2019-12-06T04:14:10
Jupyter Notebook
UTF-8
Python
false
false
4,277
py
''' Created on Apr 23, 2017 @author: MT ''' class Node(object): def __init__(self, key, val): self.val = val self.next = None self.prev = None self.freq = 1 self.key = key class LFUCache(object): def __init__(self, capacity): """ :type capacity: int """ self.capacity = capacity self.hashmap = {} self.freqMap = {} self.length = 0 self.head = Node(-1, -1) self.tail = Node(-1, -1) self.head.next = self.tail self.head.freq = float('-inf') self.tail.freq = float('inf') def get(self, key): """ :type key: int :rtype: int """ if key not in self.hashmap: return -1 else: value = self.hashmap[key].val self.updateNode(self.hashmap[key]) return value def put(self, key, value): """ :type key: int :type value: int :rtype: void """ if self.capacity == 0: return if key in self.hashmap: self.hashmap[key].val = value self.updateNode(self.hashmap[key]) else: if self.capacity > self.length: self.length += 1 node = Node(key, value) self.hashmap[key] = node node.freq = 1 if 1 in self.freqMap: tmp = self.freqMap[1][1] # tail of freq nextNode = tmp.next tmp.next = node node.prev = tmp node.next = nextNode node.next.prev = node self.freqMap[1][1] = node else: nextNode = self.head.next node.next = nextNode node.prev = self.head nextNode.prev = node self.head.next = node self.freqMap[1] = [node, node] else: node = Node(key, value) self.hashmap[key] = node firstNode = self.head.next freq = firstNode.freq if self.freqMap[freq][0] == self.freqMap[freq][1]: self.head.next = firstNode.next firstNode.next.prev = self.head del self.freqMap[freq] else: self.freqMap[freq][0] = self.freqMap[freq][0].next self.head.next = firstNode.next firstNode.next.prev = self.head del self.hashmap[firstNode.key] if 1 in self.freqMap: tmp = self.freqMap[1][1] # tail of freq nextNode = tmp.next tmp.next = node node.prev = tmp node.next = nextNode node.next.prev = node self.freqMap[1][1] = node else: nextNode = self.head.next nextNode.prev = node node.next = nextNode self.head.next = node node.prev = self.head self.freqMap[1] = [node, node] def updateNode(self, node): freq = node.freq nextNode = self.freqMap[freq][1].next node.prev.next = node.next node.next.prev = node.prev if self.freqMap[freq][0] == self.freqMap[freq][1]: del self.freqMap[freq] else: if self.freqMap[freq][0] == node: self.freqMap[freq][0] = node.next if self.freqMap[freq][1] == node: self.freqMap[freq][1] = node.prev node.freq += 1 freq += 1 if freq in self.freqMap: tail = self.freqMap[freq][1] node.next = tail.next tail.next = node node.next.prev = node node.prev = tail self.freqMap[freq][1] = node else: prevNode = nextNode.prev prevNode.next = node node.next = nextNode nextNode.prev = node node.prev = prevNode self.freqMap[freq] = [node, node]
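A short usage sketch for the cache above, following the classic LeetCode 460 example; the values were traced by hand against this implementation (on a frequency tie, the least recently used key is evicted):

cache = LFUCache(2)
cache.put(1, 1)
cache.put(2, 2)
assert cache.get(1) == 1    # bumps key 1 to frequency 2
cache.put(3, 3)             # evicts key 2 (lowest frequency)
assert cache.get(2) == -1
assert cache.get(3) == 3    # bumps key 3 to frequency 2
cache.put(4, 4)             # 1 and 3 tie on frequency; 1 is least recent, so it is evicted
assert cache.get(1) == -1
assert cache.get(3) == 3
assert cache.get(4) == 4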
8b634a88c02b8398beec70c04cb11898abd76653
bb150497a05203a718fb3630941231be9e3b6a32
/models/PaddleHub/hub_all_func/test_ID_Photo_GEN.py
f43c8cd99596b00e1f7cb91e1e66c9e5fca71e42
[]
no_license
PaddlePaddle/PaddleTest
4fb3dec677f0f13f7f1003fd30df748bf0b5940d
bd3790ce72a2a26611b5eda3901651b5a809348f
refs/heads/develop
2023-09-06T04:23:39.181903
2023-09-04T11:17:50
2023-09-04T11:17:50
383,138,186
42
312
null
2023-09-13T11:13:35
2021-07-05T12:44:59
Python
UTF-8
Python
false
false
642
py
"""ID_Photo_GEN""" import os import cv2 import paddle import paddlehub as hub if paddle.is_compiled_with_cuda(): paddle.set_device("gpu") use_gpu = True else: paddle.set_device("cpu") use_gpu = False def test_ID_Photo_GEN_predict(): """ID_Photo_GEN""" os.system("hub install ID_Photo_GEN") model = hub.Module(name="ID_Photo_GEN") result = model.Photo_GEN( images=[cv2.imread("face_01.jpeg")], paths=None, batch_size=1, output_dir="output_ID_Photo_GEN", visualization=True, use_gpu=use_gpu, ) print(result) os.system("hub uninstall ID_Photo_GEN")
e0993846926170ccec472592ba4c25eadd8b01b5
0fa98dbc4d6256121b9f478a13ff2254047fb543
/12_01_typical_interview_tasks/K. One in a binary number.py
b5aaa9111c3f0e1f571ed9c30e528b3a2e6a41b5
[]
no_license
vamotest/yandex_algorithms
48d5b29cb6e2789ea8f7e8024c798851058f1d4c
a588da3d21ff95e2437818493769719600f3eaf7
refs/heads/master
2023-03-19T20:44:59.373046
2021-01-20T19:06:28
2021-01-20T19:06:28
330,421,669
3
1
null
null
null
null
UTF-8
Python
false
false
358
py
def binary_convert(number): binary_number = '' while number > 0: binary_number = str(number % 2) + binary_number number = number // 2 return binary_number def find_amount(binary_number): return binary_number.count('1') if __name__ == '__main__': count = find_amount(binary_convert(int(input()))) print(count)
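A quick check of the two helpers above, plus the standard-library one-liner they are equivalent to (note that binary_convert(0) returns an empty string, since the while loop never runs):

assert binary_convert(13) == '1101'           # 13 = 0b1101
assert find_amount(binary_convert(13)) == 3   # three 1-bits
assert bin(13).count('1') == 3                # same result via the stdlib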
de6b02deb9e80be2b82f5b65928b23b1d4744a49
d554b1aa8b70fddf81da8988b4aaa43788fede88
/5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/222/users/4029/codes/1643_1054.py
8a3c8ff612464a89b4701c45ea86a8670cc77b5b
[]
no_license
JosephLevinthal/Research-projects
a3bc3ca3b09faad16f5cce5949a2279cf14742ba
60d5fd6eb864a5181f4321e7a992812f3c2139f9
refs/heads/master
2022-07-31T06:43:02.686109
2020-05-23T00:24:26
2020-05-23T00:24:26
266,199,309
1
0
null
null
null
null
UTF-8
Python
false
false
176
py
x = float(input("Coordinate x: "))
y = float(input("Coordinate y: "))
if 2 * x + y == 3:
    print("point lies on the line")
else:
    print("point does not lie on the line")
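A worked check of the condition above with hypothetical inputs: for (x, y) = (1, 1), 2*1 + 1 == 3, so the point lies on the line 2x + y = 3; for (2, 0), 2*2 + 0 == 4 != 3, so it does not.

for x, y in [(1.0, 1.0), (2.0, 0.0)]:
    print((x, y), 'on the line' if 2 * x + y == 3 else 'off the line')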
37591cad599ca130ba2cdf6a22dc5e86d13f686f
ae0f37ebb76bce44c5e366d62424b5ef411f94b3
/3. OOP/Tutorial/Rational.py
f83ae514fef27d48b4fc04bc06c6667e5dfeaacd
[]
no_license
khangsk/PPL
a30b656a8a70b8f6dd96ce39f57d3540495a5a26
b8e3a04210796e03ff257c05cd1e60923f016d2f
refs/heads/master
2023-02-21T09:28:25.216162
2021-01-18T09:35:15
2021-01-18T09:35:15
306,542,959
0
1
null
null
null
null
UTF-8
Python
false
false
804
py
import math class Rational: def __init__(self, n = 0, d = 1): assert(d != 0) g = math.gcd(abs(n), abs(d)) self.numer = int(n / g) self.denom = int(d / g) def __str__(self): return str(self.numer) + "/" + str(self.denom) def __add__(self, another): targetType = type(another).__name__ if targetType == 'int': return self.addRational(Rational(another)) elif targetType == 'Rational': return self.addRational(another) raise Exception('Rational not support operator + with type ' + targetType) def addRational(self, r): assert(type(r).__name__ == 'Rational') return Rational( self.numer * r.denom + self.denom * r.numer, self.denom * r.denom )
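A usage sketch for the class above, with results traced by hand. Note the class defines __add__ but no __radd__, so 1 + Rational(1, 2) would raise a TypeError even though Rational(1, 2) + 1 works:

a = Rational(1, 2)
b = Rational(1, 3)
print(a + b)           # 5/6
print(a + 1)           # 3/2 -- the int is promoted inside __add__
print(Rational(2, 4))  # 1/2 -- reduced by gcd in __init__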
061aebdb29e7c6907a3a9ac112f56e81483271f8
15f321878face2af9317363c5f6de1e5ddd9b749
/solutions_python/Problem_156/832.py
6c50802fc3b768a580b91d24aaf771150f9157d8
[]
no_license
dr-dos-ok/Code_Jam_Webscraper
c06fd59870842664cd79c41eb460a09553e1c80a
26a35bf114a3aa30fc4c677ef069d95f41665cc0
refs/heads/master
2020-04-06T08:17:40.938460
2018-10-14T10:12:47
2018-10-14T10:12:47
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,650
py
#!/usr/bin/env python import sys from math import ceil, floor def solve_naive_test(pancakes_per_plate, minute=0): if not any(pancakes_per_plate): return minute else: splits = xrange(len(pancakes_per_plate)) eaten_one = [(x - 1) if x > 0 else 0 for x in pancakes_per_plate] solutions = [solve_naive_test(eaten_one, minute + 1)] for split in splits: if pancakes_per_plate[split] > 2: split_one = pancakes_per_plate[:split] split_one.append(int(floor(pancakes_per_plate[split] / 2.0))) split_one.append(int(ceil(pancakes_per_plate[split] / 2.0))) split_one.extend(pancakes_per_plate[split + 1:]) solutions.append(solve_naive_test(split_one, minute + 1)) return min(solutions) def one_eaten(plates): return [(x - 1) if x > 0 else 0 for x in plates] def mean(l): return float(sum(l)) / len(l) def max_split(slope, max_slope, max_count): for p in xrange(1, int(max_slope / 2.0) + 1): s = list(slope) for x in xrange(max_count): s[x] -= p s.extend([p] * max_count) #print '\t', s yield s def solve(pancakes_per_plate, minute=0, log=[]): if not any(pancakes_per_plate): return (minute, log) else: slope = list(sorted(pancakes_per_plate, reverse=True)) max_slope = max(slope) max_count = sum(1 for p in slope if p == max_slope) #print slope, max_slope, max_count solutions = [solve(one_eaten(slope), minute + 1, log + ['eat'])] if max_slope > 2: for split in max_split(slope, max_slope, max_count): solutions.append( solve( split, minute + max_count, log + ['split {x}'.format(x=max_count)], )) return min(solutions, key=lambda k: k[0]) if __name__ == '__main__': input_file_name = sys.argv[1] input_file = open(input_file_name) number_of_cases = int(input_file.readline().strip()) first_case_number = 1 for x in xrange(number_of_cases): sys.stderr.write("{x}\n".format(x=x)) nonempty_plate_count = int(input_file.readline().strip()) pancakes_per_plate = [int(d) for d in input_file.readline().strip().split()] #print "Input", pancakes_per_plate solution, log = solve(pancakes_per_plate) print "Case #{case_number}: {minutes_needed}".format( case_number=(first_case_number + x), minutes_needed=solution ) #print log
7f635137feec443809b1f3a31a83d1c7aeda4e2d
ba694353a3cb1cfd02a6773b40f693386d0dba39
/sdk/python/pulumi_google_native/compute/v1/get_node_group.py
9d5b2f685d300d8534f4e615881b009826f37060
[ "BSD-3-Clause", "Apache-2.0" ]
permissive
pulumi/pulumi-google-native
cc57af8bd3d1d6b76f1f48333ed1f1b31d56f92b
124d255e5b7f5440d1ef63c9a71e4cc1d661cd10
refs/heads/master
2023-08-25T00:18:00.300230
2023-07-20T04:25:48
2023-07-20T04:25:48
323,680,373
69
16
Apache-2.0
2023-09-13T00:28:04
2020-12-22T16:39:01
Python
UTF-8
Python
false
false
10,612
py
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import copy import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities from . import outputs __all__ = [ 'GetNodeGroupResult', 'AwaitableGetNodeGroupResult', 'get_node_group', 'get_node_group_output', ] @pulumi.output_type class GetNodeGroupResult: def __init__(__self__, autoscaling_policy=None, creation_timestamp=None, description=None, fingerprint=None, kind=None, location_hint=None, maintenance_policy=None, maintenance_window=None, name=None, node_template=None, self_link=None, share_settings=None, size=None, status=None, zone=None): if autoscaling_policy and not isinstance(autoscaling_policy, dict): raise TypeError("Expected argument 'autoscaling_policy' to be a dict") pulumi.set(__self__, "autoscaling_policy", autoscaling_policy) if creation_timestamp and not isinstance(creation_timestamp, str): raise TypeError("Expected argument 'creation_timestamp' to be a str") pulumi.set(__self__, "creation_timestamp", creation_timestamp) if description and not isinstance(description, str): raise TypeError("Expected argument 'description' to be a str") pulumi.set(__self__, "description", description) if fingerprint and not isinstance(fingerprint, str): raise TypeError("Expected argument 'fingerprint' to be a str") pulumi.set(__self__, "fingerprint", fingerprint) if kind and not isinstance(kind, str): raise TypeError("Expected argument 'kind' to be a str") pulumi.set(__self__, "kind", kind) if location_hint and not isinstance(location_hint, str): raise TypeError("Expected argument 'location_hint' to be a str") pulumi.set(__self__, "location_hint", location_hint) if maintenance_policy and not isinstance(maintenance_policy, str): raise TypeError("Expected argument 'maintenance_policy' to be a str") pulumi.set(__self__, "maintenance_policy", maintenance_policy) if maintenance_window and not isinstance(maintenance_window, dict): raise TypeError("Expected argument 'maintenance_window' to be a dict") pulumi.set(__self__, "maintenance_window", maintenance_window) if name and not isinstance(name, str): raise TypeError("Expected argument 'name' to be a str") pulumi.set(__self__, "name", name) if node_template and not isinstance(node_template, str): raise TypeError("Expected argument 'node_template' to be a str") pulumi.set(__self__, "node_template", node_template) if self_link and not isinstance(self_link, str): raise TypeError("Expected argument 'self_link' to be a str") pulumi.set(__self__, "self_link", self_link) if share_settings and not isinstance(share_settings, dict): raise TypeError("Expected argument 'share_settings' to be a dict") pulumi.set(__self__, "share_settings", share_settings) if size and not isinstance(size, int): raise TypeError("Expected argument 'size' to be a int") pulumi.set(__self__, "size", size) if status and not isinstance(status, str): raise TypeError("Expected argument 'status' to be a str") pulumi.set(__self__, "status", status) if zone and not isinstance(zone, str): raise TypeError("Expected argument 'zone' to be a str") pulumi.set(__self__, "zone", zone) @property @pulumi.getter(name="autoscalingPolicy") def autoscaling_policy(self) -> 'outputs.NodeGroupAutoscalingPolicyResponse': """ Specifies how autoscaling should behave. 
""" return pulumi.get(self, "autoscaling_policy") @property @pulumi.getter(name="creationTimestamp") def creation_timestamp(self) -> str: """ Creation timestamp in RFC3339 text format. """ return pulumi.get(self, "creation_timestamp") @property @pulumi.getter def description(self) -> str: """ An optional description of this resource. Provide this property when you create the resource. """ return pulumi.get(self, "description") @property @pulumi.getter def fingerprint(self) -> str: return pulumi.get(self, "fingerprint") @property @pulumi.getter def kind(self) -> str: """ The type of the resource. Always compute#nodeGroup for node group. """ return pulumi.get(self, "kind") @property @pulumi.getter(name="locationHint") def location_hint(self) -> str: """ An opaque location hint used to place the Node close to other resources. This field is for use by internal tools that use the public API. The location hint here on the NodeGroup overrides any location_hint present in the NodeTemplate. """ return pulumi.get(self, "location_hint") @property @pulumi.getter(name="maintenancePolicy") def maintenance_policy(self) -> str: """ Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT. For more information, see Maintenance policies. """ return pulumi.get(self, "maintenance_policy") @property @pulumi.getter(name="maintenanceWindow") def maintenance_window(self) -> 'outputs.NodeGroupMaintenanceWindowResponse': return pulumi.get(self, "maintenance_window") @property @pulumi.getter def name(self) -> str: """ The name of the resource, provided by the client when initially creating the resource. The resource name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. """ return pulumi.get(self, "name") @property @pulumi.getter(name="nodeTemplate") def node_template(self) -> str: """ URL of the node template to create the node group from. """ return pulumi.get(self, "node_template") @property @pulumi.getter(name="selfLink") def self_link(self) -> str: """ Server-defined URL for the resource. """ return pulumi.get(self, "self_link") @property @pulumi.getter(name="shareSettings") def share_settings(self) -> 'outputs.ShareSettingsResponse': """ Share-settings for the node group """ return pulumi.get(self, "share_settings") @property @pulumi.getter def size(self) -> int: """ The total number of nodes in the node group. """ return pulumi.get(self, "size") @property @pulumi.getter def status(self) -> str: return pulumi.get(self, "status") @property @pulumi.getter def zone(self) -> str: """ The name of the zone where the node group resides, such as us-central1-a. 
""" return pulumi.get(self, "zone") class AwaitableGetNodeGroupResult(GetNodeGroupResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetNodeGroupResult( autoscaling_policy=self.autoscaling_policy, creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, location_hint=self.location_hint, maintenance_policy=self.maintenance_policy, maintenance_window=self.maintenance_window, name=self.name, node_template=self.node_template, self_link=self.self_link, share_settings=self.share_settings, size=self.size, status=self.status, zone=self.zone) def get_node_group(node_group: Optional[str] = None, project: Optional[str] = None, zone: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNodeGroupResult: """ Returns the specified NodeGroup. Get a list of available NodeGroups by making a list() request. Note: the "nodes" field should not be used. Use nodeGroups.listNodes instead. """ __args__ = dict() __args__['nodeGroup'] = node_group __args__['project'] = project __args__['zone'] = zone opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts) __ret__ = pulumi.runtime.invoke('google-native:compute/v1:getNodeGroup', __args__, opts=opts, typ=GetNodeGroupResult).value return AwaitableGetNodeGroupResult( autoscaling_policy=pulumi.get(__ret__, 'autoscaling_policy'), creation_timestamp=pulumi.get(__ret__, 'creation_timestamp'), description=pulumi.get(__ret__, 'description'), fingerprint=pulumi.get(__ret__, 'fingerprint'), kind=pulumi.get(__ret__, 'kind'), location_hint=pulumi.get(__ret__, 'location_hint'), maintenance_policy=pulumi.get(__ret__, 'maintenance_policy'), maintenance_window=pulumi.get(__ret__, 'maintenance_window'), name=pulumi.get(__ret__, 'name'), node_template=pulumi.get(__ret__, 'node_template'), self_link=pulumi.get(__ret__, 'self_link'), share_settings=pulumi.get(__ret__, 'share_settings'), size=pulumi.get(__ret__, 'size'), status=pulumi.get(__ret__, 'status'), zone=pulumi.get(__ret__, 'zone')) @_utilities.lift_output_func(get_node_group) def get_node_group_output(node_group: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]] = None, zone: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNodeGroupResult]: """ Returns the specified NodeGroup. Get a list of available NodeGroups by making a list() request. Note: the "nodes" field should not be used. Use nodeGroups.listNodes instead. """ ...
db6b50f1fb9d0dd10bd09ef05b6d826b4e4dc87e
91d1a6968b90d9d461e9a2ece12b465486e3ccc2
/elbv2_read/ssl-policy_list_custom.py
03504aaa68bf723809ae33af5c520e79b7bfbca0
[]
no_license
lxtxl/aws_cli
c31fc994c9a4296d6bac851e680d5adbf7e93481
aaf35df1b7509abf5601d3f09ff1fece482facda
refs/heads/master
2023-02-06T09:00:33.088379
2020-12-27T13:38:45
2020-12-27T13:38:45
318,686,394
0
0
null
null
null
null
UTF-8
Python
false
false
1,992
py
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys

sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import read_no_parameter_custom

# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/elbv2/describe-ssl-policies.html
if __name__ == '__main__':
    """
    """
    parameter_num = len(sys.argv)
    if parameter_num != 3:
        print("config value does not exist")
        print("Usage: python {} <config> <template>".format(sys.argv[0]))
        print("Support template : base, uid")
        sys.exit(1)
    profile_name = sys.argv[1]
    template_name = sys.argv[2]

    if template_name == "base":
        output_name = "table"
        query_name = """\"
SslPolicies[*].[
    Name
    ,SslProtocols[] | join(',',@)
    ,Ciphers[].Name | join(',',@)
]
\""""
    elif template_name == "uid":
        output_name = "text"
        query_name = """\"
\""""
    else:
        print("Usage : {} template does not exist".format(template_name))
        print("Support template : base, uid")
        sys.exit(1)

    change_query_name = query_name.replace("\n", "")
    change_query_name = change_query_name.replace(" ", "")

    add_option_dict = {}

    #######################################################################
    # setting option use
    # ex: add_option_dict["setting_matching_parameter"] = "--owners"
    # ex: add_option_dict["setting_key"] = "owner_id"

    #######################################################################
    # single parameter
    # ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"

    #######################################################################
    # custom parameter
    add_option_dict["output"] = output_name
    add_option_dict["query"] = change_query_name

    read_no_parameter_custom("elbv2", "describe-ssl-policies", add_option_dict)
c7a70ffaa62499a456c09c3f1f3ef7cd0b05a611
09e57dd1374713f06b70d7b37a580130d9bbab0d
/data/p2DJ/New/program/pyquil/startPyquil387.py
06eb003d185853b93d56f5429d08368942bce8d7
[ "BSD-3-Clause" ]
permissive
UCLA-SEAL/QDiff
ad53650034897abb5941e74539e3aee8edb600ab
d968cbc47fe926b7f88b4adf10490f1edd6f8819
refs/heads/main
2023-08-05T04:52:24.961998
2021-09-19T02:56:16
2021-09-19T02:56:16
405,159,939
2
0
null
null
null
null
UTF-8
Python
false
false
1,341
py
# qubit number=2
# total number=20
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np

conn = QVMConnection()

def make_circuit() -> Program:

    prog = Program() # circuit begin

    prog += H(0) # number=1
    prog += H(1) # number=8
    prog += CZ(0,1) # number=9
    prog += H(1) # number=10
    prog += H(1) # number=11
    prog += CZ(0,1) # number=12
    prog += H(1) # number=13
    prog += H(1) # number=17
    prog += CZ(0,1) # number=18
    prog += H(1) # number=19
    prog += CNOT(1,0) # number=14
    prog += X(0) # number=15
    prog += CNOT(1,0) # number=16
    prog += Y(1) # number=6
    prog += X(0) # number=4
    # circuit end

    return prog

def summarise_results(bitstrings) -> dict:
    d = {}
    for l in bitstrings:
        if d.get(l) is None:
            d[l] = 1
        else:
            d[l] = d[l] + 1

    return d

if __name__ == '__main__':
    prog = make_circuit()
    # the circuit acts on qubits 0 and 1, so a two-qubit QVM is required
    # (the original requested '1q-qvm', which cannot run this program)
    qvm = get_qc('2q-qvm')

    results = qvm.run_and_measure(prog,1024)
    bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
    bitstrings = [''.join(map(str, l)) for l in bitstrings]
    writefile = open("../data/startPyquil387.csv","w")
    print(summarise_results(bitstrings),file=writefile)
    writefile.close()
1c84b99c1dbcc510063d63711b2450fd162f9f19
4f408d65db60911f56110c351cb3b64835e0c5fb
/caffe2/python/predictor/predictor_exporter_test.py
61e3fb0a5ad87ad8bfa994bb3bffe2727df0a74f
[ "Apache-2.0", "MIT", "BSD-3-Clause", "LicenseRef-scancode-generic-cla", "BSD-2-Clause" ]
permissive
KeyKy/caffe2_SSD
a02c065aef2dbcfd00faae8be0440d7a4ff0fb76
7235688ea5e212dbe8609d780dd94c8c7d9fef54
refs/heads/master
2021-09-18T14:36:11.247427
2018-07-10T09:59:35
2018-07-10T09:59:35
89,928,918
8
5
null
2018-07-27T02:14:38
2017-05-01T14:04:20
Jupyter Notebook
UTF-8
Python
false
false
4,555
py
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals import tempfile import unittest import numpy as np from caffe2.python import cnn, workspace, core from caffe2.python.predictor_constants import predictor_constants as pc import caffe2.python.predictor.predictor_exporter as pe import caffe2.python.predictor.predictor_py_utils as pred_utils class PredictorExporterTest(unittest.TestCase): def _create_model(self): m = cnn.CNNModelHelper() m.FC("data", "y", dim_in=5, dim_out=10, weight_init=m.XavierInit, bias_init=m.XavierInit) return m def setUp(self): np.random.seed(1) m = self._create_model() self.predictor_export_meta = pe.PredictorExportMeta( predict_net=m.net.Proto(), parameters=[str(b) for b in m.params], inputs=["data"], outputs=["y"], shapes={"y": (1, 10), "data": (1, 5)}, ) workspace.RunNetOnce(m.param_init_net) self.params = { param: workspace.FetchBlob(param) for param in self.predictor_export_meta.parameters} # Reset the workspace, to ensure net creation proceeds as expected. workspace.ResetWorkspace() def test_meta_constructor(self): ''' Test that passing net itself instead of proto works ''' m = self._create_model() pe.PredictorExportMeta( predict_net=m.net, parameters=m.params, inputs=["data"], outputs=["y"], shapes={"y": (1, 10), "data": (1, 5)}, ) def test_meta_net_def_net_runs(self): for param, value in self.params.items(): workspace.FeedBlob(param, value) extra_init_net = core.Net('extra_init') extra_init_net.ConstantFill('data', 'data', value=1.0) pem = pe.PredictorExportMeta( predict_net=self.predictor_export_meta.predict_net, parameters=self.predictor_export_meta.parameters, inputs=self.predictor_export_meta.inputs, outputs=self.predictor_export_meta.outputs, shapes=self.predictor_export_meta.shapes, extra_init_net=extra_init_net, ) db_type = 'minidb' db_file = tempfile.NamedTemporaryFile( delete=False, suffix=".{}".format(db_type)) pe.save_to_db( db_type=db_type, db_destination=db_file.name, predictor_export_meta=pem) workspace.ResetWorkspace() meta_net_def = pe.load_from_db( db_type=db_type, filename=db_file.name, ) self.assertTrue("data" not in workspace.Blobs()) self.assertTrue("y" not in workspace.Blobs()) init_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_INIT_NET_TYPE) # 0-fills externalblobs blobs and runs extra_init_net workspace.RunNetOnce(init_net) self.assertTrue("data" in workspace.Blobs()) self.assertTrue("y" in workspace.Blobs()) print(workspace.FetchBlob("data")) np.testing.assert_array_equal( workspace.FetchBlob("data"), np.ones(shape=(1, 5))) np.testing.assert_array_equal( workspace.FetchBlob("y"), np.zeros(shape=(1, 10))) # Load parameters from DB global_init_net = pred_utils.GetNet(meta_net_def, pc.GLOBAL_INIT_NET_TYPE) workspace.RunNetOnce(global_init_net) # Run the net with a reshaped input and verify we are # producing good numbers (with our custom implementation) workspace.FeedBlob("data", np.random.randn(2, 5).astype(np.float32)) predict_net = pred_utils.GetNet(meta_net_def, pc.PREDICT_NET_TYPE) workspace.RunNetOnce(predict_net) np.testing.assert_array_almost_equal( workspace.FetchBlob("y"), workspace.FetchBlob("data").dot(self.params["y_w"].T) + self.params["y_b"]) def test_db_fails_without_params(self): with self.assertRaises(Exception): for db_type in ["minidb"]: db_file = tempfile.NamedTemporaryFile( delete=False, suffix=".{}".format(db_type)) pe.save_to_db( db_type=db_type, db_destination=db_file.name, 
predictor_export_meta=self.predictor_export_meta)
e79c9f4bec7946370a9fb9d8c7ec083d5df103b7
f02485de5a101f3b69a45b2c4e71bd950ee55eba
/Z_other/del/ncPreprocessNew.py
f15f0e79cda38afbbe7ca272efe385042833c0f5
[]
no_license
newjokker/PyUtil
ef4266b0ca32157f9de6e2cac1b1a10647190d99
32e64be10a6cd2856850f6720d70b4c6e7033f4e
refs/heads/master
2020-11-28T00:19:02.073391
2019-12-23T02:07:40
2019-12-23T02:07:40
229,654,616
0
0
null
null
null
null
UTF-8
Python
false
false
8,221
py
# -*-coding:utf-8 -*-

import os
import sys
import datetime
import xml.dom.minidom
from xml.dom.minidom import Document
# from Decorator.time_it import time_this


class NcFileToTiff(object):
    """Takes nc files and writes out tiff files"""

    def __init__(self):
        # files to be processed
        self.files_to_transform = []
        # output directory
        self.save_dir = None
        # exe used for the conversion
        self.gdal_translate_exe_path = None

    @staticmethod
    def get_file_time(file_path):
        """Get the file time, converting UTC to Beijing time"""
        file_base_name = os.path.basename(file_path)  # file name
        if 'ASI' in file_base_name:
            UTC_str = file_base_name[-13:-3]
            UTC_time = datetime.datetime.strptime(UTC_str, '%Y%m%d%H')
            CHN_time = UTC_time + datetime.timedelta(hours=-8)
            CHN_str = datetime.datetime.strftime(CHN_time, '%Y%m%d%H')
            return CHN_str
        elif 'CHN' in file_base_name:
            return file_base_name[-13:-3]
        else:
            return

    def get_save_path(self, file_path):
        """Derive the output path from the file name"""
        CHN_time_str = self.get_file_time(file_path)  # get Beijing time
        # keep the part of the file name before the product time,
        file_basename = os.path.basename(file_path)
        # output directory
        out_reladir = os.path.join(CHN_time_str[:6], CHN_time_str[:8])
        out_reladir += r'\NRT' if '_NRT_' in file_basename else r'\RT'
        out_reladir += r'\HOR' if '_HOR-' in file_basename else r'\DAY'
        out_path = os.path.join(self.save_dir, out_reladir)
        # create the directory if needed
        if not os.path.exists(out_path):
            os.makedirs(out_path)
        geotif_name = file_basename[:-13] + CHN_time_str + '.tif'  # normalize the time in the basename to Beijing time
        out_geotif = os.path.join(out_path, geotif_name)
        return out_geotif

    def nc_to_tiff(self, nc_file, out_geotif):
        """Invoke cmd to convert the nc file to a tiff file"""
        cmd_str = self.gdal_translate_exe_path + ' -a_srs WGS84 -of GTiff -sds ' + nc_file + ' ' + out_geotif
        os.system(cmd_str)

    # @time_this
    def do_process(self):
        """Main workflow"""
        for each_file in self.files_to_transform:
            if not each_file.endswith('.nc'):  # skip non-nc files
                continue
            # get the output path
            save_path = self.get_save_path(each_file)
            # convert
            self.nc_to_tiff(each_file, save_path)


class Xml_Util(object):

    # ------------------------------ writing xml --------------------------------------------------------------------

    @staticmethod
    def get_document(head_time_str):
        """Return a document, used for writing xml files"""
        document = Document()
        document.appendChild(document.createComment(head_time_str))
        return document

    @staticmethod
    def add_sub_node(document, curNode, nodeKey, nodeValue, nodeAtt=None):
        """Add a child node under the given node"""
        if nodeAtt is None:
            nodeAtt = {}
        try:
            child = document.createElement(nodeKey)
            # write attributes
            for attKey in nodeAtt:
                child.setAttribute(attKey, nodeAtt[attKey])
            # write the value
            if nodeValue:
                child_text = document.createTextNode(nodeValue)
                child.appendChild(child_text)
            # append the node
            curNode.appendChild(child)
            return child
        except Exception:
            print("* error in add node")
            return None

    @staticmethod
    def save_xml(document, xml_path):
        """Save the xml as a local file"""
        with open(xml_path, 'w') as f:
            f.write(document.toprettyxml(indent='\t', encoding='utf-8'))

    # ------------------------------ reading xml --------------------------------------------------------------------

    @staticmethod
    def get_root_node(xml_path):
        """Return the root node used for reading xml"""
        DOMTree = xml.dom.minidom.parse(xml_path)
        root_node = DOMTree.documentElement
        return root_node

    @staticmethod
    def get_info_from_node(eachNode, assign_attr=None):
        """Currently only Element nodes are supported"""
        # -----------------------------------------------------------------
        if eachNode.nodeType != 1:
            return
        # -----------------------------------------------------------------
        element_info = {}
        # -----------------------------------------------------------------
        # collect all attributes
        attr_dict = {}
        if assign_attr:
            assign_attr = set(assign_attr)
            for each_attr in assign_attr:
                attr_dict[each_attr] = eachNode.getAttribute(each_attr)
        else:
            for each_attr in eachNode.attributes.values():
                attr_dict[each_attr.name] = each_attr.value
        element_info['attr'] = attr_dict
        # -----------------------------------------------------------------
        # get the value; note that a node with child elements has no value
        node_value = None
        if len(eachNode.childNodes) == 1:
            if eachNode.childNodes[0].nodeType == 3:
                node_value = eachNode.childNodes[0].nodeValue
        element_info['value'] = node_value
        # -----------------------------------------------------------------
        # child nodes
        # child_nodes = eachNode.childNodes
        # -----------------------------------------------------------------
        return element_info

    # ------------------------------ common helpers -----------------------------------------------------------------

    @staticmethod
    def xml_parser(xml_path, need_tages, attr="identify"):
        """
        Read one level of an xml file into a dict
        :param xml_path: path of the xml file
        :param need_tages: tag names to collect
        :param attr: attribute name whose value becomes the dict key
        :return: {'attr':value}
        """
        def get_key_value(oneNode, attr_temp):
            """read one tag"""
            key = oneNode.getAttribute(attr_temp)
            value = oneNode.childNodes[0].data
            return key, value

        xml_info = {}
        DOMTree = xml.dom.minidom.parse(xml_path)
        collection = DOMTree.documentElement
        # walk the nodes
        for each_tag in need_tages:
            for eachNode in collection.getElementsByTagName(each_tag):
                (info_key, info_value) = get_key_value(eachNode, attr)
                xml_info[info_key] = info_value
        return xml_info


if __name__ == '__main__':

    a = NcFileToTiff()

    # ------------------ read the issue xml and resolve the required paths ------------------------------------
    # xml_path = r'Y:\inputXML\nc\inputXml\201908011259\201908011259.xml'
    xml_path = r'D:\BaiduNetdiskDownload\Algo\201908011239.xml'
    # xml_path = sys.argv[1]

    xml_dict = Xml_Util.xml_parser(xml_path, ['input'])
    save_path_xml = xml_dict['outXMLPath']

    a.save_dir = xml_dict['outFolder']
    a.gdal_translate_exe_path = xml_dict['gdal_translate']
    # a.gdal_translate_exe_path = r'C:\ProgramData\Anaconda2\envs\QingHai\Library\bin\gdal_translate.exe'
    a.files_to_transform = xml_dict['inputFile'].split(',')

    # the original called do_process() once unguarded and then again inside the
    # try block; a single guarded call is what the status handling expects
    try:
        a.do_process()
        status = 'success'
    except Exception:
        status = 'error'

    # ---------------------------------------------------------------------------------
    # write the output xml file
    if status == 'error':
        status, info = '0', 'error'
    else:
        status, info = '1', 'success'

    # initialize
    document = Xml_Util.get_document('ProcessTime:' + str(datetime.datetime.now()))
    # add the root node
    XML = Xml_Util.add_sub_node(document, document, 'XML', '', None)
    # log info
    log = Xml_Util.add_sub_node(document, XML, 'log', '', None)
    Xml_Util.add_sub_node(document, log, 'status', status, None)
    Xml_Util.add_sub_node(document, log, 'info', info, None)
    Xml_Util.save_xml(document, save_path_xml)

    print('ok')
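For reference, a sketch of the xml layout that Xml_Util.xml_parser(xml_path, ['input']) above expects, reconstructed from the keys read in __main__ (all paths below are placeholders):

# Hypothetical input xml consumed by xml_parser above:
#
#   <XML>
#       <input identify="outXMLPath">D:\out\201908011239_out.xml</input>
#       <input identify="outFolder">D:\out\tiff</input>
#       <input identify="gdal_translate">C:\GDAL\bin\gdal_translate.exe</input>
#       <input identify="inputFile">a.nc,b.nc</input>
#   </XML>
#
# xml_parser then returns, e.g.:
#   {'outXMLPath': r'D:\out\201908011239_out.xml', 'outFolder': r'D:\out\tiff',
#    'gdal_translate': r'C:\GDAL\bin\gdal_translate.exe', 'inputFile': 'a.nc,b.nc'}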
a3b82adff7f91cff3f104899144e29691bc4658b
cd40b7cc395f36740000ed4a4144b1c0666ab0fd
/tests/test_hstrat/test_auxiliary_lib/test_parse_from_numeral_system.py
16155f656f410b5cdd20c314308e9e350596ab27
[ "MIT" ]
permissive
mmore500/hstrat
94fd22c86a87a5707590b9398ef679444ed82d6d
b2d2caded1db5e2dc681d9f171d7c74b322c55c3
refs/heads/master
2023-08-31T03:36:44.457576
2023-08-25T14:39:29
2023-08-25T14:39:29
464,531,144
5
2
NOASSERTION
2023-08-25T13:07:52
2022-02-28T15:11:45
Python
UTF-8
Python
false
false
2,824
py
import random import string import pytest from hstrat._auxiliary_lib import ( parse_from_numeral_system, render_to_numeral_system, ) @pytest.fixture def alphabet(): return string.digits + string.ascii_lowercase def test_parse_from_numeral_system(alphabet): assert parse_from_numeral_system("0", alphabet) == 0 assert parse_from_numeral_system("1", alphabet) == 1 assert parse_from_numeral_system("a", alphabet) == 10 assert parse_from_numeral_system("z", alphabet) == 35 assert parse_from_numeral_system("10", alphabet) == 36 assert parse_from_numeral_system("2s", alphabet) == 100 assert parse_from_numeral_system("2n9c", alphabet) == 123456 def test_render_and_parse_numeral_system_consistency(alphabet): for __ in range(100): num = random.randint(0, 1000000) base_num = render_to_numeral_system(num, alphabet) assert parse_from_numeral_system(base_num, alphabet) == num for __ in range(100): test_digits = "".join([random.choice(alphabet) for j in range(10)]) base_num = parse_from_numeral_system(test_digits, alphabet) assert render_to_numeral_system( base_num, alphabet ) == test_digits.lstrip("0") def test_hex_parsing(): assert parse_from_numeral_system("0", "0123456789abcdef") == int("0", 16) assert parse_from_numeral_system("1", "0123456789abcdef") == int("1", 16) assert parse_from_numeral_system("0a3", "0123456789abcdef") == int( "0a3", 16 ) assert parse_from_numeral_system("a3", "0123456789abcdef") == int("a3", 16) assert parse_from_numeral_system("ff", "0123456789abcdef") == int("ff", 16) def test_octal_parsing(): assert parse_from_numeral_system("0", "01234567") == int("0", 8) assert parse_from_numeral_system("1", "01234567") == int("1", 8) assert parse_from_numeral_system("052", "01234567") == int("052", 8) assert parse_from_numeral_system("52", "01234567") == int("52", 8) assert parse_from_numeral_system("777", "01234567") == int("777", 8) def test_binary_parsing(): assert parse_from_numeral_system("0", "01") == int("0", 2) assert parse_from_numeral_system("1", "01") == int("1", 2) assert parse_from_numeral_system("0101101", "01") == int("0101101", 2) assert parse_from_numeral_system("101101", "01") == int("101101", 2) assert parse_from_numeral_system("111", "01") == int("111", 2) def test_decimal_parsing(): assert parse_from_numeral_system("0", "0123456789") == int("0", 10) assert parse_from_numeral_system("1", "0123456789") == int("1", 10) assert parse_from_numeral_system("0123", "0123456789") == int("0123", 10) assert parse_from_numeral_system("123", "0123456789") == int("123", 10) assert parse_from_numeral_system("456", "0123456789") == int("456", 10)
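The tests above pin down the semantics completely; an illustrative implementation consistent with them follows (a sketch only, not the library's actual code in hstrat._auxiliary_lib):

import string

def parse_from_numeral_system_sketch(digits, alphabet):
    # Horner's rule over the positional digits in the given alphabet.
    base = len(alphabet)
    value = 0
    for ch in digits:
        value = value * base + alphabet.index(ch)
    return value

# '2n9c' in base 36: ((2*36 + 23)*36 + 9)*36 + 12 == 123456
assert parse_from_numeral_system_sketch("2n9c", string.digits + string.ascii_lowercase) == 123456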
04d2d4b1fd96ad5507c4890234ccd8f5a3e0cd22
4d2a007fb3810d0b1de676d0193eb24cb0134180
/site.py
3d10e3e877d02cd1ca4193ab1643d66aed39797e
[]
no_license
Rossonero/qlin
241a822402beb4b05fe69f024978148a5758a765
02cc017daf3e540cfd9b5bfe7e021c176d5e90e1
refs/heads/master
2021-01-18T00:26:45.747983
2012-02-13T01:43:12
2012-02-13T01:43:12
null
0
0
null
null
null
null
UTF-8
Python
false
false
446
py
# -*- coding: utf-8 -*-
# NOTE: the imports below are inferred from usage; the original file had none.
import sqlite3 as sq

from django.http import HttpResponse
from django.template import Context
from django.template.loader import get_template


def Site_infor(request):
    '''
    Front-end display of site information.
    Lays out all of the stored information.
    '''
    t = get_template('site_infor.html')
    # fetch the information to display from the database
    cx = sq.connect('store/site.sqlite')
    cu = cx.cursor()  # was self.cx.cursor(), but a module-level view has no self
    # run the lookup
    strr = "select * from site"
    cu.execute(strr)
    print(cu.fetchone())
    html = t.render(Context({}))
    return HttpResponse(html)
8308e473e1a26f517a95db64f71ad6b5fee0cbe0
faca8866b3c8aca30a915d8cb2748766557ed808
/object_detection_updata/builders/box_predictor_builder.py
7fe88e08bc63c355e4f03b45417646d6635acdbc
[]
no_license
yongqis/proposal_joint_retireval
6899d80f8fb94569c7b60764f6e7de74bcfa9cc8
97b086c62473ab1a5baf45743535fce70c3f8c20
refs/heads/master
2020-05-25T19:07:22.946008
2019-06-03T07:09:04
2019-06-03T07:09:09
187,943,867
0
0
null
null
null
null
UTF-8
Python
false
false
48,966
py
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Function to build box predictor from configuration.""" import collections import tensorflow as tf from object_detection_updata.predictors import convolutional_box_predictor from object_detection_updata.predictors import convolutional_keras_box_predictor from object_detection_updata.predictors import mask_rcnn_box_predictor from object_detection_updata.predictors import mask_rcnn_keras_box_predictor from object_detection_updata.predictors import rfcn_box_predictor from object_detection_updata.predictors import rfcn_keras_box_predictor from object_detection_updata.predictors.heads import box_head from object_detection_updata.predictors.heads import class_head from object_detection_updata.predictors.heads import keras_box_head from object_detection_updata.predictors.heads import keras_class_head from object_detection_updata.predictors.heads import keras_mask_head from object_detection_updata.predictors.heads import mask_head from object_detection_updata.protos import box_predictor_pb2 def build_convolutional_box_predictor(is_training, num_classes, conv_hyperparams_fn, min_depth, max_depth, num_layers_before_predictor, use_dropout, dropout_keep_prob, kernel_size, box_code_size, apply_sigmoid_to_scores=False, add_background_class=True, class_prediction_bias_init=0.0, use_depthwise=False, box_encodings_clip_range=None): """Builds the ConvolutionalBoxPredictor from the arguments. Args: is_training: Indicates whether the BoxPredictor is in training mode. num_classes: number of classes. Note that num_classes *does not* include the background category, so if groundtruth labels take values in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the assigned classification targets can range from {0,... K}). conv_hyperparams_fn: A function to generate tf-slim arg_scope with hyperparameters for convolution ops. min_depth: Minimum feature depth prior to predicting box encodings and class predictions. max_depth: Maximum feature depth prior to predicting box encodings and class predictions. If max_depth is set to 0, no additional feature map will be inserted before location and class predictions. num_layers_before_predictor: Number of the additional conv layers before the predictor. use_dropout: Option to use dropout or not. Note that a single dropout op is applied here prior to both box and class predictions, which stands in contrast to the ConvolutionalBoxPredictor below. dropout_keep_prob: Keep probability for dropout. This is only used if use_dropout is True. kernel_size: Size of final convolution kernel. If the spatial resolution of the feature map is smaller than the kernel size, then the kernel size is automatically set to be min(feature_width, feature_height). box_code_size: Size of encoding for each box. apply_sigmoid_to_scores: If True, apply the sigmoid on the output class_predictions. 
add_background_class: Whether to add an implicit background class. class_prediction_bias_init: Constant value to initialize bias of the last conv2d layer before class prediction. use_depthwise: Whether to use depthwise convolutions for prediction steps. Default is False. box_encodings_clip_range: Min and max values for clipping the box_encodings. Returns: A ConvolutionalBoxPredictor class. """ box_prediction_head = box_head.ConvolutionalBoxHead( is_training=is_training, box_code_size=box_code_size, kernel_size=kernel_size, use_depthwise=use_depthwise, box_encodings_clip_range=box_encodings_clip_range) class_prediction_head = class_head.ConvolutionalClassHead( is_training=is_training, num_class_slots=num_classes + 1 if add_background_class else num_classes, use_dropout=use_dropout, dropout_keep_prob=dropout_keep_prob, kernel_size=kernel_size, apply_sigmoid_to_scores=apply_sigmoid_to_scores, class_prediction_bias_init=class_prediction_bias_init, use_depthwise=use_depthwise) other_heads = {} return convolutional_box_predictor.ConvolutionalBoxPredictor( is_training=is_training, num_classes=num_classes, box_prediction_head=box_prediction_head, class_prediction_head=class_prediction_head, other_heads=other_heads, conv_hyperparams_fn=conv_hyperparams_fn, num_layers_before_predictor=num_layers_before_predictor, min_depth=min_depth, max_depth=max_depth) def build_convolutional_keras_box_predictor(is_training, num_classes, conv_hyperparams, freeze_batchnorm, inplace_batchnorm_update, num_predictions_per_location_list, min_depth, max_depth, num_layers_before_predictor, use_dropout, dropout_keep_prob, kernel_size, box_code_size, add_background_class=True, class_prediction_bias_init=0.0, use_depthwise=False, box_encodings_clip_range=None, name='BoxPredictor'): """Builds the Keras ConvolutionalBoxPredictor from the arguments. Args: is_training: Indicates whether the BoxPredictor is in training mode. num_classes: number of classes. Note that num_classes *does not* include the background category, so if groundtruth labels take values in {0, 1, .., K-1}, num_classes=K (and not K+1, even though the assigned classification targets can range from {0,... K}). conv_hyperparams: A `hyperparams_builder.KerasLayerHyperparams` object containing hyperparameters for convolution ops. freeze_batchnorm: Whether to freeze batch norm parameters during training or not. When training with a small batch size (e.g. 1), it is desirable to freeze batch norm update and use pretrained batch norm params. inplace_batchnorm_update: Whether to update batch norm moving average values inplace. When this is false train op must add a control dependency on tf.graphkeys.UPDATE_OPS collection in order to update batch norm statistics. num_predictions_per_location_list: A list of integers representing the number of box predictions to be made per spatial location for each feature map. min_depth: Minimum feature depth prior to predicting box encodings and class predictions. max_depth: Maximum feature depth prior to predicting box encodings and class predictions. If max_depth is set to 0, no additional feature map will be inserted before location and class predictions. num_layers_before_predictor: Number of the additional conv layers before the predictor. use_dropout: Option to use dropout or not. Note that a single dropout op is applied here prior to both box and class predictions, which stands in contrast to the ConvolutionalBoxPredictor below. dropout_keep_prob: Keep probability for dropout. This is only used if use_dropout is True. 
      kernel_size: Size of final convolution kernel. If the spatial resolution
      of the feature map is smaller than the kernel size, then the kernel size
      is automatically set to be min(feature_width, feature_height).
    box_code_size: Size of encoding for each box.
    add_background_class: Whether to add an implicit background class.
    class_prediction_bias_init: constant value to initialize bias of the last
      conv2d layer before class prediction.
    use_depthwise: Whether to use depthwise convolutions for prediction steps.
      Default is False.
    box_encodings_clip_range: Min and max values for clipping the
      box_encodings.
    name: A string name scope to assign to the box predictor. If `None`, Keras
      will auto-generate one from the class name.

  Returns:
    A Keras ConvolutionalBoxPredictor class.
  """
  box_prediction_heads = []
  class_prediction_heads = []
  other_heads = {}

  for stack_index, num_predictions_per_location in enumerate(
      num_predictions_per_location_list):
    box_prediction_heads.append(
        keras_box_head.ConvolutionalBoxHead(
            is_training=is_training,
            box_code_size=box_code_size,
            kernel_size=kernel_size,
            conv_hyperparams=conv_hyperparams,
            freeze_batchnorm=freeze_batchnorm,
            num_predictions_per_location=num_predictions_per_location,
            use_depthwise=use_depthwise,
            box_encodings_clip_range=box_encodings_clip_range,
            name='ConvolutionalBoxHead_%d' % stack_index))
    class_prediction_heads.append(
        keras_class_head.ConvolutionalClassHead(
            is_training=is_training,
            num_class_slots=(
                num_classes + 1 if add_background_class else num_classes),
            use_dropout=use_dropout,
            dropout_keep_prob=dropout_keep_prob,
            kernel_size=kernel_size,
            conv_hyperparams=conv_hyperparams,
            freeze_batchnorm=freeze_batchnorm,
            num_predictions_per_location=num_predictions_per_location,
            class_prediction_bias_init=class_prediction_bias_init,
            use_depthwise=use_depthwise,
            name='ConvolutionalClassHead_%d' % stack_index))

  return convolutional_keras_box_predictor.ConvolutionalBoxPredictor(
      is_training=is_training,
      num_classes=num_classes,
      box_prediction_heads=box_prediction_heads,
      class_prediction_heads=class_prediction_heads,
      other_heads=other_heads,
      conv_hyperparams=conv_hyperparams,
      num_layers_before_predictor=num_layers_before_predictor,
      min_depth=min_depth,
      max_depth=max_depth,
      freeze_batchnorm=freeze_batchnorm,
      inplace_batchnorm_update=inplace_batchnorm_update,
      name=name)


def build_weight_shared_convolutional_box_predictor(
    is_training,
    num_classes,
    conv_hyperparams_fn,
    depth,
    num_layers_before_predictor,
    box_code_size,
    kernel_size=3,
    add_background_class=True,
    class_prediction_bias_init=0.0,
    use_dropout=False,
    dropout_keep_prob=0.8,
    share_prediction_tower=False,
    apply_batch_norm=True,
    use_depthwise=False,
    score_converter_fn=tf.identity,
    box_encodings_clip_range=None):
  """Builds and returns a WeightSharedConvolutionalBoxPredictor class.

  Args:
    is_training: Indicates whether the BoxPredictor is in training mode.
    num_classes: number of classes.  Note that num_classes *does not* include
      the background category, so if groundtruth labels take values in
      {0, 1, .., K-1}, num_classes=K (and not K+1, even though the assigned
      classification targets can range from {0,... K}).
    conv_hyperparams_fn: A function to generate tf-slim arg_scope with
      hyperparameters for convolution ops.
    depth: depth of conv layers.
    num_layers_before_predictor: Number of the additional conv layers before
      the predictor.
    box_code_size: Size of encoding for each box.
    kernel_size: Size of final convolution kernel.
    add_background_class: Whether to add an implicit background class.
    class_prediction_bias_init: constant value to initialize bias of the last
      conv2d layer before class prediction.
    use_dropout: Whether to apply dropout to class prediction head.
    dropout_keep_prob: Probability of keeping activations.
    share_prediction_tower: Whether to share the multi-layer tower between box
      prediction and class prediction heads.
    apply_batch_norm: Whether to apply batch normalization to conv layers in
      this predictor.
    use_depthwise: Whether to use depthwise separable conv2d instead of conv2d.
    score_converter_fn: Callable score converter to perform elementwise op on
      class scores.
    box_encodings_clip_range: Min and max values for clipping the
      box_encodings.

  Returns:
    A WeightSharedConvolutionalBoxPredictor class.
  """
  box_prediction_head = box_head.WeightSharedConvolutionalBoxHead(
      box_code_size=box_code_size,
      kernel_size=kernel_size,
      use_depthwise=use_depthwise,
      box_encodings_clip_range=box_encodings_clip_range)
  class_prediction_head = (
      class_head.WeightSharedConvolutionalClassHead(
          num_class_slots=(
              num_classes + 1 if add_background_class else num_classes),
          kernel_size=kernel_size,
          class_prediction_bias_init=class_prediction_bias_init,
          use_dropout=use_dropout,
          dropout_keep_prob=dropout_keep_prob,
          use_depthwise=use_depthwise,
          score_converter_fn=score_converter_fn))
  other_heads = {}
  return convolutional_box_predictor.WeightSharedConvolutionalBoxPredictor(
      is_training=is_training,
      num_classes=num_classes,
      box_prediction_head=box_prediction_head,
      class_prediction_head=class_prediction_head,
      other_heads=other_heads,
      conv_hyperparams_fn=conv_hyperparams_fn,
      depth=depth,
      num_layers_before_predictor=num_layers_before_predictor,
      kernel_size=kernel_size,
      apply_batch_norm=apply_batch_norm,
      share_prediction_tower=share_prediction_tower,
      use_depthwise=use_depthwise)


def build_weight_shared_convolutional_keras_box_predictor(
    is_training,
    num_classes,
    conv_hyperparams,
    freeze_batchnorm,
    inplace_batchnorm_update,
    num_predictions_per_location_list,
    depth,
    num_layers_before_predictor,
    box_code_size,
    kernel_size=3,
    add_background_class=True,
    class_prediction_bias_init=0.0,
    use_dropout=False,
    dropout_keep_prob=0.8,
    share_prediction_tower=False,
    apply_batch_norm=True,
    use_depthwise=False,
    score_converter_fn=tf.identity,
    box_encodings_clip_range=None,
    name='WeightSharedConvolutionalBoxPredictor'):
  """Builds the Keras WeightSharedConvolutionalBoxPredictor from the arguments.

  Args:
    is_training: Indicates whether the BoxPredictor is in training mode.
    num_classes: number of classes.  Note that num_classes *does not* include
      the background category, so if groundtruth labels take values in
      {0, 1, .., K-1}, num_classes=K (and not K+1, even though the assigned
      classification targets can range from {0,... K}).
    conv_hyperparams: A `hyperparams_builder.KerasLayerHyperparams` object
      containing hyperparameters for convolution ops.
    freeze_batchnorm: Whether to freeze batch norm parameters during
      training or not. When training with a small batch size (e.g. 1), it is
      desirable to freeze batch norm update and use pretrained batch norm
      params.
    inplace_batchnorm_update: Whether to update batch norm moving average
      values inplace. When this is false train op must add a control
      dependency on tf.graphkeys.UPDATE_OPS collection in order to update
      batch norm statistics.
    num_predictions_per_location_list: A list of integers representing the
      number of box predictions to be made per spatial location for each
      feature map.
    depth: depth of conv layers.
    num_layers_before_predictor: Number of the additional conv layers before
      the predictor.
    box_code_size: Size of encoding for each box.
    kernel_size: Size of final convolution kernel.
    add_background_class: Whether to add an implicit background class.
    class_prediction_bias_init: constant value to initialize bias of the last
      conv2d layer before class prediction.
    use_dropout: Whether to apply dropout to class prediction head.
    dropout_keep_prob: Probability of keeping activations.
    share_prediction_tower: Whether to share the multi-layer tower between box
      prediction and class prediction heads.
    apply_batch_norm: Whether to apply batch normalization to conv layers in
      this predictor.
    use_depthwise: Whether to use depthwise separable conv2d instead of conv2d.
    score_converter_fn: Callable score converter to perform elementwise op on
      class scores.
    box_encodings_clip_range: Min and max values for clipping the
      box_encodings.
    name: A string name scope to assign to the box predictor. If `None`, Keras
      will auto-generate one from the class name.

  Returns:
    A Keras WeightSharedConvolutionalBoxPredictor class.
  """
  if len(set(num_predictions_per_location_list)) > 1:
    raise ValueError('num predictions per location must be same for all '
                     'feature maps, found: {}'.format(
                         num_predictions_per_location_list))
  num_predictions_per_location = num_predictions_per_location_list[0]

  box_prediction_head = keras_box_head.WeightSharedConvolutionalBoxHead(
      box_code_size=box_code_size,
      kernel_size=kernel_size,
      conv_hyperparams=conv_hyperparams,
      num_predictions_per_location=num_predictions_per_location,
      use_depthwise=use_depthwise,
      box_encodings_clip_range=box_encodings_clip_range,
      name='WeightSharedConvolutionalBoxHead')
  class_prediction_head = keras_class_head.WeightSharedConvolutionalClassHead(
      num_class_slots=(
          num_classes + 1 if add_background_class else num_classes),
      use_dropout=use_dropout,
      dropout_keep_prob=dropout_keep_prob,
      kernel_size=kernel_size,
      conv_hyperparams=conv_hyperparams,
      num_predictions_per_location=num_predictions_per_location,
      class_prediction_bias_init=class_prediction_bias_init,
      use_depthwise=use_depthwise,
      score_converter_fn=score_converter_fn,
      name='WeightSharedConvolutionalClassHead')
  other_heads = {}
  return (
      convolutional_keras_box_predictor.WeightSharedConvolutionalBoxPredictor(
          is_training=is_training,
          num_classes=num_classes,
          box_prediction_head=box_prediction_head,
          class_prediction_head=class_prediction_head,
          other_heads=other_heads,
          conv_hyperparams=conv_hyperparams,
          depth=depth,
          num_layers_before_predictor=num_layers_before_predictor,
          freeze_batchnorm=freeze_batchnorm,
          inplace_batchnorm_update=inplace_batchnorm_update,
          kernel_size=kernel_size,
          apply_batch_norm=apply_batch_norm,
          share_prediction_tower=share_prediction_tower,
          use_depthwise=use_depthwise,
          name=name))


def build_mask_rcnn_keras_box_predictor(is_training,
                                        num_classes,
                                        fc_hyperparams,
                                        freeze_batchnorm,
                                        use_dropout,
                                        dropout_keep_prob,
                                        box_code_size,
                                        add_background_class=True,
                                        share_box_across_classes=False,
                                        predict_instance_masks=False,
                                        conv_hyperparams=None,
                                        mask_height=14,
                                        mask_width=14,
                                        mask_prediction_num_conv_layers=2,
                                        mask_prediction_conv_depth=256,
                                        masks_are_class_agnostic=False,
                                        convolve_then_upsample_masks=False):
  """Builds and returns a MaskRCNNKerasBoxPredictor class.

  Args:
    is_training: Indicates whether the BoxPredictor is in training mode.
    num_classes: number of classes.  Note that num_classes *does not* include
      the background category, so if groundtruth labels take values in
      {0, 1, .., K-1}, num_classes=K (and not K+1, even though the assigned
      classification targets can range from {0,... K}).
    fc_hyperparams: A `hyperparams_builder.KerasLayerHyperparams` object
      containing hyperparameters for fully connected dense ops.
    freeze_batchnorm: Whether to freeze batch norm parameters during
      training or not. When training with a small batch size (e.g. 1), it is
      desirable to freeze batch norm update and use pretrained batch norm
      params.
    use_dropout: Option to use dropout or not.  Note that a single dropout
      op is applied here prior to both box and class predictions, which stands
      in contrast to the ConvolutionalBoxPredictor below.
    dropout_keep_prob: Keep probability for dropout.
      This is only used if use_dropout is True.
    box_code_size: Size of encoding for each box.
    add_background_class: Whether to add an implicit background class.
    share_box_across_classes: Whether to share boxes across classes rather
      than use a different box for each class.
    predict_instance_masks: If True, will add a third stage mask prediction
      to the returned class.
    conv_hyperparams: A `hyperparams_builder.KerasLayerHyperparams` object
      containing hyperparameters for convolution ops.
    mask_height: Desired output mask height. The default value is 14.
    mask_width: Desired output mask width. The default value is 14.
    mask_prediction_num_conv_layers: Number of convolution layers applied to
      the image_features in mask prediction branch.
    mask_prediction_conv_depth: The depth for the first conv2d_transpose op
      applied to the image_features in the mask prediction branch. If set
      to 0, the depth of the convolution layers will be automatically chosen
      based on the number of object classes and the number of channels in the
      image features.
    masks_are_class_agnostic: Boolean determining if the mask-head is
      class-agnostic or not.
    convolve_then_upsample_masks: Whether to apply convolutions on mask
      features before upsampling using nearest neighbor resizing. Otherwise,
      mask features are resized to [`mask_height`, `mask_width`] using
      bilinear resizing before applying convolutions.

  Returns:
    A MaskRCNNKerasBoxPredictor class.
  """
  box_prediction_head = keras_box_head.MaskRCNNBoxHead(
      is_training=is_training,
      num_classes=num_classes,
      fc_hyperparams=fc_hyperparams,
      freeze_batchnorm=freeze_batchnorm,
      use_dropout=use_dropout,
      dropout_keep_prob=dropout_keep_prob,
      box_code_size=box_code_size,
      share_box_across_classes=share_box_across_classes)
  class_prediction_head = keras_class_head.MaskRCNNClassHead(
      is_training=is_training,
      num_class_slots=num_classes + 1 if add_background_class else num_classes,
      fc_hyperparams=fc_hyperparams,
      freeze_batchnorm=freeze_batchnorm,
      use_dropout=use_dropout,
      dropout_keep_prob=dropout_keep_prob)
  third_stage_heads = {}
  if predict_instance_masks:
    third_stage_heads[
        mask_rcnn_box_predictor.
        MASK_PREDICTIONS] = keras_mask_head.MaskRCNNMaskHead(
            is_training=is_training,
            num_classes=num_classes,
            conv_hyperparams=conv_hyperparams,
            freeze_batchnorm=freeze_batchnorm,
            mask_height=mask_height,
            mask_width=mask_width,
            mask_prediction_num_conv_layers=mask_prediction_num_conv_layers,
            mask_prediction_conv_depth=mask_prediction_conv_depth,
            masks_are_class_agnostic=masks_are_class_agnostic,
            convolve_then_upsample=convolve_then_upsample_masks)
  return mask_rcnn_keras_box_predictor.MaskRCNNKerasBoxPredictor(
      is_training=is_training,
      num_classes=num_classes,
      freeze_batchnorm=freeze_batchnorm,
      box_prediction_head=box_prediction_head,
      class_prediction_head=class_prediction_head,
      third_stage_heads=third_stage_heads)


def build_mask_rcnn_box_predictor(is_training,
                                  num_classes,
                                  fc_hyperparams_fn,
                                  use_dropout,
                                  dropout_keep_prob,
                                  box_code_size,
                                  add_background_class=True,
                                  share_box_across_classes=False,
                                  predict_instance_masks=False,
                                  conv_hyperparams_fn=None,
                                  mask_height=14,
                                  mask_width=14,
                                  mask_prediction_num_conv_layers=2,
                                  mask_prediction_conv_depth=256,
                                  masks_are_class_agnostic=False,
                                  convolve_then_upsample_masks=False):
  """Builds and returns a MaskRCNNBoxPredictor class.

  Args:
    is_training: Indicates whether the BoxPredictor is in training mode.
    num_classes: number of classes.  Note that num_classes *does not* include
      the background category, so if groundtruth labels take values in
      {0, 1, .., K-1}, num_classes=K (and not K+1, even though the assigned
      classification targets can range from {0,... K}).
    fc_hyperparams_fn: A function to generate tf-slim arg_scope with
      hyperparameters for fully connected ops.
    use_dropout: Option to use dropout or not.  Note that a single dropout
      op is applied here prior to both box and class predictions, which stands
      in contrast to the ConvolutionalBoxPredictor below.
    dropout_keep_prob: Keep probability for dropout.
      This is only used if use_dropout is True.
    box_code_size: Size of encoding for each box.
    add_background_class: Whether to add an implicit background class.
    share_box_across_classes: Whether to share boxes across classes rather
      than use a different box for each class.
    predict_instance_masks: If True, will add a third stage mask prediction
      to the returned class.
    conv_hyperparams_fn: A function to generate tf-slim arg_scope with
      hyperparameters for convolution ops.
    mask_height: Desired output mask height. The default value is 14.
    mask_width: Desired output mask width. The default value is 14.
    mask_prediction_num_conv_layers: Number of convolution layers applied to
      the image_features in mask prediction branch.
    mask_prediction_conv_depth: The depth for the first conv2d_transpose op
      applied to the image_features in the mask prediction branch. If set
      to 0, the depth of the convolution layers will be automatically chosen
      based on the number of object classes and the number of channels in the
      image features.
    masks_are_class_agnostic: Boolean determining if the mask-head is
      class-agnostic or not.
    convolve_then_upsample_masks: Whether to apply convolutions on mask
      features before upsampling using nearest neighbor resizing. Otherwise,
      mask features are resized to [`mask_height`, `mask_width`] using
      bilinear resizing before applying convolutions.

  Returns:
    A MaskRCNNBoxPredictor class.
  """
  box_prediction_head = box_head.MaskRCNNBoxHead(
      is_training=is_training,
      num_classes=num_classes,
      fc_hyperparams_fn=fc_hyperparams_fn,
      use_dropout=use_dropout,
      dropout_keep_prob=dropout_keep_prob,
      box_code_size=box_code_size,
      share_box_across_classes=share_box_across_classes)
  class_prediction_head = class_head.MaskRCNNClassHead(
      is_training=is_training,
      num_class_slots=num_classes + 1 if add_background_class else num_classes,
      fc_hyperparams_fn=fc_hyperparams_fn,
      use_dropout=use_dropout,
      dropout_keep_prob=dropout_keep_prob)
  third_stage_heads = {}
  if predict_instance_masks:
    third_stage_heads[
        mask_rcnn_box_predictor.
        MASK_PREDICTIONS] = mask_head.MaskRCNNMaskHead(
            num_classes=num_classes,
            conv_hyperparams_fn=conv_hyperparams_fn,
            mask_height=mask_height,
            mask_width=mask_width,
            mask_prediction_num_conv_layers=mask_prediction_num_conv_layers,
            mask_prediction_conv_depth=mask_prediction_conv_depth,
            masks_are_class_agnostic=masks_are_class_agnostic,
            convolve_then_upsample=convolve_then_upsample_masks)
  return mask_rcnn_box_predictor.MaskRCNNBoxPredictor(
      is_training=is_training,
      num_classes=num_classes,
      box_prediction_head=box_prediction_head,
      class_prediction_head=class_prediction_head,
      third_stage_heads=third_stage_heads)


def build_score_converter(score_converter_config, is_training):
  """Builds score converter based on the config.

  Builds one of [tf.identity, tf.sigmoid] score converters based on the config
  and whether the BoxPredictor is for training or inference.

  Args:
    score_converter_config:
      box_predictor_pb2.WeightSharedConvolutionalBoxPredictor.score_converter.
    is_training: Indicates whether the BoxPredictor is in training mode.

  Returns:
    Callable score converter op.

  Raises:
    ValueError: On unknown score converter.
  """
  if score_converter_config == (
      box_predictor_pb2.WeightSharedConvolutionalBoxPredictor.IDENTITY):
    return tf.identity
  if score_converter_config == (
      box_predictor_pb2.WeightSharedConvolutionalBoxPredictor.SIGMOID):
    return tf.identity if is_training else tf.sigmoid
  raise ValueError('Unknown score converter.')


BoxEncodingsClipRange = collections.namedtuple('BoxEncodingsClipRange',
                                               ['min', 'max'])


def build(argscope_fn, box_predictor_config, is_training, num_classes,
          add_background_class=True):
  """Builds box predictor based on the configuration.

  Builds box predictor based on the configuration. See box_predictor.proto for
  configurable options. Also, see box_predictor.py for more details.

  Args:
    argscope_fn: A function that takes the following inputs:
        * hyperparams_pb2.Hyperparams proto
        * a boolean indicating if the model is in training mode.
      and returns a tf slim argscope for Conv and FC hyperparameters.
    box_predictor_config: box_predictor_pb2.BoxPredictor proto containing
      configuration.
    is_training: Whether the model is in training mode.
    num_classes: Number of classes to predict.
    add_background_class: Whether to add an implicit background class.

  Returns:
    box_predictor: box_predictor.BoxPredictor object.

  Raises:
    ValueError: On unknown box predictor.
  """
  if not isinstance(box_predictor_config, box_predictor_pb2.BoxPredictor):
    raise ValueError('box_predictor_config not of type '
                     'box_predictor_pb2.BoxPredictor.')

  box_predictor_oneof = box_predictor_config.WhichOneof('box_predictor_oneof')

  if box_predictor_oneof == 'convolutional_box_predictor':
    config_box_predictor = box_predictor_config.convolutional_box_predictor
    conv_hyperparams_fn = argscope_fn(config_box_predictor.conv_hyperparams,
                                      is_training)
    # Optionally apply clipping to box encodings, when box_encodings_clip_range
    # is set.
    box_encodings_clip_range = None
    if config_box_predictor.HasField('box_encodings_clip_range'):
      box_encodings_clip_range = BoxEncodingsClipRange(
          min=config_box_predictor.box_encodings_clip_range.min,
          max=config_box_predictor.box_encodings_clip_range.max)
    return build_convolutional_box_predictor(
        is_training=is_training,
        num_classes=num_classes,
        add_background_class=add_background_class,
        conv_hyperparams_fn=conv_hyperparams_fn,
        use_dropout=config_box_predictor.use_dropout,
        dropout_keep_prob=config_box_predictor.dropout_keep_probability,
        box_code_size=config_box_predictor.box_code_size,
        kernel_size=config_box_predictor.kernel_size,
        num_layers_before_predictor=(
            config_box_predictor.num_layers_before_predictor),
        min_depth=config_box_predictor.min_depth,
        max_depth=config_box_predictor.max_depth,
        apply_sigmoid_to_scores=config_box_predictor.apply_sigmoid_to_scores,
        class_prediction_bias_init=(
            config_box_predictor.class_prediction_bias_init),
        use_depthwise=config_box_predictor.use_depthwise,
        box_encodings_clip_range=box_encodings_clip_range)

  if box_predictor_oneof == 'weight_shared_convolutional_box_predictor':
    config_box_predictor = (
        box_predictor_config.weight_shared_convolutional_box_predictor)
    conv_hyperparams_fn = argscope_fn(config_box_predictor.conv_hyperparams,
                                      is_training)
    apply_batch_norm = config_box_predictor.conv_hyperparams.HasField(
        'batch_norm')
    # During training phase, logits are used to compute the loss. Only apply
    # sigmoid at inference to make the inference graph TPU friendly.
    score_converter_fn = build_score_converter(
        config_box_predictor.score_converter, is_training)
    # Optionally apply clipping to box encodings, when box_encodings_clip_range
    # is set.
    box_encodings_clip_range = None
    if config_box_predictor.HasField('box_encodings_clip_range'):
      box_encodings_clip_range = BoxEncodingsClipRange(
          min=config_box_predictor.box_encodings_clip_range.min,
          max=config_box_predictor.box_encodings_clip_range.max)
    return build_weight_shared_convolutional_box_predictor(
        is_training=is_training,
        num_classes=num_classes,
        add_background_class=add_background_class,
        conv_hyperparams_fn=conv_hyperparams_fn,
        depth=config_box_predictor.depth,
        num_layers_before_predictor=(
            config_box_predictor.num_layers_before_predictor),
        box_code_size=config_box_predictor.box_code_size,
        kernel_size=config_box_predictor.kernel_size,
        class_prediction_bias_init=(
            config_box_predictor.class_prediction_bias_init),
        use_dropout=config_box_predictor.use_dropout,
        dropout_keep_prob=config_box_predictor.dropout_keep_probability,
        share_prediction_tower=config_box_predictor.share_prediction_tower,
        apply_batch_norm=apply_batch_norm,
        use_depthwise=config_box_predictor.use_depthwise,
        score_converter_fn=score_converter_fn,
        box_encodings_clip_range=box_encodings_clip_range)

  if box_predictor_oneof == 'mask_rcnn_box_predictor':
    config_box_predictor = box_predictor_config.mask_rcnn_box_predictor
    fc_hyperparams_fn = argscope_fn(config_box_predictor.fc_hyperparams,
                                    is_training)
    conv_hyperparams_fn = None
    if config_box_predictor.HasField('conv_hyperparams'):
      conv_hyperparams_fn = argscope_fn(
          config_box_predictor.conv_hyperparams, is_training)
    return build_mask_rcnn_box_predictor(
        is_training=is_training,
        num_classes=num_classes,
        add_background_class=add_background_class,
        fc_hyperparams_fn=fc_hyperparams_fn,
        use_dropout=config_box_predictor.use_dropout,
        dropout_keep_prob=config_box_predictor.dropout_keep_probability,
        box_code_size=config_box_predictor.box_code_size,
        share_box_across_classes=(
            config_box_predictor.share_box_across_classes),
        predict_instance_masks=config_box_predictor.predict_instance_masks,
        conv_hyperparams_fn=conv_hyperparams_fn,
        mask_height=config_box_predictor.mask_height,
        mask_width=config_box_predictor.mask_width,
        mask_prediction_num_conv_layers=(
            config_box_predictor.mask_prediction_num_conv_layers),
        mask_prediction_conv_depth=(
            config_box_predictor.mask_prediction_conv_depth),
        masks_are_class_agnostic=(
            config_box_predictor.masks_are_class_agnostic),
        convolve_then_upsample_masks=(
            config_box_predictor.convolve_then_upsample_masks))

  if box_predictor_oneof == 'rfcn_box_predictor':
    config_box_predictor = box_predictor_config.rfcn_box_predictor
    conv_hyperparams_fn = argscope_fn(config_box_predictor.conv_hyperparams,
                                      is_training)
    box_predictor_object = rfcn_box_predictor.RfcnBoxPredictor(
        is_training=is_training,
        num_classes=num_classes,
        conv_hyperparams_fn=conv_hyperparams_fn,
        crop_size=[config_box_predictor.crop_height,
                   config_box_predictor.crop_width],
        num_spatial_bins=[config_box_predictor.num_spatial_bins_height,
                          config_box_predictor.num_spatial_bins_width],
        depth=config_box_predictor.depth,
        box_code_size=config_box_predictor.box_code_size)
    return box_predictor_object
  raise ValueError('Unknown box predictor: {}'.format(box_predictor_oneof))


def build_keras(hyperparams_fn, freeze_batchnorm, inplace_batchnorm_update,
                num_predictions_per_location_list, box_predictor_config,
                is_training, num_classes, add_background_class=True):
  """Builds a Keras-based box predictor based on the configuration.

  Builds Keras-based box predictor based on the configuration. See
  box_predictor.proto for configurable options. Also, see box_predictor.py
  for more details.

  Args:
    hyperparams_fn: A function that takes a hyperparams_pb2.Hyperparams
      proto and returns a `hyperparams_builder.KerasLayerHyperparams`
      for Conv or FC hyperparameters.
    freeze_batchnorm: Whether to freeze batch norm parameters during
      training or not. When training with a small batch size (e.g. 1), it is
      desirable to freeze batch norm update and use pretrained batch norm
      params.
    inplace_batchnorm_update: Whether to update batch norm moving average
      values inplace. When this is false train op must add a control
      dependency on tf.graphkeys.UPDATE_OPS collection in order to update
      batch norm statistics.
    num_predictions_per_location_list: A list of integers representing the
      number of box predictions to be made per spatial location for each
      feature map.
    box_predictor_config: box_predictor_pb2.BoxPredictor proto containing
      configuration.
    is_training: Whether the model is in training mode.
    num_classes: Number of classes to predict.
    add_background_class: Whether to add an implicit background class.

  Returns:
    box_predictor: box_predictor.KerasBoxPredictor object.

  Raises:
    ValueError: On unknown box predictor, or one with no Keras box predictor.
  """
  if not isinstance(box_predictor_config, box_predictor_pb2.BoxPredictor):
    raise ValueError('box_predictor_config not of type '
                     'box_predictor_pb2.BoxPredictor.')

  box_predictor_oneof = box_predictor_config.WhichOneof('box_predictor_oneof')

  if box_predictor_oneof == 'convolutional_box_predictor':
    config_box_predictor = box_predictor_config.convolutional_box_predictor
    conv_hyperparams = hyperparams_fn(
        config_box_predictor.conv_hyperparams)
    # Optionally apply clipping to box encodings, when box_encodings_clip_range
    # is set.
    box_encodings_clip_range = None
    if config_box_predictor.HasField('box_encodings_clip_range'):
      box_encodings_clip_range = BoxEncodingsClipRange(
          min=config_box_predictor.box_encodings_clip_range.min,
          max=config_box_predictor.box_encodings_clip_range.max)
    return build_convolutional_keras_box_predictor(
        is_training=is_training,
        num_classes=num_classes,
        add_background_class=add_background_class,
        conv_hyperparams=conv_hyperparams,
        freeze_batchnorm=freeze_batchnorm,
        inplace_batchnorm_update=inplace_batchnorm_update,
        num_predictions_per_location_list=num_predictions_per_location_list,
        use_dropout=config_box_predictor.use_dropout,
        dropout_keep_prob=config_box_predictor.dropout_keep_probability,
        box_code_size=config_box_predictor.box_code_size,
        kernel_size=config_box_predictor.kernel_size,
        num_layers_before_predictor=(
            config_box_predictor.num_layers_before_predictor),
        min_depth=config_box_predictor.min_depth,
        max_depth=config_box_predictor.max_depth,
        class_prediction_bias_init=(
            config_box_predictor.class_prediction_bias_init),
        use_depthwise=config_box_predictor.use_depthwise,
        box_encodings_clip_range=box_encodings_clip_range)

  if box_predictor_oneof == 'weight_shared_convolutional_box_predictor':
    config_box_predictor = (
        box_predictor_config.weight_shared_convolutional_box_predictor)
    conv_hyperparams = hyperparams_fn(config_box_predictor.conv_hyperparams)
    apply_batch_norm = config_box_predictor.conv_hyperparams.HasField(
        'batch_norm')
    # During training phase, logits are used to compute the loss. Only apply
    # sigmoid at inference to make the inference graph TPU friendly. This is
    # required because during TPU inference, model.postprocess is not called.
    score_converter_fn = build_score_converter(
        config_box_predictor.score_converter, is_training)
    # Optionally apply clipping to box encodings, when box_encodings_clip_range
    # is set.
    box_encodings_clip_range = None
    if config_box_predictor.HasField('box_encodings_clip_range'):
      box_encodings_clip_range = BoxEncodingsClipRange(
          min=config_box_predictor.box_encodings_clip_range.min,
          max=config_box_predictor.box_encodings_clip_range.max)
    return build_weight_shared_convolutional_keras_box_predictor(
        is_training=is_training,
        num_classes=num_classes,
        conv_hyperparams=conv_hyperparams,
        freeze_batchnorm=freeze_batchnorm,
        inplace_batchnorm_update=inplace_batchnorm_update,
        num_predictions_per_location_list=num_predictions_per_location_list,
        depth=config_box_predictor.depth,
        num_layers_before_predictor=(
            config_box_predictor.num_layers_before_predictor),
        box_code_size=config_box_predictor.box_code_size,
        kernel_size=config_box_predictor.kernel_size,
        add_background_class=add_background_class,
        class_prediction_bias_init=(
            config_box_predictor.class_prediction_bias_init),
        use_dropout=config_box_predictor.use_dropout,
        dropout_keep_prob=config_box_predictor.dropout_keep_probability,
        share_prediction_tower=config_box_predictor.share_prediction_tower,
        apply_batch_norm=apply_batch_norm,
        use_depthwise=config_box_predictor.use_depthwise,
        score_converter_fn=score_converter_fn,
        box_encodings_clip_range=box_encodings_clip_range)

  if box_predictor_oneof == 'mask_rcnn_box_predictor':
    config_box_predictor = box_predictor_config.mask_rcnn_box_predictor
    fc_hyperparams = hyperparams_fn(config_box_predictor.fc_hyperparams)
    conv_hyperparams = None
    if config_box_predictor.HasField('conv_hyperparams'):
      conv_hyperparams = hyperparams_fn(
          config_box_predictor.conv_hyperparams)
    return build_mask_rcnn_keras_box_predictor(
        is_training=is_training,
        num_classes=num_classes,
        add_background_class=add_background_class,
        fc_hyperparams=fc_hyperparams,
        freeze_batchnorm=freeze_batchnorm,
        use_dropout=config_box_predictor.use_dropout,
        dropout_keep_prob=config_box_predictor.dropout_keep_probability,
        box_code_size=config_box_predictor.box_code_size,
        share_box_across_classes=(
            config_box_predictor.share_box_across_classes),
        predict_instance_masks=config_box_predictor.predict_instance_masks,
        conv_hyperparams=conv_hyperparams,
        mask_height=config_box_predictor.mask_height,
        mask_width=config_box_predictor.mask_width,
        mask_prediction_num_conv_layers=(
            config_box_predictor.mask_prediction_num_conv_layers),
        mask_prediction_conv_depth=(
            config_box_predictor.mask_prediction_conv_depth),
        masks_are_class_agnostic=(
            config_box_predictor.masks_are_class_agnostic),
        convolve_then_upsample_masks=(
            config_box_predictor.convolve_then_upsample_masks))

  if box_predictor_oneof == 'rfcn_box_predictor':
    config_box_predictor = box_predictor_config.rfcn_box_predictor
    conv_hyperparams = hyperparams_fn(config_box_predictor.conv_hyperparams)
    box_predictor_object = rfcn_keras_box_predictor.RfcnKerasBoxPredictor(
        is_training=is_training,
        num_classes=num_classes,
        conv_hyperparams=conv_hyperparams,
        freeze_batchnorm=freeze_batchnorm,
        crop_size=[config_box_predictor.crop_height,
                   config_box_predictor.crop_width],
        num_spatial_bins=[config_box_predictor.num_spatial_bins_height,
                          config_box_predictor.num_spatial_bins_width],
        depth=config_box_predictor.depth,
        box_code_size=config_box_predictor.box_code_size)
    return box_predictor_object
  raise ValueError(
      'Unknown box predictor for Keras: {}'.format(box_predictor_oneof))
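For orientation, a minimal sketch of how a builder like build() is typically invoked. The text-format config below and the use of hyperparams_builder.build as the argscope_fn are illustrative assumptions, not taken from this file:

# Hypothetical usage sketch: construct a box predictor from a text-format
# proto config; hyperparams_builder.build is assumed to serve as argscope_fn.
from google.protobuf import text_format
from object_detection.builders import box_predictor_builder, hyperparams_builder
from object_detection.protos import box_predictor_pb2

config = box_predictor_pb2.BoxPredictor()
text_format.Merge("""
  convolutional_box_predictor {
    conv_hyperparams {
      regularizer { l2_regularizer { weight: 0.0005 } }
      initializer { truncated_normal_initializer {} }
    }
  }
""", config)

predictor = box_predictor_builder.build(
    argscope_fn=hyperparams_builder.build,
    box_predictor_config=config,
    is_training=True,
    num_classes=90)  # 90 is a made-up example value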
aa3b3557c2779676f7b441397483f7569c7d4901
9ca6885d197aaf6869e2080901b361b034e4cc37
/ElectroWeakAnalysis/ZReco/python/zToTauTau_ETau_EventContent_AODSIM_cff.py
ff21d1bbfb2a6f565db5f6386bf52e4f0df33be0
[]
no_license
ktf/cmssw-migration
153ff14346b20086f908a370029aa96575a2c51a
583340dd03481dff673a52a2075c8bb46fa22ac6
refs/heads/master
2020-07-25T15:37:45.528173
2013-07-11T04:54:56
2013-07-11T04:54:56
null
0
0
null
null
null
null
UTF-8
Python
false
false
449
py
import FWCore.ParameterSet.Config as cms

from Configuration.EventContent.EventContent_cff import *
from ElectroWeakAnalysis.ZReco.zToTauTau_ETau_EventContent_cff import *

AODSIMZToTauTauETauEventContent = cms.PSet(
    outputCommands = cms.untracked.vstring()
)
AODSIMZToTauTauETauEventContent.outputCommands.extend(AODSIMEventContent.outputCommands)
AODSIMZToTauTauETauEventContent.outputCommands.extend(zToTauTauETauEventContent.outputCommands)
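A minimal sketch of how a combined event-content PSet like this is typically consumed downstream; the process name and output file name below are illustrative assumptions only:

# Hypothetical skim configuration reusing the PSet defined above.
import FWCore.ParameterSet.Config as cms
from ElectroWeakAnalysis.ZReco.zToTauTau_ETau_EventContent_AODSIM_cff import AODSIMZToTauTauETauEventContent

process = cms.Process("SKIM")  # assumed process name
process.out = cms.OutputModule(
    "PoolOutputModule",
    fileName = cms.untracked.string("zToTauTau_ETau_AODSIM.root"),  # assumed file name
    outputCommands = AODSIMZToTauTauETauEventContent.outputCommands
)
process.outpath = cms.EndPath(process.out)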
9b9b87f2ae5c5a6e2cebe144fce70e4c548ef29b
66ab8fac9fb19e5ff470be0fa7b2b73600231f16
/pyble/const/service/link_loss.py
069e803e4cc77d7d9397e6abc7ba441d3108fc97
[ "MIT" ]
permissive
bgromov/PyBLEWrapper
e97bbc2299f880838d246a8c6fdb27b05cb72af1
8a5d016e65b3c259391ddc97c371ab4b1b5c61b5
refs/heads/master
2020-03-25T21:41:43.702666
2018-08-12T23:38:16
2018-08-12T23:38:16
144,185,816
0
0
MIT
2018-08-09T17:50:12
2018-08-09T17:50:12
null
UTF-8
Python
false
false
29
py
NAME="Link Loss" UUID=0x1803
3a2079455f632c92b4b8d209389e7bd7bd73ee00
b4a4fa30ae3f857e209356d1a28273b3cbbdc3e3
/api_basic/views.py
3b09cd7401a291568f6dc1dda8cbc8a97862ad2f
[]
no_license
sahin88/Django_Rest_Framework_Account_Article_RestAPI
c0fd4b97eb5629c3b65781663dd82edc6f0de384
f5d7d12c04852392583f881f1e334d71c4582c4d
refs/heads/master
2023-03-25T19:02:17.427294
2020-04-21T09:26:51
2020-04-21T09:26:51
257,544,520
0
0
null
null
null
null
UTF-8
Python
false
false
4,326
py
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse, JsonResponse
from rest_framework.parsers import JSONParser
from .models import Article
from .serializers import ArticleSerializer
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework import status
from rest_framework.views import APIView
from rest_framework import mixins
from rest_framework import generics
from rest_framework.authentication import SessionAuthentication, BasicAuthentication, TokenAuthentication
from rest_framework.permissions import IsAuthenticated


class GenericAPIViews(generics.GenericAPIView, mixins.ListModelMixin,
                      mixins.CreateModelMixin, mixins.UpdateModelMixin,
                      mixins.RetrieveModelMixin, mixins.DestroyModelMixin):
    serializer_class = ArticleSerializer
    queryset = Article.objects.all()
    lookup_field = 'id'
    # Note: the second assignment below overrides the first, so only
    # TokenAuthentication is actually in effect.
    authentication_classes = [SessionAuthentication, BasicAuthentication]
    authentication_classes = [TokenAuthentication]
    permission_classes = [IsAuthenticated]

    def get(self, request, id=None):
        if id:
            return self.retrieve(request, id)
        else:
            return self.list(request)

    def post(self, request):
        return self.create(request)

    def put(self, request, id=None):
        return self.update(request, id)

    def delete(self, request, id=None):
        return self.destroy(request, id)


class ArticleView(APIView):

    def get(self, request):
        articles = Article.objects.all()
        serializer = ArticleSerializer(articles, many=True)
        return Response(serializer.data)

    def post(self, request):
        serializer = ArticleSerializer(data=request.data)
        # request.POST  # Only handles form data. Only works for 'POST' method.
        # request.data  # Handles arbitrary data. Works for 'POST', 'PUT' and 'PATCH' methods.
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status.HTTP_400_BAD_REQUEST)


class ArticleDetailView(APIView):

    def get(self, request, pk):
        try:
            article = Article.objects.get(pk=pk)
        except:
            return HttpResponse(status.HTTP_404_NOT_FOUND)
        serializer = ArticleSerializer(article)
        return Response(serializer.data)

    def put(self, request, pk):
        article = Article.objects.get(pk=pk)
        serializer = ArticleSerializer(article, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def delete(self, request, pk):
        article = Article.objects.get(pk=pk)
        article.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

# Create your views here.

# @api_view(['GET', 'POST'])
# def article_list(request):
#     if request.method == 'GET':
#         articles = Article.objects.all()
#         serializer = ArticleSerializer(articles, many=True)
#         return Response(serializer.data)
#     elif request.method == 'POST':
#         serializer = ArticleSerializer(data=request.data)
#         if serializer.is_valid():
#             serializer.save()
#             return Response(serializer.data, status=status.HTTP_201_CREATED)
#         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


# @api_view(['GET', 'PUT', 'DELETE'])
# @csrf_exempt
# def article_detail(request, pk):
#     try:
#         article = Article.objects.get(pk=pk)
#     except:
#         return HttpResponse(status.HTTP_404_NOT_FOUND)
#     if request.method == 'GET':
#         serializer = ArticleSerializer(article)
#         return Response(serializer.data)
#     elif request.method == 'PUT':
#         # data = JSONParser().parse(request)
#         serializer = ArticleSerializer(article, data=request.data)
#         if serializer.is_valid():
#             serializer.save()
#             return Response(serializer.data)
#         return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
#     elif request.method == 'DELETE':
#         article.delete()
#         return Response(serializer.errors, status=status.HTTP_204_NO_CONTENT)
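These views depend on an Article model and ArticleSerializer defined in sibling modules not shown in this file. A minimal sketch of what they might look like, with hypothetical field names, plus the URL wiring the class-based views assume:

# serializers.py -- hypothetical companion module; field names are assumptions.
from rest_framework import serializers
from .models import Article

class ArticleSerializer(serializers.ModelSerializer):
    class Meta:
        model = Article
        fields = ['id', 'title', 'author', 'email']  # assumed fields

# urls.py -- hypothetical routing for the views above.
from django.urls import path
from .views import ArticleView, ArticleDetailView, GenericAPIViews

urlpatterns = [
    path('article/', ArticleView.as_view()),
    path('article/<int:pk>/', ArticleDetailView.as_view()),
    path('generic/article/<int:id>/', GenericAPIViews.as_view()),
]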
141fb7da59e137862d2a34b048d6dee9113a4465
6e601105760f09d3c9f5306e18e4cf085f0bb4a2
/10000-99999/10868.py
f86c7f9e367022589178de78365c22587a18d80c
[]
no_license
WSJI0/BOJ
6412f69fddd46c4bcc96377e2b6e013f3bb1b524
160d8c13f72d7da835d938686f433e7b245be682
refs/heads/master
2023-07-06T15:35:50.815021
2023-07-04T01:39:48
2023-07-04T01:39:48
199,650,520
2
0
null
2020-04-20T09:03:03
2019-07-30T12:48:37
Python
UTF-8
Python
false
false
768
py
'''
BOJ 10868: Minimum (최솟값)
'''
import sys
input = sys.stdin.readline

def init(start, end, node):
    # Build the segment tree: leaves hold array values, internal nodes hold
    # the minimum of their two children.
    if start == end:
        tree[node] = a[start]
        return tree[node]
    mid = (start + end) // 2
    tree[node] = min(init(start, mid, node*2), init(mid+1, end, node*2+1))
    return tree[node]

def findMin(start, end, node, left, right):
    # Range-minimum query over a[left..right].
    if left > end or right < start:
        return 9999999999
    if left <= start and end <= right:
        return tree[node]
    mid = (start + end) // 2
    return min(findMin(start, mid, node*2, left, right),
               findMin(mid+1, end, node*2+1, left, right))

n, m = map(int, input().split())
a = []
tree = [9999999999] * (4*n)
for _ in range(n):
    a.append(int(input()))
init(0, n-1, 1)
for _ in range(m):
    s, e = map(int, input().split())
    print(findMin(0, n-1, 1, s-1, e-1))
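As a quick sanity check of the two recursive helpers, a hand-runnable harness using an in-memory array instead of stdin; the sample values are made up:

# Hypothetical check: build the tree over a small array and query two ranges.
a = [5, 3, 8, 1, 7]
n = len(a)
tree = [9999999999] * (4 * n)
init(0, n - 1, 1)
assert findMin(0, n - 1, 1, 0, 2) == 3  # min(5, 3, 8)
assert findMin(0, n - 1, 1, 2, 4) == 1  # min(8, 1, 7)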
2d89215d3f25c0013d427f43c8a31bbfa0744d4c
4d332c45578246847ef2cdcdeb827ca29ab06090
/modules/Bio/PopGen/GenePop/Controller.py
e9866838933b8ef22e36ba20611c8b64317f340e
[ "MIT" ]
permissive
prateekgupta3991/justforlearn
616cc297a2a6119fa959b9337a5e91c77a11ebf7
3984c64063b356cf89003e17a914272983b6cf48
refs/heads/master
2021-03-12T22:09:12.184638
2014-01-28T10:37:07
2014-01-28T10:37:07
null
0
0
null
null
null
null
UTF-8
Python
false
false
52
py
/usr/share/pyshared/Bio/PopGen/GenePop/Controller.py
451f6e60c661a8a37b1823007d8f99a173a8ec2a
60139399c13b87c150573808d23c8f00542672a2
/admux/mixins/websites.py
4d3b238caf36bfa27f567f0d7fc5337f55b466db
[]
no_license
adverserve/admux_client
d4781501b5e8486a072eb3dc6c19580292c517f8
1c633072e75d2f3be660565825a5f77c9699e68c
refs/heads/master
2021-01-10T20:29:45.695490
2013-10-24T14:52:21
2013-10-24T14:52:21
null
0
0
null
null
null
null
UTF-8
Python
false
false
955
py
# coding: utf-8

from admux import helpers


class WebsitesClientMixin(object):
    def websites(self, links=None, expand=None):
        """ http://admux-demo.trust-box.at/developer/api/v1/get/websites/

        links: Boolean
        expand: array of strings
        """
        url = '/websites'
        params = {
            'links': helpers._bool(links),
            'expand': helpers._list(expand),
        }
        return self._request('GET', url, params=params)

    def website(self, uuid, links=None, expand=None):
        """ http://admux-demo.trust-box.at/developer/api/v1/get/websites/uuid/

        uuid: website identifier
        links: Boolean
        expand: array of strings
        """
        url = '/websites/%(uuid)s' % {
            'uuid': uuid,
        }
        params = {
            'links': helpers._bool(links),
            'expand': helpers._list(expand),
        }
        return self._request('GET', url, params=params)
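The mixin assumes a host class that supplies _request; a sketch of how such a client might be assembled. The ApiClient base, its base URL, and the 'campaigns' expand key are assumptions for illustration:

import requests

class ApiClient(object):
    # Hypothetical transport supplying the _request hook the mixin calls.
    BASE_URL = 'http://admux-demo.trust-box.at/v1'  # assumed endpoint root

    def _request(self, method, url, params=None):
        response = requests.request(method, self.BASE_URL + url, params=params)
        response.raise_for_status()
        return response.json()

class Client(WebsitesClientMixin, ApiClient):
    pass

client = Client()
client.websites(links=True, expand=['campaigns'])  # 'campaigns' is a made-up key
client.website('some-uuid')  # placeholder identifier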
f6f2e5c22f89a074e87d518b68687ad9227a830c
de24f83a5e3768a2638ebcf13cbe717e75740168
/moodledata/vpl_data/380/usersdata/344/68278/submittedfiles/testes.py
e1878449ad74b2fb2bf5ad590a3c0f974f6d628b
[]
no_license
rafaelperazzo/programacao-web
95643423a35c44613b0f64bed05bd34780fe2436
170dd5440afb9ee68a973f3de13a99aa4c735d79
refs/heads/master
2021-01-12T14:06:25.773146
2017-12-22T16:05:45
2017-12-22T16:05:45
69,566,344
0
0
null
null
null
null
UTF-8
Python
false
false
82
py
# -*- coding: utf-8 -*-
# START HERE BELOW (original comment: COMECE AQUI ABAIXO)
idade = int(input('digite sua idade'))
d3a372d9cda192f974f6edac19afaf207d458e26
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p02785/s474912270.py
3746381a7a22f2e9f041861db8ba3b5bb71bc13a
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
378
py
import sys
input = sys.stdin.readline

def I(): return int(input())
def MI(): return map(int, input().split())
def LI(): return list(map(int, input().split()))

def main():
    mod = 10**9 + 7
    N, K = MI()
    h = LI()
    h.sort()
    if K >= N:
        print(0)
        exit()
    for i in range(K):
        h[-1-i] = 0
    ans = sum(h)
    print(ans)

main()
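A quick trace of the strategy (sort, zero out the K largest values, sum the rest) on made-up input:

# Hand trace: N=5, K=2, h = [4, 1, 5, 9, 2] -> sorted [1, 2, 4, 5, 9].
# Zeroing the two largest leaves [1, 2, 4, 0, 0], so the answer is 7.
h = [4, 1, 5, 9, 2]
K = 2
h.sort()
for i in range(K):
    h[-1 - i] = 0
assert sum(h) == 7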
34e433c211df58f96eb9e335278588ea0f8e6417
5ddb3d3095ae782671d4592549e1fec38b73eb84
/spacy/tests/pipeline/test_functions.py
fbb88ade26a6127552ab9bcb9f6ab40c4e39d3ef
[ "MIT" ]
permissive
Ali-Tahir/spaCy
c1ea9c5389044bc1e1f0e30f176b9b6e60a10690
9e210fa7fdb8e376655e7a7ab7debd3ffd718a63
refs/heads/master
2020-08-30T02:27:16.538275
2019-10-28T22:59:03
2019-10-28T22:59:03
218,234,283
0
0
MIT
2020-02-09T06:47:46
2019-10-29T08:05:03
null
UTF-8
Python
false
false
882
py
# coding: utf-8
from __future__ import unicode_literals

import pytest
from spacy.pipeline.functions import merge_subtokens
from ..util import get_doc


@pytest.fixture
def doc(en_tokenizer):
    # fmt: off
    text = "This is a sentence. This is another sentence. And a third."
    heads = [1, 0, 1, -2, -3, 1, 0, 1, -2, -3, 1, 1, 1, 0]
    deps = ["nsubj", "ROOT", "subtok", "attr", "punct", "nsubj", "ROOT",
            "subtok", "attr", "punct", "subtok", "subtok", "subtok", "ROOT"]
    # fmt: on
    tokens = en_tokenizer(text)
    return get_doc(tokens.vocab, words=[t.text for t in tokens], heads=heads, deps=deps)


def test_merge_subtokens(doc):
    doc = merge_subtokens(doc)
    # get_doc() doesn't set spaces, so the result is "And a third ."
    assert [t.text for t in doc] == ["This", "is", "a sentence", ".", "This",
                                     "is", "another sentence", ".", "And a third ."]
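For context, merge_subtokens can also run as a pipeline component in the spaCy v2 API this repository targets; a sketch assuming an installed model whose parser emits 'subtok' dependencies:

# Hypothetical pipeline usage: merges token spans linked by 'subtok' deps
# into single tokens after parsing.
import spacy
from spacy.pipeline.functions import merge_subtokens

nlp = spacy.load("en_core_web_sm")  # assumes this model is installed
nlp.add_pipe(merge_subtokens, after="parser")
doc = nlp("This is a sentence.")
print([t.text for t in doc])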
eff19506d086bf10caf3a7ff7cc64b66606e87dd
888899f0cb3e6e7b28a9de39001a1fd1c177cd35
/COMPLETE PYTHON-3 COURSE/Chapter-19-READ_WRITE_CSV_FILES/read_write_csv_simultanously.py
4d67f080dfecfeb586057bc4ec3209854e177e35
[]
no_license
VivakaNand/COMPLETE_PYTHON_3
ef162d71d3a44bf661fcc1a8aacce31e7953cd7c
b3b835afe7671fdc3d29d912650fd4ccd3bc83f6
refs/heads/master
2023-02-04T10:13:41.881939
2020-12-23T08:30:51
2020-12-23T08:30:51
323,839,528
0
1
null
null
null
null
UTF-8
Python
false
false
583
py
# reader, DictReader
# writer, DictWriter
from csv import DictWriter, DictReader

with open('final.csv', 'r') as rf:
    with open('file2.csv', 'w', newline='') as wf:
        csv_reader = DictReader(rf)
        csv_writer = DictWriter(wf, fieldnames=['first_name', 'last_name', 'age'])
        csv_writer.writeheader()
        for row in csv_reader:
            fname, lname, age = row['firstname'], row['lastname'], row['age']
            csv_writer.writerow({
                'first_name': fname.upper(),
                'last_name': lname.upper(),
                'age': age
            })
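Assuming an input file of the shape below (rows are made up), the script renames the header columns and upper-cases the names on the way through:

# final.csv (assumed input)   ->  file2.csv (output)
# firstname,lastname,age          first_name,last_name,age
# ada,lovelace,36                 ADA,LOVELACE,36
# alan,turing,41                  ALAN,TURING,41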
15cada83ac6b82129e73f14015c85f9399b59db6
974c5a4f101d0e6f4dfa5fc2f7c641c9d2bd8184
/sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2019_12_01/operations/_virtual_machines_operations.py
86e37d500f8e3ac1ecdf9bcedfff149562445ccb
[ "LicenseRef-scancode-generic-cla", "MIT", "LGPL-2.1-or-later" ]
permissive
gaoyp830/azure-sdk-for-python
4816f04c554dcffb7510a6b7044b0c86a2dd32e1
1c66defa502b754abcc9e5afa444ca03c609342f
refs/heads/master
2022-10-20T21:33:44.281041
2022-09-29T17:03:13
2022-09-29T17:03:13
250,355,505
0
0
MIT
2020-03-26T19:42:13
2020-03-26T19:42:12
null
UTF-8
Python
false
false
155,767
py
# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload from urllib.parse import parse_qs, urljoin, urlparse from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, map_error, ) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models from ..._serialization import Serializer from .._vendor import _convert_request, _format_url_section T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False def build_list_by_location_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/virtualMachines", ) # pylint: disable=line-too-long path_format_arguments = { "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_capture_request(resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/capture", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", 
vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_update_request(resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request(resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = 
kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") return HttpRequest(method="DELETE", url=_url, params=_params, **kwargs) def build_get_request( resource_group_name: str, vm_name: str, subscription_id: str, *, expand: str = "instanceView", **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters if expand is not None: _params["$expand"] = _SERIALIZER.query("expand", expand, "str") _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_instance_view_request( resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/instanceView", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_convert_to_managed_disks_request( resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", 
"2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/convertToManagedDisks", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") return HttpRequest(method="POST", url=_url, params=_params, **kwargs) def build_deallocate_request( resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/deallocate", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") return HttpRequest(method="POST", url=_url, params=_params, **kwargs) def build_generalize_request( resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/generalize", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") return HttpRequest(method="POST", url=_url, params=_params, **kwargs) def build_list_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters 
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_list_all_request(subscription_id: str, *, status_only: Optional[str] = None, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.Compute/virtualMachines") path_format_arguments = { "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if status_only is not None: _params["statusOnly"] = _SERIALIZER.query("status_only", status_only, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_list_available_sizes_request( resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/vmSizes", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_power_off_request( resource_group_name: str, vm_name: str, subscription_id: str, *, skip_shutdown: bool = False, **kwargs: Any ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/powerOff", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters if skip_shutdown is not None: _params["skipShutdown"] = _SERIALIZER.query("skip_shutdown", skip_shutdown, "bool") _params["api-version"] = _SERIALIZER.query("api_version", 
api_version, "str") return HttpRequest(method="POST", url=_url, params=_params, **kwargs) def build_reapply_request(resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str accept = _headers.pop("Accept", "application/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/reapply", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_restart_request(resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/restart", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") return HttpRequest(method="POST", url=_url, params=_params, **kwargs) def build_start_request(resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/start", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") return HttpRequest(method="POST", url=_url, params=_params, **kwargs) def build_redeploy_request(resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/redeploy", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") return HttpRequest(method="POST", url=_url, params=_params, **kwargs) def build_reimage_request(resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/reimage", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_perform_maintenance_request( resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/performMaintenance", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") return HttpRequest(method="POST", url=_url, params=_params, **kwargs) def build_simulate_eviction_request( resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/simulateEviction", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 
"str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") return HttpRequest(method="POST", url=_url, params=_params, **kwargs) def build_run_command_request( resource_group_name: str, vm_name: str, subscription_id: str, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str] accept = _headers.pop("Accept", "application/json, text/json") # Construct URL _url = kwargs.pop( "template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/runCommand", ) # pylint: disable=line-too-long path_format_arguments = { "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str"), "vmName": _SERIALIZER.url("vm_name", vm_name, "str"), "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"), } _url = _format_url_section(_url, **path_format_arguments) # Construct parameters _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers if content_type is not None: _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) class VirtualMachinesOperations: # pylint: disable=too-many-public-methods """ .. warning:: **DO NOT** instantiate this class directly. Instead, you should access the following operations through :class:`~azure.mgmt.compute.v2019_12_01.ComputeManagementClient`'s :attr:`virtual_machines` attribute. """ models = _models def __init__(self, *args, **kwargs): input_args = list(args) self._client = input_args.pop(0) if input_args else kwargs.pop("client") self._config = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list_by_location(self, location: str, **kwargs: Any) -> Iterable["_models.VirtualMachine"]: """Gets all the virtual machines under the specified subscription for the specified location. :param location: The location for which virtual machines under the subscription are queried. Required. 
    @distributed_trace
    def list_by_location(self, location: str, **kwargs: Any) -> Iterable["_models.VirtualMachine"]:
        """Gets all the virtual machines under the specified subscription for the specified location.

        :param location: The location for which virtual machines under the subscription are queried.
         Required.
        :type location: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either VirtualMachine or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_12_01.models.VirtualMachine]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.VirtualMachineListResult]

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            if not next_link:

                request = build_list_by_location_request(
                    location=location,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_by_location.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urlparse(next_link)
                _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize("VirtualMachineListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    list_by_location.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/virtualMachines"}  # type: ignore

    def _capture_initial(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: Union[_models.VirtualMachineCaptureParameters, IO],
        **kwargs: Any
    ) -> Optional[_models.VirtualMachineCaptureResult]:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[Optional[_models.VirtualMachineCaptureResult]]

        content_type = content_type or "application/json"
        _json = None
        _content = None
        if isinstance(parameters, (IO, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "VirtualMachineCaptureParameters")

        request = build_capture_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self._capture_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize("VirtualMachineCaptureResult", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _capture_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/capture"}  # type: ignore

    @overload
    def begin_capture(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: _models.VirtualMachineCaptureParameters,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.VirtualMachineCaptureResult]:
        """Captures the VM by copying virtual hard disks of the VM and outputs a template that can be
        used to create similar VMs.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Capture Virtual Machine operation. Required.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachineCaptureParameters
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualMachineCaptureResult or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.VirtualMachineCaptureResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_capture(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.VirtualMachineCaptureResult]:
        """Captures the VM by copying virtual hard disks of the VM and outputs a template that can be
        used to create similar VMs.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Capture Virtual Machine operation. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualMachineCaptureResult or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.VirtualMachineCaptureResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def begin_capture(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: Union[_models.VirtualMachineCaptureParameters, IO],
        **kwargs: Any
    ) -> LROPoller[_models.VirtualMachineCaptureResult]:
        """Captures the VM by copying virtual hard disks of the VM and outputs a template that can be
        used to create similar VMs.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Capture Virtual Machine operation. Is either a
         model type or an IO type. Required.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachineCaptureParameters or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualMachineCaptureResult or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.VirtualMachineCaptureResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.VirtualMachineCaptureResult]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._capture_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                parameters=parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize("VirtualMachineCaptureResult", pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True:
            polling_method = cast(
                PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
            )  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_capture.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/capture"}  # type: ignore
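    # Illustrative sketch (editorial): begin_capture returns an LROPoller; .result() blocks
    # until the capture completes. Resource names and capture parameters are hypothetical.
    #
    #     params = _models.VirtualMachineCaptureParameters(
    #         vhd_prefix="captured", destination_container_name="templates", overwrite_vhds=True
    #     )
    #     template = vms.begin_capture("my-rg", "my-vm", params).result()
    #     # template is a VirtualMachineCaptureResult describing the captured disks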
    def _create_or_update_initial(
        self, resource_group_name: str, vm_name: str, parameters: Union[_models.VirtualMachine, IO], **kwargs: Any
    ) -> _models.VirtualMachine:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.VirtualMachine]

        content_type = content_type or "application/json"
        _json = None
        _content = None
        if isinstance(parameters, (IO, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "VirtualMachine")

        request = build_create_or_update_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self._create_or_update_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize("VirtualMachine", pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize("VirtualMachine", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}"}  # type: ignore

    @overload
    def begin_create_or_update(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: _models.VirtualMachine,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.VirtualMachine]:
        """The operation to create or update a virtual machine. Please note some properties can be set
        only during virtual machine creation.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Create Virtual Machine operation. Required.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachine
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualMachine or the result of
         cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.VirtualMachine]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_create_or_update(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.VirtualMachine]:
        """The operation to create or update a virtual machine. Please note some properties can be set
        only during virtual machine creation.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Create Virtual Machine operation. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualMachine or the result of
         cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.VirtualMachine]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def begin_create_or_update(
        self, resource_group_name: str, vm_name: str, parameters: Union[_models.VirtualMachine, IO], **kwargs: Any
    ) -> LROPoller[_models.VirtualMachine]:
        """The operation to create or update a virtual machine. Please note some properties can be set
        only during virtual machine creation.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Create Virtual Machine operation. Is either a
         model type or an IO type. Required.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachine or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualMachine or the result of
         cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.VirtualMachine]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.VirtualMachine]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._create_or_update_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                parameters=parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize("VirtualMachine", pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}"}  # type: ignore

    def _update_initial(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: Union[_models.VirtualMachineUpdate, IO],
        **kwargs: Any
    ) -> _models.VirtualMachine:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.VirtualMachine]

        content_type = content_type or "application/json"
        _json = None
        _content = None
        if isinstance(parameters, (IO, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "VirtualMachineUpdate")

        request = build_update_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self._update_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize("VirtualMachine", pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize("VirtualMachine", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}"}  # type: ignore

    @overload
    def begin_update(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: _models.VirtualMachineUpdate,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.VirtualMachine]:
        """The operation to update a virtual machine.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Update Virtual Machine operation. Required.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachineUpdate
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualMachine or the result of
         cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.VirtualMachine]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_update(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.VirtualMachine]:
        """The operation to update a virtual machine.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Update Virtual Machine operation. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualMachine or the result of
         cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.VirtualMachine]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def begin_update(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: Union[_models.VirtualMachineUpdate, IO],
        **kwargs: Any
    ) -> LROPoller[_models.VirtualMachine]:
        """The operation to update a virtual machine.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Update Virtual Machine operation. Is either a
         model type or an IO type. Required.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachineUpdate or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualMachine or the result of
         cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.VirtualMachine]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.VirtualMachine]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._update_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                parameters=parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize("VirtualMachine", pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}"}  # type: ignore
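    # Illustrative sketch (editorial): begin_update patches only the supplied fields, so a
    # VirtualMachineUpdate carrying nothing but tags leaves the rest of the VM untouched.
    # Names below are hypothetical.
    #
    #     update = _models.VirtualMachineUpdate(tags={"env": "test"})
    #     vm = vms.begin_update("my-rg", "my-vm", update).result()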
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}"} # type: ignore def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, vm_name: str, **kwargs: Any ) -> None: error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, 304: ResourceNotModifiedError, } error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str cls = kwargs.pop("cls", None) # type: ClsType[None] request = build_delete_request( resource_group_name=resource_group_name, vm_name=vm_name, subscription_id=self._config.subscription_id, api_version=api_version, template_url=self._delete_initial.metadata["url"], headers=_headers, params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}"} # type: ignore @distributed_trace def begin_delete(self, resource_group_name: str, vm_name: str, **kwargs: Any) -> LROPoller[None]: """The operation to delete a virtual machine. :param resource_group_name: The name of the resource group. Required. :type resource_group_name: str :param vm_name: The name of the virtual machine. Required. :type vm_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str cls = kwargs.pop("cls", None) # type: ClsType[None] polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod] lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) cont_token = kwargs.pop("continuation_token", None) # type: Optional[str] if cont_token is None: raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, vm_name=vm_name, api_version=api_version, cls=lambda x, y, z: x, headers=_headers, params=_params, **kwargs ) kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) if polling is True: polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod elif polling is False: polling_method = cast(PollingMethod, NoPolling()) else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output, ) return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}"} # type: ignore @distributed_trace def get( self, resource_group_name: str, vm_name: str, expand: str = "instanceView", **kwargs: Any ) -> _models.VirtualMachine: """Retrieves information about the model view or the instance view of a virtual machine. :param resource_group_name: The name of the resource group. Required. :type resource_group_name: str :param vm_name: The name of the virtual machine. Required. :type vm_name: str :param expand: The expand expression to apply on the operation. Known values are "instanceView" and None. Default value is "instanceView". 
:type expand: str :keyword callable cls: A custom type or function that will be passed the direct response :return: VirtualMachine or the result of cls(response) :rtype: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachine :raises ~azure.core.exceptions.HttpResponseError: """ error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, 304: ResourceNotModifiedError, } error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachine] request = build_get_request( resource_group_name=resource_group_name, vm_name=vm_name, subscription_id=self._config.subscription_id, expand=expand, api_version=api_version, template_url=self.get.metadata["url"], headers=_headers, params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize("VirtualMachine", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}"} # type: ignore @distributed_trace def instance_view( self, resource_group_name: str, vm_name: str, **kwargs: Any ) -> _models.VirtualMachineInstanceView: """Retrieves information about the run-time state of a virtual machine. :param resource_group_name: The name of the resource group. Required. :type resource_group_name: str :param vm_name: The name of the virtual machine. Required. 
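    # Illustrative sketch (editorial): in this generated signature ``expand`` defaults to
    # "instanceView", so get() already embeds the instance view; power state, when present,
    # is found under ``vm.instance_view.statuses``. Names below are hypothetical.
    #
    #     vm = vms.get("my-rg", "my-vm")
    #     for status in (vm.instance_view.statuses if vm.instance_view else []):
    #         print(status.code)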
    @distributed_trace
    def instance_view(
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> _models.VirtualMachineInstanceView:
        """Retrieves information about the run-time state of a virtual machine.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualMachineInstanceView or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachineInstanceView
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.VirtualMachineInstanceView]

        request = build_instance_view_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.instance_view.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("VirtualMachineInstanceView", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    instance_view.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/instanceView"}  # type: ignore

    def _convert_to_managed_disks_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_convert_to_managed_disks_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._convert_to_managed_disks_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _convert_to_managed_disks_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/convertToManagedDisks"}  # type: ignore

    @distributed_trace
    def begin_convert_to_managed_disks(self, resource_group_name: str, vm_name: str, **kwargs: Any) -> LROPoller[None]:
        """Converts virtual machine disks from blob-based to managed disks. Virtual machine must be
        stop-deallocated before invoking this operation. :code:`<br>`For Windows, please refer to
        `Convert a virtual machine from unmanaged disks to managed disks.
        <https://docs.microsoft.com/en-us/azure/virtual-machines/windows/convert-unmanaged-to-managed-disks>`_.:code:`<br>`For
        Linux, please refer to `Convert a virtual machine from unmanaged disks to managed disks.
        <https://docs.microsoft.com/en-us/azure/virtual-machines/linux/convert-unmanaged-to-managed-disks>`_.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._convert_to_managed_disks_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_convert_to_managed_disks.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/convertToManagedDisks"}  # type: ignore
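    # Illustrative sketch (editorial): per the docstring above, the VM must be
    # stop-deallocated before converting its disks. A hedged end-to-end sequence
    # (begin_deallocate is defined later in this class; names are hypothetical):
    #
    #     vms.begin_deallocate("my-rg", "my-vm").result()
    #     vms.begin_convert_to_managed_disks("my-rg", "my-vm").result()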
    def _deallocate_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_deallocate_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._deallocate_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _deallocate_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/deallocate"}  # type: ignore

    @distributed_trace
    def begin_deallocate(self, resource_group_name: str, vm_name: str, **kwargs: Any) -> LROPoller[None]:
        """Shuts down the virtual machine and releases the compute resources. You are not billed for
        the compute resources that this virtual machine uses.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._deallocate_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_deallocate.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/deallocate"}  # type: ignore
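    # Illustrative note (editorial): deallocating releases the underlying compute allocation,
    # which is what stops billing; a plain power off stops the OS but keeps the allocation
    # (and billing) alive. A hedged non-blocking call, with hypothetical names:
    #
    #     poller = vms.begin_deallocate("my-rg", "my-vm")
    #     # ... do other work ...
    #     poller.wait()  # block only when the outcome is finally needed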
:type vm_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: None or the result of cls(response) :rtype: None :raises ~azure.core.exceptions.HttpResponseError: """ error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, 304: ResourceNotModifiedError, } error_map.update(kwargs.pop("error_map", {}) or {}) _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str cls = kwargs.pop("cls", None) # type: ClsType[None] request = build_generalize_request( resource_group_name=resource_group_name, vm_name=vm_name, subscription_id=self._config.subscription_id, api_version=api_version, template_url=self.generalize.metadata["url"], headers=_headers, params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) generalize.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/generalize"} # type: ignore @distributed_trace def list(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.VirtualMachine"]: """Lists all of the virtual machines in the specified resource group. Use the nextLink property in the response to get the next page of virtual machines. :param resource_group_name: The name of the resource group. Required. 
:type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either VirtualMachine or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_12_01.models.VirtualMachine] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01")) # type: str cls = kwargs.pop("cls", None) # type: ClsType[_models.VirtualMachineListResult] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, 304: ResourceNotModifiedError, } error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): if not next_link: request = build_list_request( resource_group_name=resource_group_name, subscription_id=self._config.subscription_id, api_version=api_version, template_url=self.list.metadata["url"], headers=_headers, params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore else: # make call to next link with the client's api-version _parsed_next_link = urlparse(next_link) _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query)) _next_request_params["api-version"] = self._config.api_version request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params) request = _convert_request(request) request.url = self._client.format_url(request.url) # type: ignore request.method = "GET" return request def extract_data(pipeline_response): deserialized = self._deserialize("VirtualMachineListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access request, stream=False, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged(get_next, extract_data) list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines"} # type: ignore @distributed_trace def list_all(self, status_only: Optional[str] = None, **kwargs: Any) -> Iterable["_models.VirtualMachine"]: """Lists all of the virtual machines in the specified subscription. Use the nextLink property in the response to get the next page of virtual machines. :param status_only: statusOnly=true enables fetching run time status of all Virtual Machines in the subscription. Default value is None. 
    @distributed_trace
    def list_all(self, status_only: Optional[str] = None, **kwargs: Any) -> Iterable["_models.VirtualMachine"]:
        """Lists all of the virtual machines in the specified subscription. Use the nextLink property in
        the response to get the next page of virtual machines.

        :param status_only: statusOnly=true enables fetching run time status of all Virtual Machines
         in the subscription. Default value is None.
        :type status_only: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either VirtualMachine or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_12_01.models.VirtualMachine]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.VirtualMachineListResult]

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_all_request(
                    subscription_id=self._config.subscription_id,
                    status_only=status_only,
                    api_version=api_version,
                    template_url=self.list_all.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urlparse(next_link)
                _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize("VirtualMachineListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    list_all.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Compute/virtualMachines"}  # type: ignore

    @distributed_trace
    def list_available_sizes(
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> Iterable["_models.VirtualMachineSize"]:
        """Lists all available virtual machine sizes to which the specified virtual machine can be
        resized.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either VirtualMachineSize or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2019_12_01.models.VirtualMachineSize]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.VirtualMachineSizeListResult]

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            if not next_link:
                request = build_list_available_sizes_request(
                    resource_group_name=resource_group_name,
                    vm_name=vm_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_available_sizes.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urlparse(next_link)
                _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize("VirtualMachineSizeListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(get_next, extract_data)

    list_available_sizes.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/vmSizes"}  # type: ignore
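    # Sketch: listing the resize options for an existing VM. Note this pager is single-page
    # (``extract_data`` above always returns ``None`` for the next link). Names reuse the
    # hypothetical ``compute_client`` from the earlier sketch:
    #
    #     sizes = compute_client.virtual_machines.list_available_sizes("my-rg", "my-vm")
    #     size_names = [size.name for size in sizes]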
    def _power_off_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, vm_name: str, skip_shutdown: bool = False, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_power_off_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            skip_shutdown=skip_shutdown,
            api_version=api_version,
            template_url=self._power_off_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _power_off_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/powerOff"}  # type: ignore

    @distributed_trace
    def begin_power_off(
        self, resource_group_name: str, vm_name: str, skip_shutdown: bool = False, **kwargs: Any
    ) -> LROPoller[None]:
        """The operation to power off (stop) a virtual machine. The virtual machine can be restarted
        with the same provisioned resources. You are still charged for this virtual machine.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param skip_shutdown: The parameter to request non-graceful VM shutdown. True value for this
         flag indicates non-graceful shutdown whereas false indicates otherwise. Default value for this
         flag is false if not specified. Default value is False.
        :type skip_shutdown: bool
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._power_off_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                skip_shutdown=skip_shutdown,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_power_off.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/powerOff"}  # type: ignore
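    # Sketch: powering off a VM and blocking until the long-running operation finishes.
    # ``skip_shutdown=True`` requests a non-graceful stop; client and resource names are
    # illustrative assumptions:
    #
    #     poller = compute_client.virtual_machines.begin_power_off("my-rg", "my-vm", skip_shutdown=False)
    #     poller.result()  # returns None for this operation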
    def _reapply_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_reapply_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._reapply_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _reapply_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/reapply"}  # type: ignore

    @distributed_trace
    def begin_reapply(self, resource_group_name: str, vm_name: str, **kwargs: Any) -> LROPoller[None]:
        """The operation to reapply a virtual machine's state.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._reapply_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_reapply.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/reapply"}  # type: ignore

    def _restart_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_restart_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._restart_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _restart_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/restart"}  # type: ignore

    @distributed_trace
    def begin_restart(self, resource_group_name: str, vm_name: str, **kwargs: Any) -> LROPoller[None]:
        """The operation to restart a virtual machine.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._restart_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_restart.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/restart"}  # type: ignore
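    # Sketch: an LROPoller can be suspended and resumed through its continuation token, as the
    # ``continuation_token`` keyword above describes. Client and resource names are illustrative:
    #
    #     poller = compute_client.virtual_machines.begin_restart("my-rg", "my-vm")
    #     token = poller.continuation_token()
    #     resumed = compute_client.virtual_machines.begin_restart(
    #         "my-rg", "my-vm", continuation_token=token)
    #     resumed.wait()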
    def _start_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_start_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._start_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/start"}  # type: ignore

    @distributed_trace
    def begin_start(self, resource_group_name: str, vm_name: str, **kwargs: Any) -> LROPoller[None]:
        """The operation to start a virtual machine.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._start_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/start"}  # type: ignore
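    # Sketch: starting a VM without blocking, then polling manually (names illustrative):
    #
    #     poller = compute_client.virtual_machines.begin_start("my-rg", "my-vm")
    #     while not poller.done():
    #         poller.wait(5)  # block for up to 5 seconds per poll cycle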
    def _redeploy_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_redeploy_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._redeploy_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _redeploy_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/redeploy"}  # type: ignore

    @distributed_trace
    def begin_redeploy(self, resource_group_name: str, vm_name: str, **kwargs: Any) -> LROPoller[None]:
        """Shuts down the virtual machine, moves it to a new node, and powers it back on.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._redeploy_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_redeploy.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/redeploy"}  # type: ignore

    def _reimage_initial(  # pylint: disable=inconsistent-return-statements
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: Optional[Union[_models.VirtualMachineReimageParameters, IO]] = None,
        **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        content_type = content_type or "application/json"
        _json = None
        _content = None
        if isinstance(parameters, (IO, bytes)):
            _content = parameters
        else:
            if parameters is not None:
                _json = self._serialize.body(parameters, "VirtualMachineReimageParameters")
            else:
                _json = None

        request = build_reimage_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self._reimage_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _reimage_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/reimage"}  # type: ignore

    @overload
    def begin_reimage(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: Optional[_models.VirtualMachineReimageParameters] = None,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[None]:
        """Reimages the virtual machine which has an ephemeral OS disk back to its initial state.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Reimage Virtual Machine operation. Default value
         is None.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachineReimageParameters
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_reimage(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: Optional[IO] = None,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[None]:
        """Reimages the virtual machine which has an ephemeral OS disk back to its initial state.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Reimage Virtual Machine operation. Default value
         is None.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
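    # The @overload stubs above exist only for static type checking; the @distributed_trace
    # implementation below is the single runtime entry point and accepts either the model
    # type or a raw IO stream for ``parameters``.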
    @distributed_trace
    def begin_reimage(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: Optional[Union[_models.VirtualMachineReimageParameters, IO]] = None,
        **kwargs: Any
    ) -> LROPoller[None]:
        """Reimages the virtual machine which has an ephemeral OS disk back to its initial state.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Reimage Virtual Machine operation. Is either a
         model type or an IO type. Default value is None.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.VirtualMachineReimageParameters or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._reimage_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                parameters=parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_reimage.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/reimage"}  # type: ignore

    def _perform_maintenance_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> None:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_perform_maintenance_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._perform_maintenance_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _perform_maintenance_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/performMaintenance"}  # type: ignore
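    # Sketch: reimaging a VM with an ephemeral OS disk. ``temp_disk`` is assumed here to be a
    # field of VirtualMachineReimageParameters in this api-version; resource names are
    # illustrative:
    #
    #     from azure.mgmt.compute.v2019_12_01.models import VirtualMachineReimageParameters
    #     params = VirtualMachineReimageParameters(temp_disk=False)
    #     compute_client.virtual_machines.begin_reimage("my-rg", "my-vm", parameters=params).result()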
    @distributed_trace
    def begin_perform_maintenance(self, resource_group_name: str, vm_name: str, **kwargs: Any) -> LROPoller[None]:
        """Shuts down the virtual machine, moves it to an already updated node, and powers it back on
        during the self-service phase of planned maintenance.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._perform_maintenance_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                api_version=api_version,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            if cls:
                return cls(pipeline_response, None, {})

        if polling is True:
            polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_perform_maintenance.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/performMaintenance"}  # type: ignore

    @distributed_trace
    def simulate_eviction(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, vm_name: str, **kwargs: Any
    ) -> None:
        """The operation to simulate the eviction of a spot virtual machine. The eviction will occur
        within 30 minutes of calling the API.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[None]

        request = build_simulate_eviction_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.simulate_eviction.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    simulate_eviction.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/simulateEviction"}  # type: ignore
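    # Sketch: simulate_eviction is not an LRO; the call returns as soon as the service accepts
    # it (HTTP 204), and the actual eviction of the Spot VM then happens within 30 minutes, as
    # the docstring above notes. Names are illustrative:
    #
    #     compute_client.virtual_machines.simulate_eviction("my-rg", "my-spot-vm")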
    def _run_command_initial(
        self, resource_group_name: str, vm_name: str, parameters: Union[_models.RunCommandInput, IO], **kwargs: Any
    ) -> Optional[_models.RunCommandResult]:
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[Optional[_models.RunCommandResult]]

        content_type = content_type or "application/json"
        _json = None
        _content = None
        if isinstance(parameters, (IO, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "RunCommandInput")

        request = build_run_command_request(
            resource_group_name=resource_group_name,
            vm_name=vm_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self._run_command_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        pipeline_response = self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize("RunCommandResult", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    _run_command_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/runCommand"}  # type: ignore

    @overload
    def begin_run_command(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: _models.RunCommandInput,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.RunCommandResult]:
        """Run command on the VM.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Run command operation. Required.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.RunCommandInput
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either RunCommandResult or the result of
         cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.RunCommandResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    def begin_run_command(
        self,
        resource_group_name: str,
        vm_name: str,
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> LROPoller[_models.RunCommandResult]:
        """Run command on the VM.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Run command operation. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Known values are: 'application/json', 'text/json'. Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either RunCommandResult or the result of
         cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.RunCommandResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace
    def begin_run_command(
        self, resource_group_name: str, vm_name: str, parameters: Union[_models.RunCommandInput, IO], **kwargs: Any
    ) -> LROPoller[_models.RunCommandResult]:
        """Run command on the VM.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param vm_name: The name of the virtual machine. Required.
        :type vm_name: str
        :param parameters: Parameters supplied to the Run command operation. Is either a model type or
         an IO type. Required.
        :type parameters: ~azure.mgmt.compute.v2019_12_01.models.RunCommandInput or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json',
         'text/json'. Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
         operation to not poll, or pass in your own initialized polling object for a personal polling
         strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of LROPoller that returns either RunCommandResult or the result of
         cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.compute.v2019_12_01.models.RunCommandResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2019-12-01"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.RunCommandResult]
        polling = kwargs.pop("polling", True)  # type: Union[bool, PollingMethod]
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token = kwargs.pop("continuation_token", None)  # type: Optional[str]
        if cont_token is None:
            raw_result = self._run_command_initial(  # type: ignore
                resource_group_name=resource_group_name,
                vm_name=vm_name,
                parameters=parameters,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize("RunCommandResult", pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        if polling is True:
            polling_method = cast(
                PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs)
            )  # type: PollingMethod
        elif polling is False:
            polling_method = cast(PollingMethod, NoPolling())
        else:
            polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_run_command.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachines/{vmName}/runCommand"}  # type: ignore
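    # Sketch: running a shell script on a VM via the run-command LRO. "RunShellScript" is the
    # standard command id for Linux VMs; client and resource names are illustrative:
    #
    #     from azure.mgmt.compute.v2019_12_01.models import RunCommandInput
    #     cmd = RunCommandInput(command_id="RunShellScript", script=["echo hello"])
    #     result = compute_client.virtual_machines.begin_run_command("my-rg", "my-vm", cmd).result()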
6f3462b065e1eb0c051d3a4dd2aa4e0fdd81ec81
1a166165ab8287d01cbb377a13efdb5eff5dfef0
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_09_01/aio/operations/_azure_firewalls_operations.py
ce7bf9e6e32d162196e02b3f33bf07cfe339d996
[ "MIT", "LicenseRef-scancode-generic-cla", "LGPL-2.1-or-later" ]
permissive
manoj0806/azure-sdk-for-python
7a14b202ff80f528abd068bf50334e91001a9686
aab999792db1132232b2f297c76800590a901142
refs/heads/master
2023-04-19T16:11:31.984930
2021-04-29T23:19:49
2021-04-29T23:19:49
363,025,016
1
0
MIT
2021-04-30T04:23:35
2021-04-30T04:23:35
null
UTF-8
Python
false
false
29,781
py
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings

from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling

from ... import models as _models

T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]


class AzureFirewallsOperations:
    """AzureFirewallsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance
    that instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_09_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def _delete_initial(
        self,
        resource_group_name: str,
        azure_firewall_name: str,
        **kwargs
    ) -> None:
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-09-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        azure_firewall_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the specified Azure Firewall.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param azure_firewall_name: The name of the Azure Firewall.
        :type azure_firewall_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                azure_firewall_name=azure_firewall_name,
                cls=lambda x, y, z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True:
            polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False:
            polling_method = AsyncNoPolling()
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'}  # type: ignore
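    # Illustrative async usage sketch (assumes a NetworkManagementClient named
    # ``network_client``; none of these names come from this file):
    #
    #     poller = await network_client.azure_firewalls.begin_delete("my-rg", "my-firewall")
    #     await poller.result()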
:type azure_firewall_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: AzureFirewall, or the result of cls(response) :rtype: ~azure.mgmt.network.v2019_09_01.models.AzureFirewall :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-09-01" accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('AzureFirewall', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore async def _create_or_update_initial( self, resource_group_name: str, azure_firewall_name: str, parameters: "_models.AzureFirewall", **kwargs ) -> "_models.AzureFirewall": cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._create_or_update_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'AzureFirewall') body_content_kwargs['content'] = body_content 
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('AzureFirewall', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('AzureFirewall', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore async def begin_create_or_update( self, resource_group_name: str, azure_firewall_name: str, parameters: "_models.AzureFirewall", **kwargs ) -> AsyncLROPoller["_models.AzureFirewall"]: """Creates or updates the specified Azure Firewall. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param azure_firewall_name: The name of the Azure Firewall. :type azure_firewall_name: str :param parameters: Parameters supplied to the create or update Azure Firewall operation. :type parameters: ~azure.mgmt.network.v2019_09_01.models.AzureFirewall :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either AzureFirewall or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_09_01.models.AzureFirewall] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, azure_firewall_name=azure_firewall_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('AzureFirewall', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore async def _update_tags_initial( self, resource_group_name: str, azure_firewall_name: str, parameters: "_models.TagsObject", **kwargs ) -> Optional["_models.AzureFirewall"]: cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.AzureFirewall"]] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-09-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self._update_tags_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = 
self._serialize.body(parameters, 'TagsObject') body_content_kwargs['content'] = body_content request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: deserialized = self._deserialize('AzureFirewall', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized _update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore async def begin_update_tags( self, resource_group_name: str, azure_firewall_name: str, parameters: "_models.TagsObject", **kwargs ) -> AsyncLROPoller["_models.AzureFirewall"]: """Updates tags of an Azure Firewall resource. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param azure_firewall_name: The name of the Azure Firewall. :type azure_firewall_name: str :param parameters: Parameters supplied to update azure firewall tags. :type parameters: ~azure.mgmt.network.v2019_09_01.models.TagsObject :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either AzureFirewall or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2019_09_01.models.AzureFirewall] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = await self._update_tags_initial( resource_group_name=resource_group_name, azure_firewall_name=azure_firewall_name, parameters=parameters, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): deserialized = self._deserialize('AzureFirewall', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = AsyncNoPolling() else: polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore def list( self, resource_group_name: str, **kwargs ) -> AsyncIterable["_models.AzureFirewallListResult"]: """Lists all Azure Firewalls in a resource group. :param resource_group_name: The name of the resource group. 
:type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AzureFirewallListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_09_01.models.AzureFirewallListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewallListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-09-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('AzureFirewallListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls'} # type: ignore def list_all( self, **kwargs ) -> AsyncIterable["_models.AzureFirewallListResult"]: """Gets all the Azure Firewalls in a subscription. 
:keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either AzureFirewallListResult or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_09_01.models.AzureFirewallListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewallListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2019-09-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_all.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('AzureFirewallListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/azureFirewalls'} # type: ignore
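
# Illustrative usage sketch (not part of the generated module): one plausible
# way to drive the operations above through the versioned async management
# client. The credential type, subscription id, resource group and firewall
# names below are assumptions for the example, not values from this file.
#
# from azure.identity.aio import DefaultAzureCredential
# from azure.mgmt.network.v2019_09_01.aio import NetworkManagementClient
#
# async def example():
#     async with NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
#         poller = await client.azure_firewalls.begin_create_or_update(
#             "example-rg", "example-fw", {"location": "westus"})
#         firewall = await poller.result()          # waits for the LRO to finish
#         async for fw in client.azure_firewalls.list("example-rg"):
#             print(fw.name)                        # AsyncItemPaged handles paging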
1a7330518eff453cd30004ae2fb01e90bfe63fbe
80616a1bfacc2081d983c3d8ec5b3f55295a323b
/serial_coms_list.py
2bee4f29591ba132b7218896bc837498d8e321c6
[]
no_license
choyai/kis-personal-space-bot
47a681b679b7b6caf2c3b10124603e4475e3b73b
8dbce0b6cd88a1b326a2e9e4c9b3cf5120a083ee
refs/heads/master
2020-06-07T11:19:19.921167
2019-06-24T07:14:15
2019-06-24T07:14:15
193,010,563
1
0
null
null
null
null
UTF-8
Python
false
false
8,085
py
# from scipy.spatial import distance as dist
# from imutils import perspective
# from imutils import contours
import numpy as np
# import argparse
# import imutils
# import cv2
import serial
import time
import struct
# from InverseKinematics import *
# from Graph import astar
import pickle
from math import pi
from random import randint

countsPerMillimeter = (321 / 300 * 400) / (np.pi * 10)
countsPerMillimeter_z = (12 * 66) / (np.pi * 12)

# Connect to mcu
setPath = {
}


def autoConnect(baud, portName):
    while(1):
        try:
            serialDevice = serial.Serial()
            serialDevice.baudrate = baud
            # serialDevice.parity = 'E'
            serialDevice.port = portName
            serialDevice.timeout = 1
            serialDevice.rts = 0
            serialDevice.dtr = 0
            serialDevice.open()
            print('connected to mcu')
            return serialDevice
        except:
            print('connection failed')
            pass


def sendCommand(command, ser):
    ser.write(bytes(command))
    # while(1):
    #     if ser.inWaiting() > 0:
    #         # data = ser.read(1)
    #         # print("data =", ord(data))
    #         response = ser.readline().decode('utf-8')
    #         print(response)
    #         # if response != 'received' or 'starting':
    #         #     pass
    #         if response == 'resend':
    #             ser.write(bytes(command))
    #         elif response == 'done':
    #             return 1
    #         elif response == 'starting':
    #             break
    #         else:
    #             pass
    #     # return 0
    #     # break
    #     # else:
    #     #     ser.write(bytes(command))


def setHome(ser):
    buffer = [255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    checksum = 0
    for i in buffer:
        checksum += i
    checksum = checksum % 256
    buffer.append(checksum)
    print('sending ')
    print(buffer)
    sendCommand(buffer, ser)


def setPosXY(x, y, ser):
    buffer = [255, 255, 1]
    # a = int(np.sqrt(2) / 2 * (y - x))
    # b = int(np.sqrt(2) / 2 * (y + x))
    print("x = " + str(x / countsPerMillimeter))
    print("y = " + str(y / countsPerMillimeter))
    # print("a = " + str(a))
    # print("b = " + str(b))
    x_sign = 0 if x >= 0 else 1
    y_sign = 0 if y >= 0 else 1
    buffer.append(x_sign)
    buffer.extend(split_large_ints(abs(x)))
    buffer.append(y_sign)
    buffer.extend(split_large_ints(abs(y)))
    checksum = 0
    for i in buffer:
        checksum += i
    checksum = checksum % 256
    buffer.append(checksum)
    print('sending ')
    print(buffer)
    sendCommand(buffer, ser)


# def setPosXY_mm(x, y, ser, x_pix, y_pix, countsPerMillimeter=countsPerMillimeter):
#     buffer = [255, 255, 1]
#     x = x * countsPerMillimeter
#     y = y * countsPerMillimeter
#
#     a = int(np.sqrt(2) / 2 * (y - x))
#     b = int(np.sqrt(2) / 2 * (y + x))
#     print("x = " + str(x / countsPerMillimeter))
#     print("y = " + str(y / countsPerMillimeter))
#     print("a = " + str(a))
#     print("b = " + str(b))
#     a_sign = 0 if a >= 0 else 1
#     b_sign = 0 if b >= 0 else 1
#     buffer.extend(split_large_ints(abs(a)))
#     buffer.extend(split_large_ints(abs(b)))
#     buffer.extend([a_sign, b_sign])
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def setPosZ(z, ser):
#     buffer = [255, 255, 2]
#     buffer.extend(split_large_ints(z))
#     buffer.extend([0, 0, 0, 0])
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def setPosZ_mm(z, ser, countsPerMillimeter_z):
#     buffer = [255, 255, 2]
#     z = int(z * countsPerMillimeter_z)
#     buffer.extend(split_large_ints(z))
#     buffer.extend([0, 0, 0, 0])
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def gripClose(ser):
#     buffer = [255, 255, 3, 0, 0, 0, 0, 0, 0]
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def gripOpen(ser):
#     buffer = [255, 255, 4, 0, 0, 0, 0, 0, 0]
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def gripHalf(duty, ser):
#     buffer = [255, 255, 10, ]
#     buffer.extend(split_large_ints(duty))
#     buffer.extend([0, 0, 0, 0])
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def gripRotate(angle, ser):
#     buffer = [255, 255, 5]
#     buffer.extend(split_large_ints(angle))
#     buffer.extend([0, 0, 0, 0])
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def setAGains(K_P, K_I, K_D, ser):
#     buffer = [255, 255, 6]
#     buffer.extend(split_floats(K_P))
#     buffer.extend(split_floats(K_I))
#     buffer.extend(split_floats(K_D))
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def setBGains(K_P, K_I, K_D, ser):
#     buffer = [255, 255, 7]
#     buffer.extend(split_floats(K_P))
#     buffer.extend(split_floats(K_I))
#     buffer.extend(split_floats(K_D))
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def setZGains(K_P, K_I, K_D, ser):
#     buffer = [255, 255, 8]
#     buffer.extend(split_floats(K_P))
#     buffer.extend(split_floats(K_I))
#     buffer.extend(split_floats(K_D))
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)
#
#
# def setTolerances(ser):
#     buffer = [255, 255, 9]
#     tolerances = []
#     for al in ['a', 'b', 'z']:
#         while(1):
#             try:
#                 tolerance = (int(input("set tolerance_" + al + ": ")))
#                 buffer.extend(split_large_ints(tolerance))
#                 break
#             except:
#                 print("try again")
#     checksum = 0
#     for i in buffer:
#         checksum += i
#     checksum = checksum % 256
#     buffer.append(checksum)
#     print('sending ')
#     print(buffer)
#     sendCommand(buffer, ser)


# splits large ints into msb and lsb. Doesn't support ints larger than 16 bits
def split_large_ints(num):
    # numstring = str(hex(num))
    # lsB = '0x'
    # msB = '0x'
    # if len(numstring) < 5:
    #     msB = '0x00'
    # else:
    #     if len(numstring) == 5:
    #         msB += numstring[2]
    #     else:
    #         msB = msB + numstring[len(numstring) - 4] + \
    #             numstring[len(numstring) - 3]
    # if len(numstring) < 4:
    #     lsB += numstring[len(numstring) - 1]
    # else:
    #     lsB = lsB + numstring[len(numstring) - 2] + \
    #         numstring[len(numstring) - 1]
    msB = (num // 256) % 256
    lsB = num % 256
    return [msB, lsB]


# splits floats from their decimals and turns them into ints
def split_floats(num):
    a, b = divmod(num, 1.0)
    a = int(a) % 256
    b = int(b * 256)
    return [a, b]
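
# Illustrative usage sketch (added for clarity, not in the original script).
# The baud rate, port name and target position below are assumptions for the
# example only:
#
# if __name__ == '__main__':
#     ser = autoConnect(baud=115200, portName='/dev/ttyUSB0')
#     setHome(ser)                                 # zero the axes first
#     x_counts = int(100 * countsPerMillimeter)    # 100 mm -> encoder counts
#     y_counts = int(50 * countsPerMillimeter)     # 50 mm -> encoder counts
#     setPosXY(x_counts, y_counts, ser)            # frames carry a mod-256 checksum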
1de6c30ae80ba4c72a4ba8bcee65ddbea304c734
747142c31420254647caf85cc8532f4d13b00146
/django-web/bare-bones-web/adoptions/adoptions/models.py
96eccf0f66d1489ffac826247486abafdc94a5ca
[]
no_license
jankidepala/machine-learning-IOT
fdf5f9b9c767405a940b1963d5f68b219e437c51
125bac2a3a023779e79f89dcf4b7304e6e2601ae
refs/heads/master
2022-11-19T05:35:50.180173
2020-07-25T08:15:28
2020-07-25T08:15:28
107,033,248
0
0
null
null
null
null
UTF-8
Python
false
false
506
py
from django.db import models

# Create your models here.
class Pet(models.Model):
    name = models.CharField(max_length=101)
    submitter = models.CharField(max_length=101)
    species = models.CharField(max_length=30)
    breed = models.CharField(max_length=30, blank=True)
    submission_date = models.DateTimeField()
    vaccinations = models.ManyToManyField('Vaccine', blank=True)


class Vaccine(models.Model):
    name = models.CharField(max_length=50)
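
# Illustrative ORM usage (a sketch, not part of the app; assumes migrations
# have been applied), e.g. from `python manage.py shell`:
#
# from django.utils import timezone
# from adoptions.models import Pet, Vaccine
#
# rabies = Vaccine.objects.create(name='Rabies')
# pet = Pet.objects.create(name='Rex', submitter='Jane', species='Dog',
#                          breed='Labrador', submission_date=timezone.now())
# pet.vaccinations.add(rabies)  # many-to-many links are added after the Pet exists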
140216cf494403c143fccbc18bd45cf02c208cdb
789a540bbb79c334cbeaf3687876bfd939e4290b
/app/handlers/private/admin/message/menu/message_distribution/distribution_options/wait_confirm_activate_schedule.py
d89fe3846c47add5b6d184ed3e7278c4db191b2a
[]
no_license
ExissBrr/TRIGON-GARANT-BOT
2cc96f5f6f195f4e76c164db4f8acafbfa5b7662
812acf060eb92e6fad21568a75e6dba7ce0da4d9
refs/heads/main
2023-07-04T18:22:43.507453
2021-08-17T14:51:30
2021-08-17T14:51:30
392,725,437
0
0
null
null
null
null
UTF-8
Python
false
false
813
py
from aiogram.dispatcher import FSMContext
from aiogram.types import Message

from app.data import text
from app.loader import dp
from app.states.private.message_distribution import MessageSendingStates
from app.utils.bot import send_main_keyboard
from app.utils.db_api.models.messages_for_sending import MessageForSending


@dp.message_handler(state=MessageSendingStates.wait_confirm_activate_schedule)
async def activate_schedule(message: Message, lang_code, state: FSMContext, user, state_data: dict):
    schedule_id = int(state_data.get('message_id'))
    schedule_message = await MessageForSending.get(schedule_id)
    await schedule_message.update_data(is_active=True)
    await message.answer(
        text=text[lang_code].admin.message.schedule_was_activated
    )
    await send_main_keyboard(user, state)
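
# Illustrative only (not part of this handler module): the handler above runs
# once the FSM is in wait_confirm_activate_schedule, so some earlier handler
# must have set that state and stored the schedule id. A sketch of that step,
# with the variable names assumed:
#
# await MessageSendingStates.wait_confirm_activate_schedule.set()
# await state.update_data(message_id=schedule_message.id)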
0ce5d4785f5a66c6f189dd549293a858ec245133
6a3c2f5d5622ee0e8a58d871efd1cb7344fd5a13
/extras/scripts/travis_yml_generator.py
e9077b52beeafbee052524ffdeee430a3e52297f
[ "Apache-2.0" ]
permissive
StetHD/fruit
745e24dbb3d899f56adf3220bb0409470ec853c3
b9806196a70e9b92ddf90cb255f2d31ea8d6bcae
refs/heads/master
2020-03-31T01:17:50.610355
2018-09-16T08:06:25
2018-09-16T08:06:25
null
0
0
null
null
null
null
UTF-8
Python
false
false
10,312
py
#!/usr/bin/env python3
#  Copyright 2016 Google Inc. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS-IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.

import yaml

# "smoke tests" are run before other build matrix rows.
build_matrix_smoke_test_rows = []
build_matrix_rows = []


def determine_compiler_kind(compiler):
    if compiler.startswith('gcc'):
        return 'gcc'
    elif compiler.startswith('clang'):
        return 'clang'
    else:
        raise Exception('Unexpected compiler: %s' % compiler)


def determine_tests(asan, ubsan, smoke_tests, use_precompiled_headers_in_tests, exclude_tests, include_only_tests):
    tests = []
    has_debug_build = False
    tests += ['ReleasePlain']
    if asan:
        has_debug_build = True
        if ubsan:
            tests += ['DebugAsanUbsan']
        else:
            tests += ['DebugAsan']
    if ubsan and not asan:
        raise Exception('Enabling UBSan but not ASan is not currently supported.')
    if not has_debug_build:
        tests += ['DebugPlain']
    for smoke_test in smoke_tests:
        if smoke_test not in tests:
            tests += [smoke_test]
    excessive_excluded_tests = set(exclude_tests) - set(tests)
    if excessive_excluded_tests:
        raise Exception(
            'Some tests were excluded but were not going to run anyway: %s. '
            'Tests to run (ignoring the possible NoPch prefix): %s'
            % (excessive_excluded_tests, tests))
    if include_only_tests is not None:
        if exclude_tests != []:
            raise Exception('Using exclude_tests and include_only_tests together is not supported.')
        tests = include_only_tests
    else:
        tests = [test for test in tests if test not in exclude_tests]
    if not use_precompiled_headers_in_tests:
        tests = [test + 'NoPch' for test in tests]
    return tests


def generate_export_statements_for_env(env):
    return ' '.join(['export %s=\'%s\';' % (var_name, value) for (var_name, value) in sorted(env.items())])


def generate_env_string_for_env(env):
    return ' '.join(['%s=%s' % (var_name, value) for (var_name, value) in sorted(env.items())])


def add_ubuntu_tests(ubuntu_version, compiler, os='linux', stl=None, asan=True, ubsan=True,
                     use_precompiled_headers_in_tests=True, smoke_tests=[], exclude_tests=[],
                     include_only_tests=None):
    env = {
        'UBUNTU': ubuntu_version,
        'COMPILER': compiler
    }
    if stl is not None:
        env['STL'] = stl
    compiler_kind = determine_compiler_kind(compiler)
    export_statements = 'export OS=' + os + '; ' + generate_export_statements_for_env(env=env)
    test_environment_template = {'os': 'linux', 'compiler': compiler_kind,
                                 'install': '%s extras/scripts/travis_ci_install_linux.sh' % export_statements}
    tests = determine_tests(asan, ubsan, smoke_tests,
                            use_precompiled_headers_in_tests=use_precompiled_headers_in_tests,
                            exclude_tests=exclude_tests, include_only_tests=include_only_tests)
    for test in tests:
        test_environment = test_environment_template.copy()
        test_environment['script'] = '%s extras/scripts/postsubmit.sh %s' % (export_statements, test)
        # The TEST variable has no effect on the test run, but allows to see the test name in the Travis CI dashboard.
        test_environment['env'] = generate_env_string_for_env(env) + " TEST=%s" % test
        if test in smoke_tests:
            build_matrix_smoke_test_rows.append(test_environment)
        else:
            build_matrix_rows.append(test_environment)


def add_osx_tests(compiler, xcode_version=None, stl=None, asan=True, ubsan=True,
                  use_precompiled_headers_in_tests=True, smoke_tests=[], exclude_tests=[],
                  include_only_tests=None):
    env = {'COMPILER': compiler}
    if stl is not None:
        env['STL'] = stl
    compiler_kind = determine_compiler_kind(compiler)
    export_statements = 'export OS=osx; ' + generate_export_statements_for_env(env=env)
    test_environment_template = {'os': 'osx', 'compiler': compiler_kind,
                                 'install': '%s extras/scripts/travis_ci_install_osx.sh' % export_statements}
    if xcode_version is not None:
        test_environment_template['osx_image'] = 'xcode%s' % xcode_version
    tests = determine_tests(asan, ubsan, smoke_tests,
                            use_precompiled_headers_in_tests=use_precompiled_headers_in_tests,
                            exclude_tests=exclude_tests, include_only_tests=include_only_tests)
    for test in tests:
        test_environment = test_environment_template.copy()
        test_environment['script'] = '%s extras/scripts/postsubmit.sh %s' % (export_statements, test)
        # The TEST variable has no effect on the test run, but allows to see the test name in the Travis CI dashboard.
        test_environment['env'] = generate_env_string_for_env(env) + " TEST=%s" % test
        if test in smoke_tests:
            build_matrix_smoke_test_rows.append(test_environment)
        else:
            build_matrix_rows.append(test_environment)


def add_bazel_tests(ubuntu_version, smoke_tests=[]):
    env = {
        'UBUNTU': ubuntu_version,
        'COMPILER': 'bazel',
    }
    test = 'DebugPlain'
    export_statements = 'export OS=linux; ' + generate_export_statements_for_env(env=env)
    test_environment = {'os': 'linux',
                        'compiler': 'gcc',
                        'env': generate_env_string_for_env(env),
                        'install': '%s extras/scripts/travis_ci_install_linux.sh' % export_statements,
                        'script': '%s extras/scripts/postsubmit.sh %s' % (export_statements, test)}
    if test in smoke_tests:
        build_matrix_smoke_test_rows.append(test_environment)
    else:
        build_matrix_rows.append(test_environment)


# TODO: re-enable ASan/UBSan once they work in Travis CI. ATM (as of 18 November 2017) they fail due to https://github.com/google/sanitizers/issues/837
add_ubuntu_tests(ubuntu_version='18.04', compiler='gcc-8', asan=False, ubsan=False, smoke_tests=['DebugPlain', 'ReleasePlain'])
add_ubuntu_tests(ubuntu_version='18.04', compiler='clang-4.0', stl='libstdc++')
add_ubuntu_tests(ubuntu_version='18.04', compiler='clang-5.0', stl='libstdc++')
add_ubuntu_tests(ubuntu_version='18.04', compiler='clang-6.0', stl='libstdc++', smoke_tests=['DebugPlain', 'DebugAsanUbsan', 'ReleasePlain'])

add_bazel_tests(ubuntu_version='16.04', smoke_tests=['DebugPlain'])

# ASan/UBSan are disabled for all these, the analysis on later versions is better anyway.
# Also, in some combinations they wouldn't work.
add_ubuntu_tests(ubuntu_version='14.04', compiler='gcc-5', asan=False, ubsan=False)
add_ubuntu_tests(ubuntu_version='14.04', compiler='clang-3.5', stl='libstdc++', asan=False, ubsan=False)
add_ubuntu_tests(ubuntu_version='14.04', compiler='clang-3.9', stl='libstdc++', asan=False, ubsan=False)
add_ubuntu_tests(ubuntu_version='14.04', compiler='clang-3.5', stl='libc++', asan=False, ubsan=False)
add_ubuntu_tests(ubuntu_version='14.04', compiler='clang-3.9', stl='libc++', asan=False, ubsan=False)

# Asan/Ubsan are disabled because it generates lots of warnings like:
#     warning: direct access in [...] to global weak symbol guard variable for [...] means the weak symbol cannot be
#     overridden at runtime. This was likely caused by different translation units being compiled with different
#     visibility settings.
# and the build eventually fails or times out.
add_osx_tests(compiler='gcc-5', xcode_version='8', asan=False, ubsan=False)
add_osx_tests(compiler='gcc-6', xcode_version='8', asan=False, ubsan=False, smoke_tests=['DebugPlain'])
add_osx_tests(compiler='clang-4.0', xcode_version='8', stl='libc++', smoke_tests=['DebugPlain'])
# UBSan is disabled because AppleClang does not support -fsanitize=undefined.
add_osx_tests(compiler='clang-default', xcode_version='7.3', stl='libc++', ubsan=False)
# UBSan is disabled because AppleClang does not support -fsanitize=undefined.
add_osx_tests(compiler='clang-default', xcode_version='8.2', stl='libc++', ubsan=False)
add_osx_tests(compiler='clang-default', xcode_version='9.4', stl='libc++')
add_osx_tests(compiler='clang-default', xcode_version='10', stl='libc++', smoke_tests=['DebugPlain'])

# ** Disabled combinations **
#
# These fail with "'type_traits' file not found" (the <type_traits> header is missing).
#
#    add_osx_tests('gcc-default', stl='libstdc++')
#    add_osx_tests('clang-default', stl='libstdc++')
#    add_osx_tests('clang-3.5', stl='libstdc++')
#    add_osx_tests('clang-3.6', stl='libstdc++')
#
#
# The compiler complains that the 2-argument constructor of std::pair is ambiguous, even after
# adding explicit casts to the exact types of the expected overload.
#
#    add_osx_tests('clang-default', stl='libc++')
#
#
# This triggers an assert error in the compiler, with the message:
# "expected to get called on an inlined function!" [...] function isMSExternInline, file Decl.cpp, line 2647.
#
#    add_osx_tests('clang-3.5', stl='libc++', asan=False, ubsan=False)
#
#
# This fails with this error:
# /usr/include/c++/v1/string:1938:44: error: 'basic_string<_CharT, _Traits, _Allocator>' is missing
# exception specification 'noexcept(is_nothrow_copy_constructible<allocator_type>::value)'
# TODO: Try again every once in a while (to re-enable these once the bug in libc++ is fixed).
#
#    add_ubuntu_tests(ubuntu_version='16.04', compiler='clang-3.8', stl='libc++', asan=False, ubsan=False)
#

yaml_file = {
    'sudo': 'required',
    'dist': 'trusty',
    'services': ['docker'],
    'language': 'cpp',
    'branches': {
        'only': ['master'],
    },
    'matrix': {
        'fast_finish': True,
        'include': build_matrix_smoke_test_rows + build_matrix_rows,
    },
}


class CustomDumper(yaml.SafeDumper):
    def ignore_aliases(self, _data):
        return True


print('#')
print('# This file was auto-generated from extras/scripts/travis_yml_generator.py, DO NOT EDIT')
print('#')
print(yaml.dump(yaml_file, default_flow_style=False, Dumper=CustomDumper))
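
# Worked example (derived from determine_tests above, added for illustration):
#
#   determine_tests(asan=True, ubsan=True, smoke_tests=[],
#                   use_precompiled_headers_in_tests=True,
#                   exclude_tests=[], include_only_tests=None)
#   # -> ['ReleasePlain', 'DebugAsanUbsan']
#   # (DebugPlain is skipped because the ASan run already provides a debug build)
#
#   # With use_precompiled_headers_in_tests=False every test gets the NoPch suffix:
#   # -> ['ReleasePlainNoPch', 'DebugAsanUbsanNoPch']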
faded2f996b520e49df380a399e225a2a20919d5
f34219ce57fbe6b446fb4f1aa966f136a9b3bf42
/ex26.py
6dc076019fe108fa44d2e7b17af7da7080d86319
[]
no_license
DikranHachikyan/python-programming-20200323
467f60c07ac47e4146e63967eeb1acb76b8dd21c
e6d6185561969fc478caaedcd0981476d56d5c5b
refs/heads/master
2021-04-20T09:52:25.151484
2020-04-09T07:24:33
2020-04-09T07:24:33
249,673,012
0
0
null
null
null
null
UTF-8
Python
false
false
370
py
# 1. declaration
def addNumbers(a, b, c = None):
    res = 0
    if c:
        res = a + b + c
    else:
        res = a + b
    return res


if __name__ == '__main__':
    # 2. call
    x, y = 7, 8
    r = addNumbers(x, y)
    print(f'{x} + {y} = {r}')

    z = 10
    r = addNumbers(x, y, z)
    print(f'{x} + {y} + {z} = {r}')
a635b80d79ea126dcfd89d7fcde391bb6f7f0a2e
42caf42f20b348c3c87d7d4f9cd59e61bc5e9f4e
/result/views.py
19d1dd5da5edeb9256feb16f0c0e82e38c6bad15
[]
no_license
Dihfahsih1/Automated-Result-School-Management
cfe43cc37c64a101447e50688f956190c67032ba
7c4be1b70d26e89939f38f1df3a3ca04e7e3e96e
refs/heads/master
2022-11-30T02:57:44.827519
2020-08-15T12:46:28
2020-08-15T12:46:28
287,744,827
1
0
null
null
null
null
UTF-8
Python
false
false
42,180
py
from django.shortcuts import get_object_or_404, redirect, render
from django.contrib.auth.decorators import login_required
from .decorators import lecturer_required, student_required
from .forms import *
from .models import User, Student, Course, CourseAllocation, TakenCourse, Session, Semester, CarryOverStudent, RepeatingStudent
from django.views.generic import CreateView, UpdateView, DeleteView
from django.utils.decorators import method_decorator
from django.contrib.auth import update_session_auth_hash, authenticate
from django.contrib.auth.forms import PasswordChangeForm
from django.urls import reverse_lazy
from django.http import HttpResponseRedirect
from django.contrib import messages

# pdf
from django.core.files.storage import FileSystemStorage
from django.http import HttpResponse, JsonResponse
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, Image
from reportlab.lib.styles import getSampleStyleSheet, black, ParagraphStyle
from reportlab.lib.enums import TA_JUSTIFY, TA_LEFT, TA_CENTER, TA_RIGHT
from reportlab.platypus.tables import Table
from reportlab.lib.units import inch
from reportlab.lib import colors

cm = 2.54

from ARMS.settings import MEDIA_ROOT, BASE_DIR, STATIC_URL
import os


@login_required
def home(request):
    """
    Shows our dashboard containing the number of students, courses, lecturers,
    repeating students, carry-over students and 1st class students in an
    interactive graph
    """
    students = Student.objects.all().count()
    staff = User.objects.filter(is_lecturer=True).count()
    courses = Course.objects.all().count()
    current_semester = Semester.objects.get(is_current_semester=True)
    no_of_1st_class_students = Result.objects.filter(cgpa__gte=4.5).count()
    no_of_carry_over_students = CarryOverStudent.objects.all().count()
    no_of_students_to_repeat = RepeatingStudent.objects.all().count()

    context = {
        "no_of_students": students,
        "no_of_staff": staff,
        "no_of_courses": courses,
        "no_of_1st_class_students": no_of_1st_class_students,
        "no_of_students_to_repeat": no_of_students_to_repeat,
        "no_of_carry_over_students": no_of_carry_over_students,
    }
    return render(request, 'result/home.html', context)


def get_chart(request, *args, **kwargs):
    all_query_score = ()
    levels = (100, 200, 300, 400, 500)  # all the levels in the department

    # iterate through the levels above
    for i in levels:
        # gather all the courses registered by the students of the current level in the loop
        all_query_score += (TakenCourse.objects.filter(student__level=i),)

    # for level #100
    first_level_total = 0
    # get the total score for all the courses registered by the students of this level
    for i in all_query_score[0]:
        first_level_total += i.total
    first_level_avg = 0
    if not all_query_score[0].count() == 0:
        # calculate the average of all the students of this level
        first_level_avg = first_level_total / all_query_score[0].count()

    # do same as above for
    # 200 Level students
    second_level_total = 0
    for i in all_query_score[1]:
        second_level_total += i.total
    second_level_avg = 0
    if not all_query_score[1].count() == 0:
        second_level_avg = second_level_total / all_query_score[1].count()

    # do same as above for
    # 300 Level students
    third_level_total = 0
    for i in all_query_score[2]:
        third_level_total += i.total
    third_level_avg = 0
    if not all_query_score[2].count() == 0:
        third_level_avg = third_level_total / all_query_score[2].count()

    # do same as above for
    # 400 Level students
    fourth_level_total = 0
    for i in all_query_score[3]:
        fourth_level_total += i.total
    fourth_level_avg = 0
    if not all_query_score[3].count() == 0:
        fourth_level_avg = fourth_level_total / all_query_score[3].count()

    # do same as above for
    # 500 Level students
    fifth_level_total = 0
    for i in all_query_score[4]:
        fifth_level_total += i.total
    fifth_level_avg = 0
    if not all_query_score[4].count() == 0:
        fifth_level_avg = fifth_level_total / all_query_score[4].count()

    labels = ["100 Level", "200 Level", "300 Level", "400 Level", "500 Level"]
    default_level_average = [first_level_avg, second_level_avg, third_level_avg, fourth_level_avg, fifth_level_avg]

    average_data = {
        "labels": labels,
        "default_level_average": default_level_average,
    }
    return JsonResponse(average_data)


@login_required
def profile(request):
    """ Show the profile of the user that fired the request """
    current_semester = Semester.objects.get(is_current_semester=True)
    if request.user.is_lecturer:
        courses = Course.objects.filter(allocated_course__lecturer__pk=request.user.id).filter(semester=current_semester)
        return render(request, 'account/profile.html', {"courses": courses,})
    elif request.user.is_student:
        level = Student.objects.get(user__pk=request.user.id)
        courses = TakenCourse.objects.filter(student__user__id=request.user.id, course__level=level.level)
        context = {
            'courses': courses,
            'level': level,
        }
        return render(request, 'account/profile.html', context)
    else:
        staff = User.objects.filter(is_lecturer=True)
        return render(request, 'account/profile.html', {"staff": staff})


@login_required
def user_profile(request, id):
    """ Show the profile of any selected user """
    if request.user.id == id:
        return redirect("/profile/")
    current_semester = Semester.objects.get(is_current_semester=True)
    user = User.objects.get(pk=id)
    if user.is_lecturer:
        courses = Course.objects.filter(allocated_course__lecturer__pk=id).filter(semester=current_semester)
        context = {
            "user": user,
            "courses": courses,
        }
        return render(request, 'account/user_profile.html', context)
    elif user.is_student:
        level = Student.objects.get(user__pk=id)
        courses = TakenCourse.objects.filter(student__user__id=id, course__level=level.level)
        context = {
            "user_type": "student",
            'courses': courses,
            'level': level,
            'user': user,
        }
        return render(request, 'account/user_profile.html', context)
    else:
        context = {
            "user": user,
            "user_type": "superuser"
        }
        return render(request, 'account/user_profile.html', context)


@login_required
def profile_update(request):
    """
    If the request is a POST, grab the changes and update the records;
    otherwise show an empty form
    """
    user = request.user.id
    user = User.objects.get(pk=user)
    if request.method == 'POST':
        form = ProfileForm(request.POST)
        if form.is_valid():
            user.first_name = form.cleaned_data.get('first_name')
            user.last_name = form.cleaned_data.get('last_name')
            user.email = form.cleaned_data.get('email')
            user.phone = form.cleaned_data.get('phone')
            user.address = form.cleaned_data.get('address')
            if request.FILES:
                user.picture = request.FILES['picture']
            user.save()
            messages.success(request, 'Your profile was successfully edited.')
            return redirect("/profile/")
    else:
        form = ProfileForm(instance=user, initial={
            'firstname': user.first_name,
            'lastname': user.last_name,
            'email': user.email,
            'phone': user.phone,
            'picture': user.picture,
        })
    return render(request, 'account/profile_update.html', {'form': form})


@login_required
@lecturer_required
def course_list(request):
    """ Show a list of all registered courses in the system """
    courses = Course.objects.all()
    context = {
        "courses": courses,
    }
    return render(request, 'course/course_list.html', context)


@login_required
@lecturer_required
def student_list(request):
    """ Show a list of all registered students in the system """
    students = Student.objects.all()
    user_type = "Student"
    context = {
        "students": students,
        "user_type": user_type,
    }
    return render(request, 'students/student_list.html', context)


@login_required
@lecturer_required
def staff_list(request):
    """ Show a list of all registered staff """
    staff = User.objects.filter(is_student=False)
    user_type = "Staff"
    context = {
        "staff": staff,
        "user_type": user_type,
    }
    return render(request, 'staff/staff_list.html', context)


@login_required
@lecturer_required
def session_list_view(request):
    """ Show a list of all sessions """
    sessions = Session.objects.all().order_by('-session')
    return render(request, 'result/manage_session.html', {"sessions": sessions,})


@login_required
@lecturer_required
def session_add_view(request):
    """ Check the request method: if POST, add the session; otherwise show an empty form """
    if request.method == 'POST':
        form = SessionForm(request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, 'Session added successfully ! ')
    else:
        form = SessionForm()
    return render(request, 'result/session_update.html', {'form': form})


@login_required
@lecturer_required
def session_update_view(request, pk):
    session = Session.objects.get(pk=pk)
    if request.method == 'POST':
        a = request.POST.get('is_current_session')
        if a == '2':
            unset = Session.objects.get(is_current_session=True)
            unset.is_current_session = False
            unset.save()
            form = SessionForm(request.POST, instance=session)
            if form.is_valid():
                form.save()
                messages.success(request, 'Session updated successfully ! ')
        else:
            form = SessionForm(request.POST, instance=session)
            if form.is_valid():
                form.save()
                messages.success(request, 'Session updated successfully ! ')
    else:
        form = SessionForm(instance=session)
    return render(request, 'result/session_update.html', {'form': form})


@login_required
@lecturer_required
def session_delete_view(request, pk):
    session = get_object_or_404(Session, pk=pk)
    if session.is_current_session == True:
        messages.info(request, "You cannot delete current session")
        return redirect('manage_session')
    else:
        session.delete()
        messages.success(request, "Session successfully deleted")
    return redirect('manage_session')


@login_required
@lecturer_required
def semester_list_view(request):
    semesters = Semester.objects.all().order_by('-semester')
    return render(request, 'result/manage_semester.html', {"semesters": semesters,})


@login_required
@lecturer_required
def semester_add_view(request):
    if request.method == 'POST':
        form = SemesterForm(request.POST)
        if form.is_valid():
            data = form.data.get('is_current_semester')  # returns string of 'True' if the user selected Yes
            if data == 'True':
                semester = form.data.get('semester')
                ss = form.data.get('session')
                session = Session.objects.get(pk=ss)
                try:
                    if Semester.objects.get(semester=semester, session=ss):
                        messages.info(request, semester + " semester in " + session.session + " session already exist")
                        return redirect('create_new_semester')
                except:
                    semester = Semester.objects.get(is_current_semester=True)
                    semester.is_current_semester = False
                    semester.save()
                    form.save()
            form.save()
            messages.success(request, 'Semester added successfully ! ')
            return redirect('manage_semester')
    else:
        form = SemesterForm()
    return render(request, 'result/semester_update.html', {'form': form})


@login_required
@lecturer_required
def semester_update_view(request, pk):
    semester = Semester.objects.get(pk=pk)
    if request.method == 'POST':
        if request.POST.get('is_current_semester') == 'True':  # returns string of 'True' if the user selected yes for 'is current semester'
            unset_semester = Semester.objects.get(is_current_semester=True)
            unset_semester.is_current_semester = False
            unset_semester.save()
            unset_session = Session.objects.get(is_current_session=True)
            unset_session.is_current_session = False
            unset_session.save()
            new_session = request.POST.get('session')
            form = SemesterForm(request.POST, instance=semester)
            if form.is_valid():
                set_session = Session.objects.get(pk=new_session)
                set_session.is_current_session = True
                set_session.save()
                form.save()
                messages.success(request, 'Semester updated successfully !')
                return redirect('manage_semester')
        else:
            form = SemesterForm(request.POST, instance=semester)
            if form.is_valid():
                form.save()
                return redirect('manage_semester')
    else:
        form = SemesterForm(instance=semester)
    return render(request, 'result/semester_update.html', {'form': form})


@login_required
@lecturer_required
def semester_delete_view(request, pk):
    semester = get_object_or_404(Semester, pk=pk)
    if semester.is_current_semester == True:
        messages.info(request, "You cannot delete current semester")
        return redirect('manage_semester')
    else:
        semester.delete()
        messages.success(request, "Semester successfully deleted")
    return redirect('manage_semester')


@method_decorator([login_required, lecturer_required], name='dispatch')
class StaffAddView(CreateView):
    model = User
    form_class = StaffAddForm
    template_name = 'registration/add_staff.html'

    def get_context_data(self, **kwargs):
        kwargs['user_type'] = 'staff'
        return super().get_context_data(**kwargs)

    def form_valid(self, form):
        user = form.save()
        return redirect('staff_list')


@login_required
@lecturer_required
def edit_staff(request, pk):
    staff = get_object_or_404(User, pk=pk)
    if request.method == "POST":
        form = StaffAddForm(request.POST, instance=staff)
        if form.is_valid():
            staff.save()
            return redirect('staff_list')
    else:
        form = StaffAddForm(instance=staff)
    return render(request, 'registration/edit_staff.html', {'form': form})


@login_required
@lecturer_required
def delete_staff(request, pk):
    staff = get_object_or_404(User, pk=pk)
    staff.delete()
    return redirect('staff_list')


@method_decorator([login_required, lecturer_required], name='dispatch')
class StudentAddView(CreateView):
    model = User
    form_class = StudentAddForm
    template_name = 'registration/add_student.html'

    def get_context_data(self, **kwargs):
        kwargs['user_type'] = 'student'
        return super().get_context_data(**kwargs)

    def form_valid(self, form):
        user = form.save()
        return redirect('student_list')


@login_required
@lecturer_required
def edit_student(request, pk):
    student = get_object_or_404(Student, pk=pk)
    if request.method == "POST":
        form = StudentAddForm(request.POST, instance=student)
        if form.is_valid():
            form.save()
            return redirect('student_list')
    else:
        form = StudentAddForm(instance=student)
    return render(request, 'registration/edit_student.html', {'form': form})


@login_required
@lecturer_required
def delete_student(request, pk):
    student = get_object_or_404(Student, pk=pk)
    student.delete()
    return redirect('student_list')


@method_decorator([login_required, lecturer_required], name='dispatch')
class CourseAddView(CreateView):
    model = Course
    form_class = CourseAddForm
    template_name = 'course/course_form.html'

    def form_valid(self, form):
        form.save()
        return redirect('course_allocation')


@login_required
@lecturer_required
def course_edit(request, pk):
    course = get_object_or_404(Course, pk=pk)
    if request.method == "POST":
        form = CourseAddForm(request.POST, instance=course)
        if form.is_valid():
            course.save()
            messages.success(request, "Successfully Updated")
            return redirect('course_list')
    else:
        form = CourseAddForm(instance=course)
    return render(request, 'course/course_form.html', {'form': form})


@method_decorator([login_required, lecturer_required], name='dispatch')
class CourseAllocationView(CreateView):
    form_class = CourseAllocationForm
    template_name = 'course/course_allocation.html'

    def get_form_kwargs(self):
        kwargs = super(CourseAllocationView, self).get_form_kwargs()
        kwargs['user'] = self.request.user
        return kwargs

    def form_valid(self, form):
        # if a staff has been allocated a course before, update it; else create a new allocation
        lecturer = form.cleaned_data['lecturer']
        selected_courses = form.cleaned_data['courses']
        courses = ()
        for course in selected_courses:
            courses += (course.pk,)
        print(courses)

        try:
            a = CourseAllocation.objects.get(lecturer=lecturer)
        except:
            a = CourseAllocation.objects.create(lecturer=lecturer)
        for i in range(0, selected_courses.count()):
            a.courses.add(courses[i])
            a.save()
        return redirect('course_allocation_view')


@login_required
@student_required
def course_registration(request):
    if request.method == 'POST':
        ids = ()
        data = request.POST.copy()
        data.pop('csrfmiddlewaretoken', None)  # remove csrf_token
        for key in data.keys():
            ids = ids + (str(key),)
        for s in range(0, len(ids)):
            student = Student.objects.get(user__pk=request.user.id)
            course = Course.objects.get(pk=ids[s])
            obj = TakenCourse.objects.create(student=student, course=course)
            obj.save()
        messages.success(request, 'Courses Registered Successfully!')
        return redirect('course_registration')
    else:
        student = Student.objects.get(user__pk=request.user.id)
        taken_courses = TakenCourse.objects.filter(student__user__id=request.user.id)
        t = ()
        for i in taken_courses:
            t += (i.course.pk,)
        current_semester = Semester.objects.get(is_current_semester=True)
        courses = Course.objects.filter(level=student.level).exclude(id__in=t)
        all_courses = Course.objects.filter(level=student.level)

        no_course_is_registered = False  # Check if no course is registered
        all_courses_are_registered = False

        registered_courses = Course.objects.filter(level=student.level).filter(id__in=t)
        if registered_courses.count() == 0:  # Check if the number of registered courses is 0
            no_course_is_registered = True

        if registered_courses.count() == all_courses.count():
            all_courses_are_registered = True

        total_first_semester_unit = 0
        total_sec_semester_unit = 0
        total_registered_unit = 0
        for i in courses:
            if i.semester == "First":
                total_first_semester_unit += int(i.courseUnit)
            if i.semester == "Second":
                total_sec_semester_unit += int(i.courseUnit)
        for i in registered_courses:
            total_registered_unit += int(i.courseUnit)
        context = {
            "all_courses_are_registered": all_courses_are_registered,
            "no_course_is_registered": no_course_is_registered,
            "current_semester": current_semester,
            "courses": courses,
            "total_first_semester_unit": total_first_semester_unit,
            "total_sec_semester_unit": total_sec_semester_unit,
            "registered_courses": registered_courses,
            "total_registered_unit": total_registered_unit,
            "student": student,
        }
        return render(request, 'course/course_registration.html', context)


@login_required
@student_required
def course_drop(request):
    if request.method == 'POST':
        ids = ()
        data = request.POST.copy()
        data.pop('csrfmiddlewaretoken', None)  # remove csrf_token
        for key in data.keys():
            ids = ids + (str(key),)
        for s in range(0, len(ids)):
            student = Student.objects.get(user__pk=request.user.id)
            course = Course.objects.get(pk=ids[s])
            obj = TakenCourse.objects.get(student=student, course=course)
            obj.delete()
        messages.success(request, 'Successfully Dropped!')
    return redirect('course_registration')


@login_required
@lecturer_required
def delete_course(request, pk):
    course = get_object_or_404(Course, pk=pk)
    course.delete()
    messages.success(request, 'Deleted successfully!')
    return redirect('course_list')


@login_required
@lecturer_required
def add_score(request):
    """
    Shows a page where a lecturer will select a course allocated to him for
    score entry, in a specific semester and session
    """
    current_session = Session.objects.get(is_current_session=True)
    current_semester = get_object_or_404(Semester, is_current_semester=True, session=current_session)
    semester = Course.objects.filter(allocated_course__lecturer__pk=request.user.id, semester=current_semester)
    courses = Course.objects.filter(allocated_course__lecturer__pk=request.user.id).filter(semester=current_semester)
    context = {
        "courses": courses,
    }
    return render(request, 'result/add_score.html', context)


@login_required
@lecturer_required
def add_score_for(request, id):
    """
    Shows a page where a lecturer will add scores for students that are taking
    courses allocated to him, in a specific semester and session
    """
    current_semester = Semester.objects.get(is_current_semester=True)
    if request.method == 'GET':
        courses = Course.objects.filter(allocated_course__lecturer__pk=request.user.id).filter(semester=current_semester)
        course = Course.objects.get(pk=id)
        students = TakenCourse.objects.filter(course__allocated_course__lecturer__pk=request.user.id).filter(course__id=id).filter(course__semester=current_semester)
        context = {
            "courses": courses,
            "course": course,
            "students": students,
        }
        return render(request, 'result/add_score_for.html', context)

    if request.method == 'POST':
        ids = ()
        data = request.POST.copy()
        data.pop('csrfmiddlewaretoken', None)  # remove csrf_token
        for key in data.keys():
            ids = ids + (str(key),)  # gather all the student ids (i.e. the keys) in a tuple
        for s in range(0, len(ids)):  # iterate over the list of student ids gathered above
            student = TakenCourse.objects.get(id=ids[s])
            courses = Course.objects.filter(level=student.student.level).filter(semester=current_semester)  # all courses of a specific level in the current semester
            total_unit_in_semester = 0
            for i in courses:
                if i == courses.count():
                    break
                else:
                    total_unit_in_semester += int(i.courseUnit)
            score = data.getlist(ids[s])  # get the list of scores for the current student in the loop
            ca = score[0]  # subscript the list to get the first value > ca score
            exam = score[1]  # do the same for the exam score
            obj = TakenCourse.objects.get(pk=ids[s])  # get the current student's record
            obj.ca = ca  # set current student ca score
            obj.exam = exam  # set current student exam score
            obj.total = obj.get_total(ca=ca, exam=exam)
            obj.grade = obj.get_grade(ca=ca, exam=exam)
            obj.comment = obj.get_comment(obj.grade)
            obj.carry_over(obj.grade)
            obj.is_repeating()
            obj.save()
            gpa = obj.calculate_gpa(total_unit_in_semester)
            cgpa = obj.calculate_cgpa()
            try:
                a = Result.objects.get(student=student.student, semester=current_semester, level=student.student.level)
                a.gpa = gpa
                a.cgpa = cgpa
                a.save()
            except:
                Result.objects.get_or_create(student=student.student, gpa=gpa, semester=current_semester, level=student.student.level)
        messages.success(request, 'Successfully Recorded! ')
        return HttpResponseRedirect(reverse_lazy('add_score_for', kwargs={'id': id}))
    return HttpResponseRedirect(reverse_lazy('add_score_for', kwargs={'id': id}))


@login_required
@student_required
def view_result(request):
    student = Student.objects.get(user__pk=request.user.id)
    current_semester = Semester.objects.get(is_current_semester=True)
    courses = TakenCourse.objects.filter(student__user__pk=request.user.id, course__level=student.level)
    result = Result.objects.filter(student__user__pk=request.user.id)
    current_semester_grades = {}

    previousCGPA = 0
    previousLEVEL = 0

    for i in result:
        if not int(i.level) - 100 == 0:  # TODO: think through and verify this logic
            previousLEVEL = i.level
            try:
                a = Result.objects.get(student__user__pk=request.user.id, level=previousLEVEL, semester="Second")
                previousCGPA = a.cgpa
                break
            except:
                previousCGPA = 0
        else:
            break
    context = {
        "courses": courses,
        "result": result,
        "student": student,
        "previousCGPA": previousCGPA,
    }
    return render(request, 'students/view_results.html', context)


@login_required
def change_password(request):
    if request.method == 'POST':
        form = PasswordChangeForm(request.user, request.POST)
        if form.is_valid():
            user = form.save()
            update_session_auth_hash(request, user)
            messages.success(request, 'Your password was successfully updated!')
        else:
            messages.error(request, 'Please correct the errors below. ')
    else:
        form = PasswordChangeForm(request.user)
    return render(request, 'account/change_password.html', {
        'form': form,
    })


@login_required
@lecturer_required
def course_allocation_view(request):
    allocated_courses = CourseAllocation.objects.all()
    return render(request, 'course/course_allocation_view.html', {"allocated_courses": allocated_courses})


@login_required
@lecturer_required
def withheld_course(request, pk):
    course = CourseAllocation.objects.get(pk=pk)
    course.delete()
    messages.success(request, 'successfully deallocated!')
    return redirect("course_allocation_view")


@login_required
def carry_over(request):
    if request.method == "POST":
        value = ()
        data = request.POST.copy()
        data.pop('csrfmiddlewaretoken', None)  # remove csrf_token
        for val in data.values():
            value += (val,)
        course = value[0]
        session = value[1]
        courses = CarryOverStudent.objects.filter(course__courseCode=course, session=session)
        all_courses = Course.objects.all()
        sessions = Session.objects.all()
        signal_template = True
        context = {
            "all_courses": all_courses,
            "courses": courses,
            "signal_template": signal_template,
            "sessions": sessions
        }
        return render(request, 'course/carry_over.html', context)
    else:
        all_courses = Course.objects.all()
        sessions = Session.objects.all()
        return render(request, 'course/carry_over.html', {"all_courses": all_courses, "sessions": sessions})


@login_required
def repeat_list(request):
    students = RepeatingStudent.objects.all()
    return render(request, 'students/repeaters.html', {"students": students})


@login_required
def first_class_list(request):
    students = Result.objects.filter(cgpa__gte=4.5)
    return render(request, 'students/first_class_students.html', {"students": students})


@login_required
@lecturer_required
def result_sheet_pdf_view(request, id):
    current_semester = Semester.objects.get(is_current_semester=True)
    current_session = Session.objects.get(is_current_session=True)
    result = TakenCourse.objects.filter(course__pk=id)
    no_of_pass = TakenCourse.objects.filter(course__pk=id, comment="PASS").count()
    no_of_fail = TakenCourse.objects.filter(course__pk=id, comment="FAIL").count()
fname = str(current_semester) + '_semester_' + str(current_session) + '_session_' + 'resultSheet.pdf' fname = fname.replace("/", "-") flocation = '/tmp/'+fname doc = SimpleDocTemplate(flocation, rightMargin=0, leftMargin=6.5 * cm, topMargin=0.3 * cm, bottomMargin=0) styles = getSampleStyleSheet() styles.add(ParagraphStyle( name="ParagraphTitle", fontSize=11, fontName="FreeSansBold")) Story = [Spacer(1,.2)] style = styles["Normal"] logo = MEDIA_ROOT + "/logo/android-chrome-144x144.png" print(logo) im = Image(logo, 1*inch, 1*inch) im.__setattr__("_offs_x", -280) im.__setattr__("_offs_y", -45) Story.append(im) style = getSampleStyleSheet() normal = style["Normal"] normal.alignment = TA_CENTER normal.fontName = "Helvetica" normal.fontSize = 12 normal.leading = 15 title = "<b> "+str(current_semester) + " Semester " + str(current_session) + " Result Sheet</b>" title = Paragraph(title.upper(), normal) Story.append(title) Story.append(Spacer(1,0.1*inch)) style = getSampleStyleSheet() normal = style["Normal"] normal.alignment = TA_CENTER normal.fontName = "Helvetica" normal.fontSize = 10 normal.leading = 15 title = "<b>Course lecturer: " + request.user.get_full_name() + "</b>" title = Paragraph(title.upper(), normal) Story.append(title) Story.append(Spacer(1,0.1*inch)) normal = style["Normal"] normal.alignment = TA_CENTER normal.fontName = "Helvetica" normal.fontSize = 10 normal.leading = 15 level = result.filter(course_id=id).first() title = "<b>Level: </b>" + str(level.course.level+"L") title = Paragraph(title.upper(), normal) Story.append(title) Story.append(Spacer(1,.6*inch)) elements = [] count = 0 header = [('S/N', 'ID NUMBER', 'CA', 'EXAM', 'GRADE', 'COMMENT')] table_header=Table(header,1*[1.2*inch], 1*[0.5*inch]) table_header.setStyle( TableStyle([ ('ALIGN',(-2,-2), (-2,-2),'CENTER'), ('TEXTCOLOR',(1,0),(1,0),colors.blue), ('TEXTCOLOR',(-1,0),(-1,0),colors.blue), ('ALIGN',(0,-1),(-1,-1),'CENTER'), ('VALIGN',(0,-1),(-1,-1),'MIDDLE'), ('TEXTCOLOR',(0,-1),(-1,-1),colors.blue), ('INNERGRID', (0,0), (-1,-1), 0.25, colors.black), ('BOX', (0,0), (-1,-1), 0.25, colors.black), ])) Story.append(table_header) for student in result: data = [(count+1, student.student.id_number.upper(), student.ca, student.exam, student.grade, student.comment)] color = colors.black if student.grade == 'F': color = colors.red count += 1 t=Table(data,1*[1.2*inch], 1*[0.5*inch]) t.setStyle( TableStyle([ ('ALIGN',(-2,-2), (-2,-2),'CENTER'), ('ALIGN',(1,0), (1,0),'CENTER'), ('ALIGN',(-1,0), (-1,0),'CENTER'), ('ALIGN',(-3,0), (-3,0),'CENTER'), ('ALIGN',(-4,0), (-4,0),'CENTER'), ('ALIGN',(-6,0), (-6,0),'CENTER'), ('TEXTCOLOR',(0,-1),(-1,-1),color), ('INNERGRID', (0,0), (-1,-1), 0.25, colors.black), ('BOX', (0,0), (-1,-1), 0.25, colors.black), ])) Story.append(t) Story.append(Spacer(1,1*inch)) style_right = ParagraphStyle(name='right', parent=styles['Normal'], alignment=TA_RIGHT) tbl_data = [ [Paragraph("<b>Date:</b>_______________________________________", styles["Normal"]), Paragraph("<b>No. of PASS:</b> " + str(no_of_pass), style_right)], [Paragraph("<b>Siganture / Stamp:</b> _____________________________", styles["Normal"]), Paragraph("<b>No. 
of FAIL: </b>" + str(no_of_fail), style_right)]] tbl = Table(tbl_data) Story.append(tbl) doc.build(Story) fs = FileSystemStorage("/tmp") with fs.open(fname) as pdf: response = HttpResponse(pdf, content_type='application/pdf') response['Content-Disposition'] = 'inline; filename='+fname+'' return response return response @login_required @student_required def course_registration_form(request): current_semester = Semester.objects.get(is_current_semester=True) current_session = Session.objects.get(is_current_session=True) courses = TakenCourse.objects.filter(student__user__id=request.user.id) fname = request.user.username + '.pdf' fname = fname.replace("/", "-") flocation = '/tmp/'+fname doc = SimpleDocTemplate(flocation, rightMargin=15, leftMargin=15, topMargin=0, bottomMargin=0) styles = getSampleStyleSheet() Story = [Spacer(1,0.5)] Story.append(Spacer(1,0.4*inch)) style = styles["Normal"] style = getSampleStyleSheet() normal = style["Normal"] normal.alignment = TA_CENTER normal.fontName = "Helvetica" normal.fontSize = 12 normal.leading = 18 title = "<b>MODIBBO ADAMA UNIVERSITY OF TECHNOLOGY, YOLA</b>" title = Paragraph(title.upper(), normal) Story.append(title) style = getSampleStyleSheet() school = style["Normal"] school.alignment = TA_CENTER school.fontName = "Helvetica" school.fontSize = 10 school.leading = 18 school_title = "<b>SCHOOL OF MANAGEMENT AND INFORMATION TECHNOLOGY</b>" school_title = Paragraph(school_title.upper(), school) Story.append(school_title) style = getSampleStyleSheet() Story.append(Spacer(1,0.1*inch)) department = style["Normal"] department.alignment = TA_CENTER department.fontName = "Helvetica" department.fontSize = 9 department.leading = 18 department_title = "<b>DEPARTMENT OF INFORMATION MANAGEMENT TECHNOLOGY</b>" department_title = Paragraph(department_title, department) Story.append(department_title) Story.append(Spacer(1,.3*inch)) title = "<b><u>STUDENT REGISTRATION FORM</u></b>" title = Paragraph(title.upper(), normal) Story.append(title) student = Student.objects.get(user__pk=request.user.id) style_right = ParagraphStyle(name='right', parent=styles['Normal']) tbl_data = [ [Paragraph("<b>Registration Number : " + request.user.username.upper() + "</b>", styles["Normal"])], [Paragraph("<b>Name : " + request.user.get_full_name().upper() + "</b>", styles["Normal"])], [Paragraph("<b>Session : " + current_session.session.upper() + "</b>", styles["Normal"]), Paragraph("<b>Level: " + student.level + "</b>", styles["Normal"]) ]] tbl = Table(tbl_data) Story.append(tbl) Story.append(Spacer(1, 0.6*inch)) style = getSampleStyleSheet() semester = style["Normal"] semester.alignment = TA_LEFT semester.fontName = "Helvetica" semester.fontSize = 9 semester.leading = 18 semester_title = "<b>FIRST SEMESTER</b>" semester_title = Paragraph(semester_title, semester) Story.append(semester_title) elements = [] # FIRST SEMESTER count = 0 header = [('S/No', 'Course Code', 'Course Title', 'Unit', Paragraph('Name, Siganture of course lecturer & Date', style['Normal']))] table_header = Table(header,1*[1.4*inch], 1*[0.5*inch]) table_header.setStyle( TableStyle([ ('ALIGN',(-2,-2), (-2,-2),'CENTER'), ('VALIGN',(-2,-2), (-2,-2),'MIDDLE'), ('ALIGN',(1,0), (1,0),'CENTER'), ('VALIGN',(1,0), (1,0),'MIDDLE'), ('ALIGN',(0,0), (0,0),'CENTER'), ('VALIGN',(0,0), (0,0),'MIDDLE'), ('ALIGN',(-4,0), (-4,0),'LEFT'), ('VALIGN',(-4,0), (-4,0),'MIDDLE'), ('ALIGN',(-3,0), (-3,0),'LEFT'), ('VALIGN',(-3,0), (-3,0),'MIDDLE'), ('TEXTCOLOR',(0,-1),(-1,-1),colors.black), ('INNERGRID', (0,0), (-1,-1), 0.25, 
colors.black), ('BOX', (0,0), (-1,-1), 0.25, colors.black), ])) Story.append(table_header) first_semester_unit = 0 for course in courses: if course.course.semester == FIRST: first_semester_unit += int(course.course.courseUnit) data = [(count+1, course.course.courseCode.upper(), course.course.courseTitle, course.course.courseUnit, '')] color = colors.black count += 1 table_body=Table(data,1*[1.4*inch], 1*[0.3*inch]) table_body.setStyle( TableStyle([ ('ALIGN',(-2,-2), (-2,-2),'CENTER'), ('ALIGN',(1,0), (1,0),'CENTER'), ('ALIGN',(0,0), (0,0),'CENTER'), ('ALIGN',(-4,0), (-4,0),'LEFT'), ('TEXTCOLOR',(0,-1),(-1,-1),colors.black), ('INNERGRID', (0,0), (-1,-1), 0.25, colors.black), ('BOX', (0,0), (-1,-1), 0.25, colors.black), ])) Story.append(table_body) style = getSampleStyleSheet() semester = style["Normal"] semester.alignment = TA_LEFT semester.fontName = "Helvetica" semester.fontSize = 8 semester.leading = 18 semester_title = "<b>Total Units : " + str(first_semester_unit) + "</b>" semester_title = Paragraph(semester_title, semester) Story.append(semester_title) # FIRST SEMESTER ENDS HERE Story.append(Spacer(1, 0.6*inch)) style = getSampleStyleSheet() semester = style["Normal"] semester.alignment = TA_LEFT semester.fontName = "Helvetica" semester.fontSize = 9 semester.leading = 18 semester_title = "<b>SECOND SEMESTER</b>" semester_title = Paragraph(semester_title, semester) Story.append(semester_title) # SECOND SEMESTER count = 0 header = [('S/No', 'Course Code', 'Course Title', 'Unit', Paragraph('<b>Name, Siganture of course lecturer & Date</b>', style['Normal']))] table_header = Table(header,1*[1.4*inch], 1*[0.5*inch]) table_header.setStyle( TableStyle([ ('ALIGN',(-2,-2), (-2,-2),'CENTER'), ('VALIGN',(-2,-2), (-2,-2),'MIDDLE'), ('ALIGN',(1,0), (1,0),'CENTER'), ('VALIGN',(1,0), (1,0),'MIDDLE'), ('ALIGN',(0,0), (0,0),'CENTER'), ('VALIGN',(0,0), (0,0),'MIDDLE'), ('ALIGN',(-4,0), (-4,0),'LEFT'), ('VALIGN',(-4,0), (-4,0),'MIDDLE'), ('ALIGN',(-3,0), (-3,0),'LEFT'), ('VALIGN',(-3,0), (-3,0),'MIDDLE'), ('TEXTCOLOR',(0,-1),(-1,-1),colors.black), ('INNERGRID', (0,0), (-1,-1), 0.25, colors.black), ('BOX', (0,0), (-1,-1), 0.25, colors.black), ])) Story.append(table_header) second_semester_unit = 0 for course in courses: if course.course.semester == SECOND: second_semester_unit += int(course.course.courseUnit) data = [(count+1, course.course.courseCode.upper(), course.course.courseTitle, course.course.courseUnit, '')] color = colors.black count += 1 table_body=Table(data,1*[1.4*inch], 1*[0.3*inch]) table_body.setStyle( TableStyle([ ('ALIGN',(-2,-2), (-2,-2),'CENTER'), ('ALIGN',(1,0), (1,0),'CENTER'), ('ALIGN',(0,0), (0,0),'CENTER'), ('ALIGN',(-4,0), (-4,0),'LEFT'), ('TEXTCOLOR',(0,-1),(-1,-1),colors.black), ('INNERGRID', (0,0), (-1,-1), 0.25, colors.black), ('BOX', (0,0), (-1,-1), 0.25, colors.black), ])) Story.append(table_body) style = getSampleStyleSheet() semester = style["Normal"] semester.alignment = TA_LEFT semester.fontName = "Helvetica" semester.fontSize = 8 semester.leading = 18 semester_title = "<b>Total Units : " + str(second_semester_unit) + "</b>" semester_title = Paragraph(semester_title, semester) Story.append(semester_title) Story.append(Spacer(1, 2)) style = getSampleStyleSheet() certification = style["Normal"] certification.alignment = TA_JUSTIFY certification.fontName = "Helvetica" certification.fontSize = 8 certification.leading = 18 student = Student.objects.get(user__pk=request.user.id) certification_text = "CERTIFICATION OF REGISTRATION: I certify that <b>" + 
str(request.user.get_full_name().upper()) + "</b>\ has been duly registered for the <b>" + student.level + " level </b> of study in the department\ of INFORMATION MANAGEMENT TECHNOLOGY and that the courses and units registered are as approved by the senate of the University" certification_text = Paragraph(certification_text, certification) Story.append(certification_text) # FIRST SEMESTER ENDS HERE logo = MEDIA_ROOT + "/logo/android-chrome-144x144.png" im = Image(logo, 1.5*inch, 1.5*inch) im.__setattr__("_offs_x", -228) im.__setattr__("_offs_y", 625) Story.append(im) picture = BASE_DIR + request.user.get_picture() im = Image(picture, 1.0*inch, 1.0*inch) im.__setattr__("_offs_x", 218) im.__setattr__("_offs_y", 625) Story.append(im) doc.build(Story) fs = FileSystemStorage("/tmp") with fs.open(fname) as pdf: response = HttpResponse(pdf, content_type='application/pdf') response['Content-Disposition'] = 'inline; filename='+fname+'' return response return response
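# ---------------------------------------------------------------------------
# A minimal sketch (not part of the original module) of how the add_score_for
# POST contract above can be exercised with Django's test client. The lecturer
# credentials and the Course/TakenCourse pks below are hypothetical example
# values. Each POST key is a TakenCourse pk whose two values are the CA and
# exam scores, which the view reads back with data.getlist().
#
#     from django.test import Client
#     from django.urls import reverse  # import path varies by Django version
#
#     client = Client()
#     client.login(username="lecturer1", password="secret")  # hypothetical account
#     client.post(
#         reverse('add_score_for', kwargs={'id': 1}),  # 1 = an example Course pk
#         data={'7': ['25', '60']},  # TakenCourse pk 7 -> ca=25, exam=60
#     )
# ---------------------------------------------------------------------------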
f1c7573bba1a0892112b60e9cdc75fae7385b0fa
e83cfcdf15ddb362b4611488dc4982bf7832464b
/libs/python/test/test_endpoint_indigo_traceroute.py
feff4e323b776909d67c5b7d97dade312242b3e0
[]
no_license
shivamjaiswal64/api-rest-meetings
ce2b59edb0b588de8a4bd0f6ff6addecc557cca6
f4750f72ca5f8358c4dc00737abc104a28d62934
refs/heads/master
2020-03-22T14:33:39.825474
2018-07-07T01:22:40
2018-07-07T01:22:40
null
0
0
null
null
null
null
UTF-8
Python
false
false
6,785
py
# coding: utf-8

"""
    BlueJeans Meetings REST API

    ## Video That Works Where You Do.  This site provides developers access to APIs from BlueJeans' Meetings meeting service. From here you will be able to make actual API calls to manage User Accounts, Meetings, and Recordings. Also, you can pull analytical data and current state information. With these APIs you can quickly integrate **BlueJeans** video administration into your applications.  <hr> <h2 class=\"info\"> What is a BlueJeans Meeting?</h2> <img src=\"./img/bjnmeeting.png\" style=\"padding-left:20px; width:50%\"> A BlueJeans meeting is a collaboration session of 2 or more participants joining a virtual meeting-room in the cloud. The first person to join is put into a waiting-room until other participant(s) join. When the meeting starts, all participants will be connected over video & audio.  <u>Types of Meeting Rooms</u> There are two types of Meeting Rooms available to a registered user. They are described as: <ul> <li>Scheduled – A room created for a specific date and time. It is assigned a randomized and unique numeric ID by BlueJeans.</li> <li>Personal – Every user is given a personal meeting room with a customized Meeting Identifier. People typically use their mobile phone number as their Meeting ID.</li> </ul> <u>Meeting Room Security</u> A meeting room can <i>optionally</i> be protected by a randomized access code. The access code ensures security of participants who join. Also the access code can specify a Moderator role whose user(s) have additional capabilities to administer a meeting. <ul> <li>When set, participants would be required to enter the access code in order to join.</li> <li>The scheduler's profile has a user-specified moderator passcode which can be input to allow designated user(s) greater privileges to help organize meetings.</li> </ul> <hr> <h2 id='gs' class=\"info\" onclick=\"hideit('gs')\"><span id=\"gsicon\" class=\"glyphicon glyphicon-chevron-right\"></span> Getting Started</h2> <div id=\"gsbody\" style=\"display:none\"> Before you start using BlueJeans' APIs, you must first have a BlueJeans account enabled for API Access. Contact <a href=\"mailto:[email protected])\">BlueJeans Support</a> for assistance. <br /><br />Once you have an account, you may start writing application code to authenticate and make API calls. *Alternatively*, you can use this developer site to test the BlueJeans APIs and develop a level of familiarity before you write production code.  <br /> ### To Make API Calls from This Site If you want to use this developer site to try various BlueJeans APIs, here are the steps required to authenticate and enable your Developer Session to place API calls. 1. Choose Method for Authenticating * Click on the desired Authentication method from below. * Click on the **Try It Out** button. 1. Make Authentication request * Follow the API's instructions and input the API parameters. * Click on the blue **Execute** button. * If successful, the API returns JSON data containing a field called **access_token**. Copy/save this value. 1. Authorize BlueJeans Developer Session. * Click on the green **Authorize** button. * The site will show you a pop-up window for authorization. * Enter your access token in the field named **api_key**. * Click on the **Authorize** button. Your current BlueJeans developer session is now authenticated and ready to place API calls. The web site will automatically include your access token on any API calls you make. </div> <hr> <h2 id='oauth' class=\"info\" onclick=\"hideit('oauth')\"><span id=\"oauthicon\" class=\"glyphicon glyphicon-chevron-right\"></span> About Meetings Authentication</h2> <div id=\"oauthbody\" style=\"display:none\"> All API transactions (excluding Authentication) require an access token per **OAuth standards**. BlueJeans provides multiple methods for obtaining an access token. Additionally there are different scopes of token access. ### Grant Types BlueJeans provides 4 different methods for users to authenticate. Successful authentication allows BlueJeans to grant an access token to perform API operations. * Password Credentials Grant – Authenticate with a username and password and receive an access token with user level permission. Known as two-legged OAuth. * Meeting Credentials Grant – Authenticate with a meeting ID and meeting passcode and receive an access token with meeting level permission. Known as two-legged OAuth. * Client Credentials Grant – Authenticate with a client ID and client secret and receive an access token with enterprise level permission. Known as two-legged OAuth. * Authorization Code Grant – Authentication for your developer's application occurs through a redirection to a BlueJeans authentication page. The application receives an authorization code to be submitted, along with other tokens, to receive an access token. Known as three-legged OAuth. For more information please refer to the [OAuth specification](https://oauth.net/). ### Access & Permissions BlueJeans defines 3 levels of API access into the system. When an access token is granted, it carries one of these 3 levels. The scope of system functionality depends upon the token's access level. * Meeting-level – Scope of APIs is limited to individual meetings. * User-level – Scope depends on the requested permissions. * App-level – Provisioned either by BlueJeans personnel or the BlueJeans Enterprise Admin, an app is issued a client key and secret key. These tokens are then used by the BlueJeans Authentication API to receive the access token. The token's scope provides access to the entire enterprise and all of its users. All endpoints in this document that require **Enterprise Admin** access will be marked as such. </div> <hr>

    OpenAPI spec version: 1.0.0
    Contact: [email protected]
    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""


from __future__ import absolute_import

import os
import sys
import unittest

import BlueJeansMeetingsRestApi
from BlueJeansMeetingsRestApi.rest import ApiException
from BlueJeansMeetingsRestApi.models.endpoint_indigo_traceroute import EndpointIndigoTraceroute


class TestEndpointIndigoTraceroute(unittest.TestCase):
    """ EndpointIndigoTraceroute unit test stubs """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testEndpointIndigoTraceroute(self):
        """
        Test EndpointIndigoTraceroute
        """
        # FIXME: construct object with mandatory attributes with example values
        # model = BlueJeansMeetingsRestApi.models.endpoint_indigo_traceroute.EndpointIndigoTraceroute()
        pass


if __name__ == '__main__':
    unittest.main()
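# ---------------------------------------------------------------------------
# A minimal sketch (not part of the generated stubs) of the two-legged
# Password Credentials Grant described in the module docstring. The token
# endpoint URL and the 'access_token' security-scheme name are assumptions
# inferred from the documentation text, not verified against this SDK; the
# username and password are hypothetical placeholders.
#
#     import requests
#     import BlueJeansMeetingsRestApi
#
#     resp = requests.post(
#         "https://api.bluejeans.com/oauth2/token?Password",  # assumed endpoint
#         json={"grant_type": "password",
#               "username": "my_user",        # placeholder credentials
#               "password": "my_password"},
#     )
#     token = resp.json()["access_token"]  # field name documented above
#
#     configuration = BlueJeansMeetingsRestApi.Configuration()
#     configuration.api_key['access_token'] = token  # assumed scheme name
# ---------------------------------------------------------------------------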