Dataset columns (⌀ marks columns that may contain null values):

| Column | Type | Min | Max |
|---|---|---|---|
| hexsha | stringlengths | 40 | 40 |
| size | int64 | 5 | 2.06M |
| ext | stringclasses | 11 values | |
| lang | stringclasses | 1 value | |
| max_stars_repo_path | stringlengths | 3 | 251 |
| max_stars_repo_name | stringlengths | 4 | 130 |
| max_stars_repo_head_hexsha | stringlengths | 40 | 78 |
| max_stars_repo_licenses | sequencelengths | 1 | 10 |
| max_stars_count ⌀ | int64 | 1 | 191k |
| max_stars_repo_stars_event_min_datetime ⌀ | stringlengths | 24 | 24 |
| max_stars_repo_stars_event_max_datetime ⌀ | stringlengths | 24 | 24 |
| max_issues_repo_path | stringlengths | 3 | 251 |
| max_issues_repo_name | stringlengths | 4 | 130 |
| max_issues_repo_head_hexsha | stringlengths | 40 | 78 |
| max_issues_repo_licenses | sequencelengths | 1 | 10 |
| max_issues_count ⌀ | int64 | 1 | 116k |
| max_issues_repo_issues_event_min_datetime ⌀ | stringlengths | 24 | 24 |
| max_issues_repo_issues_event_max_datetime ⌀ | stringlengths | 24 | 24 |
| max_forks_repo_path | stringlengths | 3 | 251 |
| max_forks_repo_name | stringlengths | 4 | 130 |
| max_forks_repo_head_hexsha | stringlengths | 40 | 78 |
| max_forks_repo_licenses | sequencelengths | 1 | 10 |
| max_forks_count ⌀ | int64 | 1 | 105k |
| max_forks_repo_forks_event_min_datetime ⌀ | stringlengths | 24 | 24 |
| max_forks_repo_forks_event_max_datetime ⌀ | stringlengths | 24 | 24 |
| content | stringlengths | 1 | 1.05M |
| avg_line_length | float64 | 1 | 1.02M |
| max_line_length | int64 | 3 | 1.04M |
| alphanum_fraction | float64 | 0 | 1 |
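The rows below follow this schema. As a rough illustration (the dataset identifier is a placeholder assumption, not given anywhere on this page), rows with these columns can be streamed with the Hugging Face `datasets` library:

```python
# Minimal sketch, assuming the table above is hosted as a Hugging Face dataset;
# "org/python-source-subset" is a placeholder name, not the real identifier.
from datasets import load_dataset

ds = load_dataset("org/python-source-subset", split="train", streaming=True)
for row in ds.take(3):
    # Columns match the schema above: repo metadata plus the raw file text.
    print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["size"])
    print(row["content"][:120])  # first 120 characters of the source file
```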
d9e359c85a06bdc44937457ee401aaa8bebc7f50 | 2,560 | py | Python | tclCommands/TclCommandListSys.py | DannyPol/flatcam | 25a8634d0658e98b7fae31a095f8bef40c1b3067 | ["MIT"] | 1 | 2022-02-11T06:19:34.000Z | 2022-02-11T06:19:34.000Z | tclCommands/TclCommandListSys.py | MRemy2/FlatCam | d4f941335ca8a8d5351aab23b396f99da06a9029 | ["MIT"] | null | null | null | tclCommands/TclCommandListSys.py | MRemy2/FlatCam | d4f941335ca8a8d5351aab23b396f99da06a9029 | ["MIT"] | null | null | null |
# ##########################################################
# FlatCAM: 2D Post-processing for Manufacturing #
# File Author: Marius Adrian Stanciu (c) #
# Date: 8/17/2019 #
# MIT Licence #
# ##########################################################
from tclCommands.TclCommand import *
| 37.647059 | 120 | 0.55 |
d9e3f1d0e6ad9650ceb745dc1536525917eaef63 | 2,694 | py | Python | ogs5py/fileclasses/mcp/core.py | MuellerSeb/ogs5py | 752e7bd2298fbd476406d168f6b7d1a85863dccd | ["MIT"] | 3 | 2018-05-27T15:39:07.000Z | 2018-10-29T17:02:11.000Z | ogs5py/fileclasses/mcp/core.py | MuellerSeb/ogs5py | 752e7bd2298fbd476406d168f6b7d1a85863dccd | ["MIT"] | 1 | 2018-11-12T11:32:12.000Z | 2018-11-12T13:07:48.000Z | ogs5py/fileclasses/mcp/core.py | MuellerSeb/ogs5py | 752e7bd2298fbd476406d168f6b7d1a85863dccd | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""Class for the ogs COMPONENT_PROPERTIES file."""
from ogs5py.fileclasses.base import BlockFile
| 25.415094 | 78 | 0.485523 |
d9e4389915d24f650fdb65abd21f0125dba8ab5c | 175,651 | py | Python | keystone/tests/unit/test_v3_assignment.py | crowdy/keystone | 1e7ecca881a51144d61ae8026e1a77d6669997e2 | ["Apache-2.0"] | null | null | null | keystone/tests/unit/test_v3_assignment.py | crowdy/keystone | 1e7ecca881a51144d61ae8026e1a77d6669997e2 | ["Apache-2.0"] | 4 | 2021-10-05T12:01:08.000Z | 2021-10-10T07:06:33.000Z | keystone/tests/unit/test_v3_assignment.py | crowdy/keystone | 1e7ecca881a51144d61ae8026e1a77d6669997e2 | ["Apache-2.0"] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import random
import uuid
import freezegun
import http.client
from testtools import matchers
from keystone.common import provider_api
import keystone.conf
from keystone import exception
from keystone.resource.backends import base as resource_base
from keystone.tests import unit
from keystone.tests.unit import test_v3
CONF = keystone.conf.CONF
PROVIDERS = provider_api.ProviderAPIs
def test_get_effective_role_assignments(self):
"""Call ``GET /role_assignments?effective``.
Test Plan:
- Create two extra user for tests
- Add these users to a group
- Add a role assignment for the group on a domain
- Get a list of all role assignments, checking one has been added
- Then get a list of all effective role assignments - the group
assignment should have turned into assignments on the domain
for each of the group members.
"""
user1 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
user2 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
PROVIDERS.identity_api.add_user_to_group(user1['id'], self.group['id'])
PROVIDERS.identity_api.add_user_to_group(user2['id'], self.group['id'])
collection_url = '/role_assignments'
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
existing_assignments = len(r.result.get('role_assignments'))
gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
group_id=self.group_id,
role_id=self.role_id)
self.put(gd_entity['links']['assignment'])
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 1,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
# Now re-read the collection asking for effective roles - this
# should mean the group assignment is translated into the two
# member user assignments
collection_url = '/role_assignments?effective'
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
ud_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], domain_id=self.domain_id,
user_id=user1['id'], role_id=self.role_id)
self.assertRoleAssignmentInListResponse(r, ud_entity)
ud_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], domain_id=self.domain_id,
user_id=user2['id'], role_id=self.role_id)
self.assertRoleAssignmentInListResponse(r, ud_entity)
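# Illustrative sketch only (shape assumed from the assertions above; values
# are placeholders): after expansion, each effective entry is keyed by a
# member user, while its assignment link still points at the original
# group-on-domain grant, roughly:
#   {"user": {"id": "<user1-id>"},
#    "scope": {"domain": {"id": "<domain-id>"}},
#    "role": {"id": "<role-id>"},
#    "links": {"assignment":
#              ".../domains/<domain-id>/groups/<group-id>/roles/<role-id>"}}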
def test_check_effective_values_for_role_assignments(self):
"""Call ``GET & HEAD /role_assignments?effective=value``.
Check the various ways of specifying the 'effective'
query parameter. If the 'effective' query parameter
is included then this should always be treated as meaning 'True'
unless it is specified as:
{url}?effective=0
This is by design to match the agreed way of handling
policy checking on query/filter parameters.
Test Plan:
- Create two extra user for tests
- Add these users to a group
- Add a role assignment for the group on a domain
- Get a list of all role assignments, checking one has been added
- Then issue various request with different ways of defining
the 'effective' query parameter. As we have tested the
correctness of the data coming back when we get effective roles
in other tests, here we just use the count of entities to
know if we are getting effective roles or not
"""
user1 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
user2 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
PROVIDERS.identity_api.add_user_to_group(user1['id'], self.group['id'])
PROVIDERS.identity_api.add_user_to_group(user2['id'], self.group['id'])
collection_url = '/role_assignments'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
existing_assignments = len(r.result.get('role_assignments'))
gd_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
group_id=self.group_id,
role_id=self.role_id)
self.put(gd_entity['links']['assignment'])
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 1,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
# Now re-read the collection asking for effective roles,
# using the most common way of defining "effective'. This
# should mean the group assignment is translated into the two
# member user assignments
collection_url = '/role_assignments?effective'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
# Now set 'effective' to false explicitly - should get
# back the regular roles
collection_url = '/role_assignments?effective=0'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 1,
resource_url=collection_url)
# Now try setting 'effective' to 'False' explicitly- this is
# NOT supported as a way of setting a query or filter
# parameter to false by design. Hence we should get back
# effective roles.
collection_url = '/role_assignments?effective=False'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
# Now set 'effective' to True explicitly
collection_url = '/role_assignments?effective=True'
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(
r,
expected_length=existing_assignments + 2,
resource_url=collection_url)
def test_filtered_role_assignments(self):
"""Call ``GET /role_assignments?filters``.
Test Plan:
- Create extra users, group, role and project for tests
- Make the following assignments:
Give group1, role1 on project1 and domain
Give user1, role2 on project1 and domain
Make User1 a member of Group1
- Test a series of single filter list calls, checking that
the correct results are obtained
- Test a multi-filtered list call
- Test listing all effective roles for a given user
- Test the equivalent of the list of roles in a project scoped
token (all effective roles for a user on a project)
"""
# Since the default fixtures already assign some roles to the
# user it creates, we also need a new user that will not have any
# existing assignments
user1 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
user2 = unit.create_user(PROVIDERS.identity_api,
domain_id=self.domain['id'])
group1 = unit.new_group_ref(domain_id=self.domain['id'])
group1 = PROVIDERS.identity_api.create_group(group1)
PROVIDERS.identity_api.add_user_to_group(user1['id'], group1['id'])
PROVIDERS.identity_api.add_user_to_group(user2['id'], group1['id'])
project1 = unit.new_project_ref(domain_id=self.domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
self.role1 = unit.new_role_ref()
PROVIDERS.role_api.create_role(self.role1['id'], self.role1)
self.role2 = unit.new_role_ref()
PROVIDERS.role_api.create_role(self.role2['id'], self.role2)
# Now add one of each of the six types of assignment
gd_entity = self.build_role_assignment_entity(
domain_id=self.domain_id, group_id=group1['id'],
role_id=self.role1['id'])
self.put(gd_entity['links']['assignment'])
ud_entity = self.build_role_assignment_entity(domain_id=self.domain_id,
user_id=user1['id'],
role_id=self.role2['id'])
self.put(ud_entity['links']['assignment'])
gp_entity = self.build_role_assignment_entity(
project_id=project1['id'],
group_id=group1['id'],
role_id=self.role1['id'])
self.put(gp_entity['links']['assignment'])
up_entity = self.build_role_assignment_entity(
project_id=project1['id'],
user_id=user1['id'],
role_id=self.role2['id'])
self.put(up_entity['links']['assignment'])
gs_entity = self.build_role_assignment_entity(
system='all',
group_id=group1['id'],
role_id=self.role1['id'])
self.put(gs_entity['links']['assignment'])
us_entity = self.build_role_assignment_entity(
system='all',
user_id=user1['id'],
role_id=self.role2['id'])
self.put(us_entity['links']['assignment'])
us2_entity = self.build_role_assignment_entity(
system='all',
user_id=user2['id'],
role_id=self.role2['id'])
self.put(us2_entity['links']['assignment'])
# Now list by various filters to make sure we get back the right ones
collection_url = ('/role_assignments?scope.project.id=%s' %
project1['id'])
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, gp_entity)
collection_url = ('/role_assignments?scope.domain.id=%s' %
self.domain['id'])
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, ud_entity)
self.assertRoleAssignmentInListResponse(r, gd_entity)
collection_url = '/role_assignments?user.id=%s' % user1['id']
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, ud_entity)
collection_url = '/role_assignments?group.id=%s' % group1['id']
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
self.assertRoleAssignmentInListResponse(r, gp_entity)
collection_url = '/role_assignments?role.id=%s' % self.role1['id']
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, gd_entity)
self.assertRoleAssignmentInListResponse(r, gp_entity)
self.assertRoleAssignmentInListResponse(r, gs_entity)
collection_url = '/role_assignments?role.id=%s' % self.role2['id']
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=4,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, ud_entity)
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, us_entity)
# Let's try combining two filers together....
collection_url = (
'/role_assignments?user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=1,
resource_url=collection_url)
self.assertRoleAssignmentInListResponse(r, up_entity)
# Now for a harder one - filter for user with effective
# roles - this should return role assignment that were directly
# assigned as well as by virtue of group membership
collection_url = ('/role_assignments?effective&user.id=%s' %
user1['id'])
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=4,
resource_url=collection_url)
# Should have the two direct roles...
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, ud_entity)
# ...and the two via group membership...
gp1_link = self.build_role_assignment_link(
project_id=project1['id'],
group_id=group1['id'],
role_id=self.role1['id'])
gd1_link = self.build_role_assignment_link(domain_id=self.domain_id,
group_id=group1['id'],
role_id=self.role1['id'])
up1_entity = self.build_role_assignment_entity(
link=gp1_link, project_id=project1['id'],
user_id=user1['id'], role_id=self.role1['id'])
ud1_entity = self.build_role_assignment_entity(
link=gd1_link, domain_id=self.domain_id, user_id=user1['id'],
role_id=self.role1['id'])
self.assertRoleAssignmentInListResponse(r, up1_entity)
self.assertRoleAssignmentInListResponse(r, ud1_entity)
# ...and for the grand-daddy of them all, simulate the request
# that would generate the list of effective roles in a project
# scoped token.
collection_url = (
'/role_assignments?effective&user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url, expected_status=http.client.OK)
self.head(collection_url, expected_status=http.client.OK)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
resource_url=collection_url)
# Should have one direct role and one from group membership...
self.assertRoleAssignmentInListResponse(r, up_entity)
self.assertRoleAssignmentInListResponse(r, up1_entity)
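# Descriptive note: the two entries asserted above differ only in their
# 'links.assignment' URL. up_entity points at the user's own grant on
# project1, while up1_entity reuses gp1_link, i.e. the group's grant on
# project1, marking it as a role that arrived via group membership.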
def test_list_system_role_assignments(self):
# create a bunch of roles
user_system_role_id = self._create_new_role()
user_domain_role_id = self._create_new_role()
user_project_role_id = self._create_new_role()
group_system_role_id = self._create_new_role()
group_domain_role_id = self._create_new_role()
group_project_role_id = self._create_new_role()
# create a user and grant the user a role on the system, domain, and
# project
user = self._create_user()
url = '/system/users/%s/roles/%s' % (user['id'], user_system_role_id)
self.put(url)
url = '/domains/%s/users/%s/roles/%s' % (
self.domain_id, user['id'], user_domain_role_id
)
self.put(url)
url = '/projects/%s/users/%s/roles/%s' % (
self.project_id, user['id'], user_project_role_id
)
self.put(url)
# create a group and grant the group a role on the system, domain, and
# project
group = self._create_group()
url = '/system/groups/%s/roles/%s' % (
group['id'], group_system_role_id
)
self.put(url)
url = '/domains/%s/groups/%s/roles/%s' % (
self.domain_id, group['id'], group_domain_role_id
)
self.put(url)
url = '/projects/%s/groups/%s/roles/%s' % (
self.project_id, group['id'], group_project_role_id
)
self.put(url)
# /v3/role_assignments?scope.system=all should return two assignments
response = self.get('/role_assignments?scope.system=all')
self.assertValidRoleAssignmentListResponse(response, expected_length=2)
for assignment in response.json_body['role_assignments']:
self.assertTrue(assignment['scope']['system']['all'])
if assignment.get('user'):
self.assertEqual(user_system_role_id, assignment['role']['id'])
if assignment.get('group'):
self.assertEqual(
group_system_role_id,
assignment['role']['id']
)
# /v3/role_assignments?scope_system=all&user.id=$USER_ID should return
# one role assignment
url = '/role_assignments?scope.system=all&user.id=%s' % user['id']
response = self.get(url)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
self.assertEqual(
user_system_role_id,
response.json_body['role_assignments'][0]['role']['id']
)
# /v3/role_assignments?scope_system=all&group.id=$GROUP_ID should
# return one role assignment
url = '/role_assignments?scope.system=all&group.id=%s' % group['id']
response = self.get(url)
self.assertValidRoleAssignmentListResponse(response, expected_length=1)
self.assertEqual(
group_system_role_id,
response.json_body['role_assignments'][0]['role']['id']
)
# /v3/role_assignments?user.id=$USER_ID should return 3 assignments
# and system should be in that list of assignments
url = '/role_assignments?user.id=%s' % user['id']
response = self.get(url)
self.assertValidRoleAssignmentListResponse(response, expected_length=3)
for assignment in response.json_body['role_assignments']:
if 'system' in assignment['scope']:
self.assertEqual(
user_system_role_id, assignment['role']['id']
)
if 'domain' in assignment['scope']:
self.assertEqual(
user_domain_role_id, assignment['role']['id']
)
if 'project' in assignment['scope']:
self.assertEqual(
user_project_role_id, assignment['role']['id']
)
# /v3/role_assignments?group.id=$GROUP_ID should return 3 assignments
# and system should be in that list of assignments
url = '/role_assignments?group.id=%s' % group['id']
response = self.get(url)
self.assertValidRoleAssignmentListResponse(response, expected_length=3)
for assignment in response.json_body['role_assignments']:
if 'system' in assignment['scope']:
self.assertEqual(
group_system_role_id, assignment['role']['id']
)
if 'domain' in assignment['scope']:
self.assertEqual(
group_domain_role_id, assignment['role']['id']
)
if 'project' in assignment['scope']:
self.assertEqual(
group_project_role_id, assignment['role']['id']
)
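# Descriptive note: a system-scoped assignment is distinguished by its scope
# block, which is why the loops above branch on the 'system', 'domain' and
# 'project' keys of assignment['scope']. The system form looks like
#   "scope": {"system": {"all": true}}
# (shape taken from the assertions in this test; values are illustrative).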
class RoleAssignmentBaseTestCase(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin):
"""Base class for testing /v3/role_assignments API behavior."""
MAX_HIERARCHY_BREADTH = 3
MAX_HIERARCHY_DEPTH = CONF.max_project_tree_depth - 1
def load_sample_data(self):
"""Create sample data to be used on tests.
Created data are i) a role and ii) a domain containing: a project
hierarchy and 3 users within 3 groups.
"""
def create_project_hierarchy(parent_id, depth):
"""Create a random project hierarchy."""
if depth == 0:
return
breadth = random.randint(1, self.MAX_HIERARCHY_BREADTH)
subprojects = []
for i in range(breadth):
subprojects.append(unit.new_project_ref(
domain_id=self.domain_id, parent_id=parent_id))
PROVIDERS.resource_api.create_project(
subprojects[-1]['id'], subprojects[-1]
)
new_parent = subprojects[random.randint(0, breadth - 1)]
create_project_hierarchy(new_parent['id'], depth - 1)
super(RoleAssignmentBaseTestCase, self).load_sample_data()
# Create a domain
self.domain = unit.new_domain_ref()
self.domain_id = self.domain['id']
PROVIDERS.resource_api.create_domain(self.domain_id, self.domain)
# Create a project hierarchy
self.project = unit.new_project_ref(domain_id=self.domain_id)
self.project_id = self.project['id']
PROVIDERS.resource_api.create_project(self.project_id, self.project)
# Create a random project hierarchy
create_project_hierarchy(self.project_id,
random.randint(1, self.MAX_HIERARCHY_DEPTH))
# Create 3 users
self.user_ids = []
for i in range(3):
user = unit.new_user_ref(domain_id=self.domain_id)
user = PROVIDERS.identity_api.create_user(user)
self.user_ids.append(user['id'])
# Create 3 groups
self.group_ids = []
for i in range(3):
group = unit.new_group_ref(domain_id=self.domain_id)
group = PROVIDERS.identity_api.create_group(group)
self.group_ids.append(group['id'])
# Put 2 members on each group
PROVIDERS.identity_api.add_user_to_group(
user_id=self.user_ids[i], group_id=group['id']
)
PROVIDERS.identity_api.add_user_to_group(
user_id=self.user_ids[i % 2], group_id=group['id']
)
PROVIDERS.assignment_api.create_grant(
user_id=self.user_id, project_id=self.project_id,
role_id=self.role_id
)
# Create a role
self.role = unit.new_role_ref()
self.role_id = self.role['id']
PROVIDERS.role_api.create_role(self.role_id, self.role)
# Set default user and group to be used on tests
self.default_user_id = self.user_ids[0]
self.default_group_id = self.group_ids[0]
def get_role_assignments(self, expected_status=http.client.OK, **filters):
"""Return the result from querying role assignment API + queried URL.
Calls GET /v3/role_assignments?<params> and returns its result, where
<params> is the HTTP query parameters form of effective option plus
filters, if provided. Queried URL is returned as well.
:returns: a tuple containing the list role assignments API response and
queried URL.
"""
query_url = self._get_role_assignments_query_url(**filters)
response = self.get(query_url, expected_status=expected_status)
return (response, query_url)
def _get_role_assignments_query_url(self, **filters):
"""Return non-effective role assignments query URL from given filters.
:param filters: query parameters are created with the provided filters
on role assignments attributes. Valid filters are:
role_id, domain_id, project_id, group_id, user_id and
inherited_to_projects.
:returns: role assignments query URL.
"""
return self.build_role_assignment_query_url(**filters)
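# Illustration (assumed example, mirroring the URLs used in the tests above):
# get_role_assignments(user_id=u, project_id=p, role_id=r) queries a URL of
# roughly the form
#   /role_assignments?user.id=<u>&scope.project.id=<p>&role.id=<r>
# i.e. entity attributes map onto the dotted query parameters of the API.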
class RoleAssignmentFailureTestCase(RoleAssignmentBaseTestCase):
"""Class for testing invalid query params on /v3/role_assignments API.
Querying domain and project, or user and group, results in an HTTP 400 Bad
Request, since a role assignment must contain only a single pair of (actor,
target). In addition, since filtering on role assignments applies only to
the final result, effective mode cannot be combined with i) group or ii)
domain and inherited, because it would always result in an empty list.
"""
class RoleAssignmentDirectTestCase(RoleAssignmentBaseTestCase):
"""Class for testing direct assignments on /v3/role_assignments API.
Direct assignments on a domain or project have effect on them directly,
instead of on their project hierarchy, i.e. they are non-inherited. In
addition, group direct assignments are not expanded to group's users.
Tests on this class make assertions on the representation and API filtering
of direct assignments.
"""
def _test_get_role_assignments(self, **filters):
"""Generic filtering test method.
According to the provided filters, this method:
- creates a new role assignment;
- asserts that the list role assignments API responds correctly;
- deletes the created role assignment.
:param filters: filters to be considered when listing role assignments.
Valid filters are: role_id, domain_id, project_id,
group_id, user_id and inherited_to_projects.
"""
# Fills default assignment with provided filters
test_assignment = self._set_default_assignment_attributes(**filters)
# Create new role assignment for this test
PROVIDERS.assignment_api.create_grant(**test_assignment)
# Get expected role assignments
expected_assignments = self._list_expected_role_assignments(
**test_assignment)
# Get role assignments from API
response, query_url = self.get_role_assignments(**test_assignment)
self.assertValidRoleAssignmentListResponse(response,
resource_url=query_url)
self.assertEqual(len(expected_assignments),
len(response.result.get('role_assignments')))
# Assert that expected role assignments were returned by the API call
for assignment in expected_assignments:
self.assertRoleAssignmentInListResponse(response, assignment)
# Delete created role assignment
PROVIDERS.assignment_api.delete_grant(**test_assignment)
def _set_default_assignment_attributes(self, **attribs):
"""Insert default values for missing attributes of role assignment.
If no actor, target or role are provided, they will default to values
from sample data.
:param attribs: info from a role assignment entity. Valid attributes
are: role_id, domain_id, project_id, group_id, user_id
and inherited_to_projects.
"""
if not any(target in attribs
for target in ('domain_id', 'project_id')):
attribs['project_id'] = self.project_id
if not any(actor in attribs for actor in ('user_id', 'group_id')):
attribs['user_id'] = self.default_user_id
if 'role_id' not in attribs:
attribs['role_id'] = self.role_id
return attribs
def _list_expected_role_assignments(self, **filters):
"""Given the filters, it returns expected direct role assignments.
:param filters: filters that will be considered when listing role
assignments. Valid filters are: role_id, domain_id,
project_id, group_id, user_id and
inherited_to_projects.
:returns: the list of the expected role assignments.
"""
return [self.build_role_assignment_entity(**filters)]
# Test cases below call the generic test method, providing different filter
# combinations. Filters are provided as specified in the method name, after
# 'by'. For example, test_get_role_assignments_by_project_user_and_role
# calls the generic test method with project_id, user_id and role_id.
class RoleAssignmentInheritedTestCase(RoleAssignmentDirectTestCase):
"""Class for testing inherited assignments on /v3/role_assignments API.
Inherited assignments on a domain or project have no effect on them
directly, but on the projects under them instead.
Tests on this class do not make assertions on the effect of inherited
assignments, but on their representation and API filtering.
"""
def _test_get_role_assignments(self, **filters):
"""Add inherited_to_project filter to expected entity in tests."""
super(RoleAssignmentInheritedTestCase,
self)._test_get_role_assignments(inherited_to_projects=True,
**filters)
class RoleAssignmentEffectiveTestCase(RoleAssignmentInheritedTestCase):
"""Class for testing inheritance effects on /v3/role_assignments API.
Inherited assignments on a domain or project have no effect on them
directly, but on the projects under them instead.
Tests on this class make assertions on the effect of inherited assignments
and API filtering.
"""
def _get_role_assignments_query_url(self, **filters):
"""Return effective role assignments query URL from given filters.
For test methods in this class, effective will always be true. Since in
effective mode, inherited_to_projects, group_id, domain_id and
project_id are always disregarded from the provided filters.
:param filters: query parameters are created with the provided filters.
Valid filters are: role_id, domain_id, project_id,
group_id, user_id and inherited_to_projects.
:returns: role assignments query URL.
"""
query_filters = filters.copy()
query_filters.pop('inherited_to_projects')
query_filters.pop('group_id', None)
query_filters.pop('domain_id', None)
query_filters.pop('project_id', None)
return self.build_role_assignment_query_url(effective=True,
**query_filters)
def _list_expected_role_assignments(self, **filters):
"""Given the filters, it returns expected direct role assignments.
:param filters: filters that will be considered when listing role
assignments. Valid filters are: role_id, domain_id,
project_id, group_id, user_id and
inherited_to_projects.
:returns: the list of the expected role assignments.
"""
# Get assignment link, to be put on 'links': {'assignment': link}
assignment_link = self.build_role_assignment_link(**filters)
# Expand group membership
user_ids = [None]
if filters.get('group_id'):
user_ids = [user['id'] for user in
PROVIDERS.identity_api.list_users_in_group(
filters['group_id'])]
else:
user_ids = [self.default_user_id]
# Expand role inheritance
project_ids = [None]
if filters.get('domain_id'):
project_ids = [project['id'] for project in
PROVIDERS.resource_api.list_projects_in_domain(
filters.pop('domain_id'))]
else:
project_ids = [project['id'] for project in
PROVIDERS.resource_api.list_projects_in_subtree(
self.project_id)]
# Compute expected role assignments
assignments = []
for project_id in project_ids:
filters['project_id'] = project_id
for user_id in user_ids:
filters['user_id'] = user_id
assignments.append(self.build_role_assignment_entity(
link=assignment_link, **filters))
return assignments
class AssignmentInheritanceTestCase(test_v3.RestfulTestCase,
test_v3.AssignmentTestMixin):
"""Test inheritance crud and its effects."""
def test_list_role_assignments_for_inherited_domain_grants(self):
"""Call ``GET /role_assignments with inherited domain grants``.
Test Plan:
- Create 4 roles
- Create a domain with a user and two projects
- Assign two direct roles to project1
- Assign a spoiler role to project2
- Issue the URL to add inherited role to the domain
- Issue the URL to check it is indeed on the domain
- Issue the URL to check effective roles on project1 - this
should return 3 roles.
"""
role_list = []
for _ in range(4):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
role_list.append(role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
project1 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
project2 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project2['id'], project2)
# Add some roles to the project
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[0]['id'])
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[1]['id'])
# ..and one on a different project as a spoiler
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project2['id'], role_list[2]['id'])
# Now create our inherited role on the domain
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': domain['id'],
'user_id': user1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[3]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[3],
resource_url=collection_url)
# Now use the list domain role assignments api to check if this
# is included
collection_url = (
'/role_assignments?user.id=%(user_id)s'
'&scope.domain.id=%(domain_id)s' % {
'user_id': user1['id'],
'domain_id': domain['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=1,
resource_url=collection_url)
ud_entity = self.build_role_assignment_entity(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, ud_entity)
# Now ask for effective list role assignments - the role should
# turn into a project role, along with the two direct roles that are
# on the project
collection_url = (
'/role_assignments?effective&user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
# An effective role for an inherited role will be a project
# entity, with a domain link to the inherited assignment
ud_url = self.build_role_assignment_link(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
up_entity = self.build_role_assignment_entity(
link=ud_url, project_id=project1['id'],
user_id=user1['id'], role_id=role_list[3]['id'],
inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, up_entity)
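# For reference (shape assumed from the assertions elsewhere in this class):
# the inherited grant created above is represented with an extra marker in
# its scope block,
#   "scope": {"domain": {"id": "<domain-id>"},
#             "OS-INHERIT:inherited_to": "projects"}
# and such grants can be filtered directly via
#   GET /role_assignments?scope.OS-INHERIT:inherited_to=projects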
def _test_list_role_assignments_include_names(self, role1):
"""Call ``GET /role_assignments with include names``.
Test Plan:
- Create a domain with a group and a user
- Create a project with a group and a user
"""
role1 = unit.new_role_ref()
PROVIDERS.role_api.create_role(role1['id'], role1)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=self.domain_id
)
group = unit.new_group_ref(domain_id=self.domain_id)
group = PROVIDERS.identity_api.create_group(group)
project1 = unit.new_project_ref(domain_id=self.domain_id)
PROVIDERS.resource_api.create_project(project1['id'], project1)
expected_entity1 = self.build_role_assignment_entity_include_names(
role_ref=role1,
project_ref=project1,
user_ref=user1)
self.put(expected_entity1['links']['assignment'])
expected_entity2 = self.build_role_assignment_entity_include_names(
role_ref=role1,
domain_ref=self.domain,
group_ref=group)
self.put(expected_entity2['links']['assignment'])
expected_entity3 = self.build_role_assignment_entity_include_names(
role_ref=role1,
domain_ref=self.domain,
user_ref=user1)
self.put(expected_entity3['links']['assignment'])
expected_entity4 = self.build_role_assignment_entity_include_names(
role_ref=role1,
project_ref=project1,
group_ref=group)
self.put(expected_entity4['links']['assignment'])
collection_url_domain = (
'/role_assignments?include_names&scope.domain.id=%(domain_id)s' % {
'domain_id': self.domain_id})
rs_domain = self.get(collection_url_domain)
collection_url_project = (
'/role_assignments?include_names&'
'scope.project.id=%(project_id)s' % {
'project_id': project1['id']})
rs_project = self.get(collection_url_project)
collection_url_group = (
'/role_assignments?include_names&group.id=%(group_id)s' % {
'group_id': group['id']})
rs_group = self.get(collection_url_group)
collection_url_user = (
'/role_assignments?include_names&user.id=%(user_id)s' % {
'user_id': user1['id']})
rs_user = self.get(collection_url_user)
collection_url_role = (
'/role_assignments?include_names&role.id=%(role_id)s' % {
'role_id': role1['id']})
rs_role = self.get(collection_url_role)
# Make sure all entities were created successfully
self.assertEqual(http.client.OK, rs_domain.status_int)
self.assertEqual(http.client.OK, rs_project.status_int)
self.assertEqual(http.client.OK, rs_group.status_int)
self.assertEqual(http.client.OK, rs_user.status_int)
# Make sure we can get back the correct number of entities
self.assertValidRoleAssignmentListResponse(
rs_domain,
expected_length=2,
resource_url=collection_url_domain)
self.assertValidRoleAssignmentListResponse(
rs_project,
expected_length=2,
resource_url=collection_url_project)
self.assertValidRoleAssignmentListResponse(
rs_group,
expected_length=2,
resource_url=collection_url_group)
self.assertValidRoleAssignmentListResponse(
rs_user,
expected_length=2,
resource_url=collection_url_user)
self.assertValidRoleAssignmentListResponse(
rs_role,
expected_length=4,
resource_url=collection_url_role)
# Verify all types of entities have the correct format
self.assertRoleAssignmentInListResponse(rs_domain, expected_entity2)
self.assertRoleAssignmentInListResponse(rs_project, expected_entity1)
self.assertRoleAssignmentInListResponse(rs_group, expected_entity4)
self.assertRoleAssignmentInListResponse(rs_user, expected_entity3)
self.assertRoleAssignmentInListResponse(rs_role, expected_entity1)
def test_remove_assignment_for_project_acting_as_domain(self):
"""Test goal: remove assignment for project acting as domain.
Ensure that when we have two role assignments for the project
acting as domain, one dealing with it as a domain and the other as a
project, we are still able to remove those assignments later.
Test plan:
- Create a role and a domain with a user;
- Grant a role for this user in this domain;
- Grant a role for this user in the same entity as a project;
- Ensure that both assignments were created and it was valid;
- Remove the domain assignment for the user and show that the project
assignment for that user is still valid
"""
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user = unit.create_user(PROVIDERS.identity_api, domain_id=domain['id'])
assignment_domain = self.build_role_assignment_entity(
role_id=role['id'], domain_id=domain['id'], user_id=user['id'],
inherited_to_projects=False)
assignment_project = self.build_role_assignment_entity(
role_id=role['id'], project_id=domain['id'], user_id=user['id'],
inherited_to_projects=False)
self.put(assignment_domain['links']['assignment'])
self.put(assignment_project['links']['assignment'])
collection_url = '/role_assignments?user.id=%(user_id)s' % (
{'user_id': user['id']})
result = self.get(collection_url)
# We have two role assignments based in both roles for the domain and
# project scope
self.assertValidRoleAssignmentListResponse(
result, expected_length=2, resource_url=collection_url)
self.assertRoleAssignmentInListResponse(result, assignment_domain)
domain_url = '/domains/%s/users/%s/roles/%s' % (
domain['id'], user['id'], role['id'])
self.delete(domain_url)
collection_url = '/role_assignments?user.id=%(user_id)s' % (
{'user_id': user['id']})
result = self.get(collection_url)
# Now we only have one assignment for the project scope since the
# domain scope was removed.
self.assertValidRoleAssignmentListResponse(
result, expected_length=1, resource_url=collection_url)
self.assertRoleAssignmentInListResponse(result, assignment_project)
def test_list_inherited_role_assignments_include_names(self):
"""Call ``GET /role_assignments?include_names``.
Test goal: ensure calling list role assignments including names
honors the inherited role assignments flag.
Test plan:
- Create a role and a domain with a user;
- Create an inherited role assignment;
- List role assignments for that user;
- List role assignments for that user including names.
"""
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user = unit.create_user(PROVIDERS.identity_api, domain_id=domain['id'])
# Create and store expected assignment refs
assignment = self.build_role_assignment_entity(
role_id=role['id'], domain_id=domain['id'], user_id=user['id'],
inherited_to_projects=True)
assignment_names = self.build_role_assignment_entity_include_names(
role_ref=role, domain_ref=domain, user_ref=user,
inherited_assignment=True)
# Ensure expected assignment refs are inherited and have the same URL
self.assertEqual('projects',
assignment['scope']['OS-INHERIT:inherited_to'])
self.assertEqual('projects',
assignment_names['scope']['OS-INHERIT:inherited_to'])
self.assertEqual(assignment['links']['assignment'],
assignment_names['links']['assignment'])
self.put(assignment['links']['assignment'])
collection_url = '/role_assignments?user.id=%(user_id)s' % (
{'user_id': user['id']})
result = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
result, expected_length=1, resource_url=collection_url)
self.assertRoleAssignmentInListResponse(result, assignment)
collection_url = ('/role_assignments?include_names&'
'user.id=%(user_id)s' % {'user_id': user['id']})
result = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
result, expected_length=1, resource_url=collection_url)
self.assertRoleAssignmentInListResponse(result, assignment_names)
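# Descriptive note (an assumption based on the *_include_names helpers used
# above): with ?include_names the same assignments are returned, but the
# user/group, domain/project and role references are expected to carry
# human-readable name fields alongside their IDs.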
def test_list_role_assignments_for_disabled_inheritance_extension(self):
"""Call ``GET /role_assignments with inherited domain grants``.
Test Plan:
- Issue the URL to add inherited role to the domain
- Issue the URL to check effective roles on project include the
inherited role
- Disable the extension
- Re-check the effective roles, proving the inherited role no longer
shows up.
"""
role_list = []
for _ in range(4):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
role_list.append(role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
project1 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
project2 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project2['id'], project2)
# Add some roles to the project
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[0]['id'])
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[1]['id'])
# ..and one on a different project as a spoiler
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project2['id'], role_list[2]['id'])
# Now create our inherited role on the domain
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': domain['id'],
'user_id': user1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[3]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[3],
resource_url=collection_url)
# Get effective list role assignments - the role should
# turn into a project role, along with the two direct roles that are
# on the project
collection_url = (
'/role_assignments?effective&user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
ud_url = self.build_role_assignment_link(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
up_entity = self.build_role_assignment_entity(
link=ud_url, project_id=project1['id'],
user_id=user1['id'], role_id=role_list[3]['id'],
inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, up_entity)
def test_list_role_assignments_for_inherited_group_domain_grants(self):
"""Call ``GET /role_assignments with inherited group domain grants``.
Test Plan:
- Create 4 roles
- Create a domain with a user and two projects
- Assign two direct roles to project1
- Assign a spoiler role to project2
- Issue the URL to add inherited role to the domain
- Issue the URL to check it is indeed on the domain
- Issue the URL to check effective roles on project1 - this
should return 3 roles.
"""
role_list = []
for _ in range(4):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
role_list.append(role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
user2 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = PROVIDERS.identity_api.create_group(group1)
PROVIDERS.identity_api.add_user_to_group(
user1['id'], group1['id']
)
PROVIDERS.identity_api.add_user_to_group(
user2['id'], group1['id']
)
project1 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
project2 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project2['id'], project2)
# Add some roles to the project
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[0]['id'])
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[1]['id'])
# ..and one on a different project as a spoiler
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project2['id'], role_list[2]['id'])
# Now create our inherited role on the domain
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': domain['id'],
'group_id': group1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[3]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[3],
resource_url=collection_url)
# Now use the list domain role assignments api to check if this
# is included
collection_url = (
'/role_assignments?group.id=%(group_id)s'
'&scope.domain.id=%(domain_id)s' % {
'group_id': group1['id'],
'domain_id': domain['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=1,
resource_url=collection_url)
gd_entity = self.build_role_assignment_entity(
domain_id=domain['id'], group_id=group1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, gd_entity)
# Now ask for effective list role assignments - the role should
# turn into a user project role, along with the two direct roles
# that are on the project
collection_url = (
'/role_assignments?effective&user.id=%(user_id)s'
'&scope.project.id=%(project_id)s' % {
'user_id': user1['id'],
'project_id': project1['id']})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=3,
resource_url=collection_url)
# An effective role for an inherited role will be a project
# entity, with a domain link to the inherited assignment
up_entity = self.build_role_assignment_entity(
link=gd_entity['links']['assignment'], project_id=project1['id'],
user_id=user1['id'], role_id=role_list[3]['id'],
inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, up_entity)
def test_filtered_role_assignments_for_inherited_grants(self):
"""Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
Test Plan:
- Create 5 roles
- Create a domain with a user, group and two projects
- Assign three direct spoiler roles to projects
- Issue the URL to add an inherited user role to the domain
- Issue the URL to add an inherited group role to the domain
- Issue the URL to filter by inherited roles - this should
return just the 2 inherited roles.
"""
role_list = []
for _ in range(5):
role = unit.new_role_ref()
PROVIDERS.role_api.create_role(role['id'], role)
role_list.append(role)
domain = unit.new_domain_ref()
PROVIDERS.resource_api.create_domain(domain['id'], domain)
user1 = unit.create_user(
PROVIDERS.identity_api, domain_id=domain['id']
)
group1 = unit.new_group_ref(domain_id=domain['id'])
group1 = PROVIDERS.identity_api.create_group(group1)
project1 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project1['id'], project1)
project2 = unit.new_project_ref(domain_id=domain['id'])
PROVIDERS.resource_api.create_project(project2['id'], project2)
# Add some spoiler roles to the projects
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project1['id'], role_list[0]['id'])
PROVIDERS.assignment_api.add_role_to_user_and_project(
user1['id'], project2['id'], role_list[1]['id'])
# Create a non-inherited role as a spoiler
PROVIDERS.assignment_api.create_grant(
role_list[2]['id'], user_id=user1['id'], domain_id=domain['id'])
# Now create two inherited roles on the domain, one for a user
# and one for a domain
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/users/%(user_id)s/roles' % {
'domain_id': domain['id'],
'user_id': user1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[3]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[3],
resource_url=collection_url)
base_collection_url = (
'/OS-INHERIT/domains/%(domain_id)s/groups/%(group_id)s/roles' % {
'domain_id': domain['id'],
'group_id': group1['id']})
member_url = '%(collection_url)s/%(role_id)s/inherited_to_projects' % {
'collection_url': base_collection_url,
'role_id': role_list[4]['id']}
collection_url = base_collection_url + '/inherited_to_projects'
self.put(member_url)
self.head(member_url)
self.get(member_url, expected_status=http.client.NO_CONTENT)
r = self.get(collection_url)
self.assertValidRoleListResponse(r, ref=role_list[4],
resource_url=collection_url)
# Now use the list role assignments api to get a list of inherited
# roles on the domain - should get back the two roles
collection_url = (
'/role_assignments?scope.OS-INHERIT:inherited_to=projects')
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
expected_length=2,
resource_url=collection_url)
ud_entity = self.build_role_assignment_entity(
domain_id=domain['id'], user_id=user1['id'],
role_id=role_list[3]['id'], inherited_to_projects=True)
gd_entity = self.build_role_assignment_entity(
domain_id=domain['id'], group_id=group1['id'],
role_id=role_list[4]['id'], inherited_to_projects=True)
self.assertRoleAssignmentInListResponse(r, ud_entity)
self.assertRoleAssignmentInListResponse(r, gd_entity)
def _setup_hierarchical_projects_scenario(self):
"""Create basic hierarchical projects scenario.
This basic scenario contains a root with one leaf project and
two roles with the following names: non-inherited and inherited.
"""
# Create project hierarchy
root = unit.new_project_ref(domain_id=self.domain['id'])
leaf = unit.new_project_ref(domain_id=self.domain['id'],
parent_id=root['id'])
PROVIDERS.resource_api.create_project(root['id'], root)
PROVIDERS.resource_api.create_project(leaf['id'], leaf)
# Create 'non-inherited' and 'inherited' roles
non_inherited_role = unit.new_role_ref(name='non-inherited')
PROVIDERS.role_api.create_role(
non_inherited_role['id'], non_inherited_role
)
inherited_role = unit.new_role_ref(name='inherited')
PROVIDERS.role_api.create_role(inherited_role['id'], inherited_role)
return (root['id'], leaf['id'],
non_inherited_role['id'], inherited_role['id'])
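# Scenario built by this helper, for reference:
#
#   domain
#     +-- root project
#           +-- leaf project
#
# plus two roles named 'non-inherited' and 'inherited'. The tests below grant
# them on the root project and assert where each one becomes effective.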
def test_get_role_assignments_for_project_hierarchy(self):
"""Call ``GET /role_assignments``.
Test Plan:
- Create 2 roles
- Create a hierarchy of projects with one root and one leaf project
- Issue the URL to add a non-inherited user role to the root project
- Issue the URL to add an inherited user role to the root project
- Issue the URL to get all role assignments - this should return just
2 roles (non-inherited and inherited) in the root project.
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role
non_inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_entity['links']['assignment'])
# Grant inherited role
inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_entity['links']['assignment'])
# Get role assignments
collection_url = '/role_assignments'
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
# Assert that the user has non-inherited role on root project
self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
# Assert that the user has inherited role on root project
self.assertRoleAssignmentInListResponse(r, inher_up_entity)
# Assert that the user does not have non-inherited role on leaf project
non_inher_up_entity = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
# Assert that the user does not have inherited role on leaf project
inher_up_entity['scope']['project']['id'] = leaf_id
self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
def test_get_effective_role_assignments_for_project_hierarchy(self):
"""Call ``GET /role_assignments?effective``.
Test Plan:
- Create 2 roles
- Create a hierarchy of projects with one root and one leaf project
- Issue the URL to add a non-inherited user role to the root project
- Issue the URL to add an inherited user role to the root project
- Issue the URL to get effective role assignments - this should return
1 role (non-inherited) on the root project and 1 role (inherited) on
the leaf project.
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role
non_inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_entity['links']['assignment'])
# Grant inherited role
inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_entity['links']['assignment'])
# Get effective role assignments
collection_url = '/role_assignments?effective'
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
# Assert that the user has non-inherited role on root project
self.assertRoleAssignmentInListResponse(r, non_inher_up_entity)
# Assert that the user does not have inherited role on root project
self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
# Assert that the user does not have non-inherited role on leaf project
non_inher_up_entity = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
# Assert that the user has inherited role on leaf project
inher_up_entity['scope']['project']['id'] = leaf_id
self.assertRoleAssignmentInListResponse(r, inher_up_entity)
def test_project_id_specified_if_include_subtree_specified(self):
"""When using include_subtree, you must specify a project ID."""
r = self.get('/role_assignments?include_subtree=True',
expected_status=http.client.BAD_REQUEST)
error_msg = ("scope.project.id must be specified if include_subtree "
"is also specified")
self.assertEqual(error_msg, r.result['error']['message'])
r = self.get('/role_assignments?scope.project.id&'
'include_subtree=True',
expected_status=http.client.BAD_REQUEST)
self.assertEqual(error_msg, r.result['error']['message'])
def test_get_role_assignments_for_project_tree(self):
"""Get role_assignment?scope.project.id=X&include_subtree``.
Test Plan:
- Create 2 roles and a hierarchy of projects with one root and one leaf
- Issue the URL to add a non-inherited user role to the root project
and the leaf project
- Issue the URL to get role assignments for the root project but
not the subtree - this should return just the root assignment
- Issue the URL to get role assignments for the root project and
          its subtree - this should return both assignments
- Check that explicitly setting include_subtree to False is the
equivalent to not including it at all in the query.
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, unused_role_id = (
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role to root and leaf projects
non_inher_entity_root = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_entity_root['links']['assignment'])
non_inher_entity_leaf = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_entity_leaf['links']['assignment'])
# Without the subtree, we should get the one assignment on the
# root project
collection_url = (
'/role_assignments?scope.project.id=%(project)s' % {
'project': root_id})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r, resource_url=collection_url)
self.assertThat(r.result['role_assignments'], matchers.HasLength(1))
self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
# With the subtree, we should get both assignments
collection_url = (
'/role_assignments?scope.project.id=%(project)s'
'&include_subtree=True' % {
'project': root_id})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r, resource_url=collection_url)
self.assertThat(r.result['role_assignments'], matchers.HasLength(2))
self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf)
# With subtree=0, we should also only get the one assignment on the
# root project
collection_url = (
'/role_assignments?scope.project.id=%(project)s'
'&include_subtree=0' % {
'project': root_id})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r, resource_url=collection_url)
self.assertThat(r.result['role_assignments'], matchers.HasLength(1))
self.assertRoleAssignmentInListResponse(r, non_inher_entity_root)
def test_get_effective_role_assignments_for_project_tree(self):
"""Get role_assignment ?project_id=X&include_subtree=True&effective``.
Test Plan:
- Create 2 roles and a hierarchy of projects with one root and 4 levels
of child project
- Issue the URL to add a non-inherited user role to the root project
and a level 1 project
- Issue the URL to add an inherited user role on the level 2 project
- Issue the URL to get effective role assignments for the level 1
          project and its subtree - this should return a role (non-inherited)
on the level 1 project and roles (inherited) on each of the level
2, 3 and 4 projects
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Add some extra projects to the project hierarchy
level2 = unit.new_project_ref(domain_id=self.domain['id'],
parent_id=leaf_id)
level3 = unit.new_project_ref(domain_id=self.domain['id'],
parent_id=level2['id'])
level4 = unit.new_project_ref(domain_id=self.domain['id'],
parent_id=level3['id'])
PROVIDERS.resource_api.create_project(level2['id'], level2)
PROVIDERS.resource_api.create_project(level3['id'], level3)
PROVIDERS.resource_api.create_project(level4['id'], level4)
# Grant non-inherited role to root (as a spoiler) and to
# the level 1 (leaf) project
non_inher_entity_root = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_entity_root['links']['assignment'])
non_inher_entity_leaf = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_entity_leaf['links']['assignment'])
# Grant inherited role to level 2
inher_entity = self.build_role_assignment_entity(
project_id=level2['id'], user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_entity['links']['assignment'])
# Get effective role assignments
collection_url = (
'/role_assignments?scope.project.id=%(project)s'
'&include_subtree=True&effective' % {
'project': leaf_id})
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(
r, resource_url=collection_url)
# There should be three assignments returned in total
self.assertThat(r.result['role_assignments'], matchers.HasLength(3))
        # Assert that the user does not have non-inherited role on root project
self.assertRoleAssignmentNotInListResponse(r, non_inher_entity_root)
# Assert that the user does have non-inherited role on leaf project
self.assertRoleAssignmentInListResponse(r, non_inher_entity_leaf)
# Assert that the user has inherited role on levels 3 and 4
inher_entity['scope']['project']['id'] = level3['id']
self.assertRoleAssignmentInListResponse(r, inher_entity)
inher_entity['scope']['project']['id'] = level4['id']
self.assertRoleAssignmentInListResponse(r, inher_entity)
def test_get_inherited_role_assignments_for_project_hierarchy(self):
"""Call ``GET /role_assignments?scope.OS-INHERIT:inherited_to``.
Test Plan:
- Create 2 roles
- Create a hierarchy of projects with one root and one leaf project
- Issue the URL to add a non-inherited user role to the root project
- Issue the URL to add an inherited user role to the root project
- Issue the URL to filter inherited to projects role assignments - this
should return 1 role (inherited) on the root project.
"""
# Create default scenario
root_id, leaf_id, non_inherited_role_id, inherited_role_id = (
self._setup_hierarchical_projects_scenario())
# Grant non-inherited role
non_inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.put(non_inher_up_entity['links']['assignment'])
# Grant inherited role
inher_up_entity = self.build_role_assignment_entity(
project_id=root_id, user_id=self.user['id'],
role_id=inherited_role_id, inherited_to_projects=True)
self.put(inher_up_entity['links']['assignment'])
# Get inherited role assignments
collection_url = ('/role_assignments'
'?scope.OS-INHERIT:inherited_to=projects')
r = self.get(collection_url)
self.assertValidRoleAssignmentListResponse(r,
resource_url=collection_url)
# Assert that the user does not have non-inherited role on root project
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
# Assert that the user has inherited role on root project
self.assertRoleAssignmentInListResponse(r, inher_up_entity)
# Assert that the user does not have non-inherited role on leaf project
non_inher_up_entity = self.build_role_assignment_entity(
project_id=leaf_id, user_id=self.user['id'],
role_id=non_inherited_role_id)
self.assertRoleAssignmentNotInListResponse(r, non_inher_up_entity)
# Assert that the user does not have inherited role on leaf project
inher_up_entity['scope']['project']['id'] = leaf_id
self.assertRoleAssignmentNotInListResponse(r, inher_up_entity)
class ImpliedRolesTests(test_v3.RestfulTestCase, test_v3.AssignmentTestMixin,
unit.TestCase):
def _create_role(self):
"""Call ``POST /roles``."""
ref = unit.new_role_ref()
r = self.post('/roles', body={'role': ref})
return self.assertValidRoleResponse(r, ref)
def test_list_role_assignments_with_implied_roles(self):
"""Call ``GET /role_assignments`` with implied role grant.
Test Plan:
- Create a domain with a user and a project
- Create 3 roles
- Role 0 implies role 1 and role 1 implies role 2
- Assign the top role to the project
- Issue the URL to check effective roles on project - this
should return all 3 roles.
- Check the links of the 3 roles indicate the prior role where
appropriate
"""
(domain, user, project) = self._create_test_domain_user_project()
self._create_three_roles()
self._create_implied_role(self.role_list[0], self.role_list[1])
self._create_implied_role(self.role_list[1], self.role_list[2])
self._assign_top_role_to_user_on_project(user, project)
response = self.get(self._build_effective_role_assignments_url(user))
r = response
self._assert_all_roles_in_assignment(r, user)
self._assert_initial_assignment_in_effective(response, user, project)
self._assert_effective_role_for_implied_has_prior_in_links(
response, user, project, 0, 1)
self._assert_effective_role_for_implied_has_prior_in_links(
response, user, project, 1, 2)
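    # Rough sketch of the call chain exercised above (IDs are placeholders, and
    # the exact URLs issued by the helpers are assumptions based on their
    # names):
    #
    #   PUT /v3/roles/<role0_id>/implies/<role1_id>
    #   PUT /v3/roles/<role1_id>/implies/<role2_id>
    #   PUT /v3/projects/<project_id>/users/<user_id>/roles/<role0_id>
    #   GET /v3/role_assignments?user.id=<user_id>&effective
    #
    # The effective listing is then expected to expand the single direct grant
    # of role 0 into all three roles, with the implied ones linking back to the
    # role that implied them.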
def test_root_role_as_implied_role_forbidden(self):
"""Test root role is forbidden to be set as an implied role.
Create 2 roles that are prohibited from being an implied role.
Create 1 additional role which should be accepted as an implied
role. Assure the prohibited role names cannot be set as an implied
role. Assure the accepted role name which is not a member of the
        prohibited implied role list can be successfully set as an implied
role.
"""
prohibited_name1 = 'root1'
prohibited_name2 = 'root2'
accepted_name1 = 'implied1'
prohibited_names = [prohibited_name1, prohibited_name2]
self.config_fixture.config(group='assignment',
prohibited_implied_role=prohibited_names)
prior_role = self._create_role()
prohibited_role1 = self._create_named_role(prohibited_name1)
url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
prior_role_id=prior_role['id'],
implied_role_id=prohibited_role1['id'])
self.put(url, expected_status=http.client.FORBIDDEN)
prohibited_role2 = self._create_named_role(prohibited_name2)
url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
prior_role_id=prior_role['id'],
implied_role_id=prohibited_role2['id'])
self.put(url, expected_status=http.client.FORBIDDEN)
accepted_role1 = self._create_named_role(accepted_name1)
url = '/roles/{prior_role_id}/implies/{implied_role_id}'.format(
prior_role_id=prior_role['id'],
implied_role_id=accepted_role1['id'])
self.put(url, expected_status=http.client.CREATED)
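    # The config fixture above corresponds, roughly, to the following
    # keystone.conf snippet (the option and group names come from the test;
    # the values are the test's own example role names):
    #
    #   [assignment]
    #   prohibited_implied_role = root1,root2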
class DomainSpecificRoleTests(test_v3.RestfulTestCase, unit.TestCase):
class ListUserProjectsTestCase(test_v3.RestfulTestCase):
"""Test for /users/<user>/projects."""
# FIXME(lbragstad): These tests contain system-level API calls, which means
# they will log a warning message if they are called with a project-scoped
# token, regardless of the role assignment on the project. We need to fix
# them by using a proper system-scoped admin token to make the call instead
# of a project scoped token.
| 43.434965 | 79 | 0.628166 |
d9e48585d735333916bf5e8b10a68c72e4541093 | 248,866 | py | Python | pysnmp-with-texts/XXX-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/XXX-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/XXX-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module XXX-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/XXX-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:44:42 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsIntersection")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Gauge32, ModuleIdentity, iso, Integer32, enterprises, ObjectIdentity, Unsigned32, Counter64, IpAddress, Bits, Counter32, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "ModuleIdentity", "iso", "Integer32", "enterprises", "ObjectIdentity", "Unsigned32", "Counter64", "IpAddress", "Bits", "Counter32", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "NotificationType")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
company = ModuleIdentity((1, 3, 6, 1, 4, 1, 6688))
company.setRevisions(('2009-03-05 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: company.setRevisionsDescriptions(('1G MC supported',))
if mibBuilder.loadTexts: company.setLastUpdated('200903050000Z')
if mibBuilder.loadTexts: company.setOrganization('FiberRoad')
if mibBuilder.loadTexts: company.setContactInfo('www.fiberroad.com.cn')
if mibBuilder.loadTexts: company.setDescription('Media Converter NMS SNMP mib file')
ipProduct = ObjectIdentity((1, 3, 6, 1, 4, 1, 6688, 1))
if mibBuilder.loadTexts: ipProduct.setStatus('current')
if mibBuilder.loadTexts: ipProduct.setDescription('IP product line')
height2HU = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1))
systemMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1))
alarmMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2))
shelfNum = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfNum.setStatus('current')
if mibBuilder.loadTexts: shelfNum.setDescription('The number of shelves in the current system')
shelfTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2), )
if mibBuilder.loadTexts: shelfTable.setStatus('current')
if mibBuilder.loadTexts: shelfTable.setDescription('Shelf table')
shelfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1), ).setIndexNames((0, "XXX-MIB", "shelfName"))
if mibBuilder.loadTexts: shelfEntry.setStatus('current')
if mibBuilder.loadTexts: shelfEntry.setDescription('Shelf entry definition')
shelfName = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("slave1", 2), ("slave2", 3), ("slave3", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfName.setStatus('current')
if mibBuilder.loadTexts: shelfName.setDescription('Shelf name')
psuA = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: psuA.setStatus('current')
if mibBuilder.loadTexts: psuA.setDescription('The status of PSU A of current shelf')
psuB = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: psuB.setStatus('current')
if mibBuilder.loadTexts: psuB.setDescription('The status of PSU B of current shelf')
volA = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("abnormal", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: volA.setStatus('current')
if mibBuilder.loadTexts: volA.setDescription('The voltage status of psuA of current shelf')
volB = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("abnormal", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: volB.setStatus('current')
if mibBuilder.loadTexts: volB.setDescription('The voltage status of psuB of current shelf')
fan = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("nc", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: fan.setStatus('current')
if mibBuilder.loadTexts: fan.setDescription('The status of the fan of current shelf')
temperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 7), Integer32()).setUnits(' oC').setMaxAccess("readonly")
if mibBuilder.loadTexts: temperature.setStatus('current')
if mibBuilder.loadTexts: temperature.setDescription('The temperature status of current shelf')
coCardNum = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: coCardNum.setStatus('current')
if mibBuilder.loadTexts: coCardNum.setDescription('The number of center cards inserted in the current shelf')
rmtCardNum = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtCardNum.setStatus('current')
if mibBuilder.loadTexts: rmtCardNum.setDescription('The number of remote cards inserted in the current shelf')
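# Illustrative only: with a net-snmp client and an agent exposing this MIB (the
# host address and community below are assumptions, not values from this file),
# the shelf table defined above can be walked by its numeric OID, e.g.
#   snmpwalk -v2c -c public 192.0.2.1 1.3.6.1.4.1.6688.1.1.1.2
# which returns one row per shelf: psuA/psuB, volA/volB, fan, temperature and
# the center/remote card counts.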
slotObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3))
slotTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1), )
if mibBuilder.loadTexts: slotTable.setStatus('current')
if mibBuilder.loadTexts: slotTable.setDescription('Sparse table containing one entry for each slot of each existing chassis in the system, indexed by shelfIdx and slotIdx.')
slotEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1), ).setIndexNames((0, "XXX-MIB", "shelfIdx"), (0, "XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: slotEntry.setStatus('current')
if mibBuilder.loadTexts: slotEntry.setDescription("In this table, the user can find the converter module's type inserted in the system's slot. Then you can get the detailed information about the specified type in the cardObjects table")
shelfIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("slave1", 2), ("slave2", 3), ("slave3", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: shelfIdx.setStatus('current')
if mibBuilder.loadTexts: shelfIdx.setDescription('Chassis index - 1 = master management module, 2-4 = slave management module')
slotIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17))).clone(namedValues=NamedValues(("slot01", 1), ("slot02", 2), ("slot03", 3), ("slot04", 4), ("slot05", 5), ("slot06", 6), ("slot07", 7), ("slot08", 8), ("slot09", 9), ("slot10", 10), ("slot11", 11), ("slot12", 12), ("slot13", 13), ("slot14", 14), ("slot15", 15), ("slot16", 16), ("slot17", 17)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: slotIdx.setStatus('current')
if mibBuilder.loadTexts: slotIdx.setDescription("chassis's slot, which is an index in this table")
coCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 100, 101, 102))).clone(namedValues=NamedValues(("no-card", 0), ("ip113s", 1), ("ip113f", 2), ("mc-1g-e2o", 3), ("mc-1g-o2o", 4), ("mc-4-25g-oeo", 5), ("mc-ip175d", 6), ("mc-10g-oeo", 7), ("mc-10g-oee", 8), ("mc-FAN", 9), ("mc-10g-oeo-1r", 10), ("mc-2-5g", 11), ("mc-40g-oeo", 12), ("mc-2-5g-t", 13), ("mc-2-5g-f", 14), ("mc-2-5g-mux-t", 15), ("mc-2-5g-mux-f", 16), ("mc-1g-e2o-backup", 17), ("mc-e1-1sfp", 18), ("mc-e1-2sfp", 19), ("mc-100m-sfp", 20), ("mc-1g-o2o-backup", 21), ("mc-cwdm-4", 22), ("mc-cwdm-8", 23), ("mc-10g-oeo-2r", 24), ("mc-qca8334", 25), ("mc-e1t1", 26), ("fr600f-mm", 100), ("fr600f-ms", 101), ("not-support", 102)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: coCardType.setStatus('current')
if mibBuilder.loadTexts: coCardType.setDescription("local card's type inserted in the chassis")
coCardDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 4), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: coCardDesc.setStatus('current')
if mibBuilder.loadTexts: coCardDesc.setDescription("local card's description")
rmtCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 100, 101, 102))).clone(namedValues=NamedValues(("no-card", 0), ("ip113sr", 1), ("ip113f", 2), ("mc-1g-e2o", 3), ("mc-1g-o2o", 4), ("mc-4-25g-oeor", 5), ("mc-ip175dr", 6), ("mc-10g-oeor", 7), ("mc-10g-oeer", 8), ("mc-FANr", 9), ("mc-10g-oeo-1rr", 10), ("mc-2-5gr", 11), ("mc-40g-oeor", 12), ("mc-2-5g-tr", 13), ("mc-2-5g-fr", 14), ("mc-2-5g-mux-tr", 15), ("mc-2-5g-mux-fr", 16), ("mc-1g-e2o-backupr", 17), ("mc-e1-1sfpr", 18), ("mc-e1-2sfpr", 19), ("mc-100m-sfpr", 20), ("mc-1g-o2o-backupr", 21), ("mc-cwdmr-4", 22), ("mc-cwdmr-8", 23), ("mc-10g-oeo-2rr", 24), ("mc-qca8334r", 25), ("mc-e1t1r", 26), ("fr600f-mm", 100), ("fr600f-ms", 101), ("not-support", 102)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rmtCardType.setStatus('current')
if mibBuilder.loadTexts: rmtCardType.setDescription("remote card's type connect with the local converter")
rmtCardDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 3, 1, 1, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rmtCardDesc.setStatus('current')
if mibBuilder.loadTexts: rmtCardDesc.setDescription("remote card's description")
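# A hedged usage sketch (host, community and index values are placeholders):
# the type of the card plugged into shelf 1 / slot 3 can be read from the
# coCardType column of slotTable defined above, e.g.
#   snmpget -v2c -c public 192.0.2.1 1.3.6.1.4.1.6688.1.1.1.3.1.1.3.1.3
# where the two trailing sub-identifiers are the shelfIdx and slotIdx indices.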
cardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4))
nmuObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1))
nmuConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1))
nmuType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(100, 101, 102))).clone(namedValues=NamedValues(("fr600f-mm", 100), ("fr600f-ms", 101), ("not-support", 102)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: nmuType.setStatus('current')
if mibBuilder.loadTexts: nmuType.setDescription('The type of NMU (network management unit)')
ipaddr = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ipaddr.setStatus('current')
if mibBuilder.loadTexts: ipaddr.setDescription('The ethernet IP address of NMU (network management unit)')
subnet = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 3), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: subnet.setStatus('current')
if mibBuilder.loadTexts: subnet.setDescription('The etherent mask address of NMU (network management unit)')
gateway = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 4), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gateway.setStatus('current')
if mibBuilder.loadTexts: gateway.setDescription('The ethernet gateway address of NMU (network management unit)')
sysContact = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 5), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sysContact.setStatus('current')
if mibBuilder.loadTexts: sysContact.setDescription('Mirror of the system.sysContact.0')
sysName = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 6), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sysName.setStatus('current')
if mibBuilder.loadTexts: sysName.setDescription('Mirror of the system.sysName.0')
sysLocation = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 7), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sysLocation.setStatus('current')
if mibBuilder.loadTexts: sysLocation.setDescription('Mirror of the system.sysLocation.0')
trapHost1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapHost1.setStatus('current')
if mibBuilder.loadTexts: trapHost1.setDescription("The first host's IP address used to receive trap messages; when set to 0 it simply deletes this entry. This applies to trap hosts 2~4 below as well.")
trapHost2 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 9), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapHost2.setStatus('current')
if mibBuilder.loadTexts: trapHost2.setDescription("The second host's IP address used to receive trap messages")
trapHost3 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 10), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapHost3.setStatus('current')
if mibBuilder.loadTexts: trapHost3.setDescription("The third host's IP address used to receive trap messages")
trapHost4 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 1, 1, 11), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: trapHost4.setStatus('current')
if mibBuilder.loadTexts: trapHost4.setDescription("The fourth host's IP address used to receive trap messages")
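# Sketch of configuring a trap receiver (the write community and addresses are
# assumptions): trapHost1 is a read-write IpAddress scalar, so with net-snmp it
# could be set as follows ('a' is the IpAddress type code, '.0' the scalar
# instance):
#   snmpset -v2c -c private 192.0.2.1 1.3.6.1.4.1.6688.1.1.1.4.1.1.8.0 a 192.0.2.50
# Per the description above, writing 0 (0.0.0.0) removes the entry again.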
mcCmObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2))
mcCmTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1), )
if mibBuilder.loadTexts: mcCmTable.setStatus('current')
if mibBuilder.loadTexts: mcCmTable.setDescription('MC Configuration table')
mcCmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcCmEntry.setStatus('current')
if mibBuilder.loadTexts: mcCmEntry.setDescription('MC Configuration entry definition')
mcShelfIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("master", 1), ("slave1", 2), ("slave2", 3), ("slave3", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcShelfIdx.setStatus('current')
if mibBuilder.loadTexts: mcShelfIdx.setDescription('Shelf index')
mcCardIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16))).clone(namedValues=NamedValues(("card01", 1), ("card02", 2), ("card03", 3), ("card04", 4), ("card05", 5), ("card06", 6), ("card07", 7), ("card08", 8), ("card09", 9), ("card10", 10), ("card11", 11), ("card12", 12), ("card13", 13), ("card14", 14), ("card15", 15), ("card16", 16)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcCardIdx.setStatus('current')
if mibBuilder.loadTexts: mcCardIdx.setDescription('Card index')
mcType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26))).clone(namedValues=NamedValues(("no-card", 0), ("ip113s", 1), ("ip113f", 2), ("mc-1g-e2o", 3), ("mc-1g-o2o", 4), ("mc-4-25g-oeo", 5), ("mc-ip175d", 6), ("mc-10g-oeo", 7), ("mc-10g-oee", 8), ("mc-FAN", 9), ("mc-10g-oeo-1r", 10), ("mc-2-5g", 11), ("mc-40g-oeo", 12), ("mc-2-5g-t", 13), ("mc-2-5g-f", 14), ("mc-2-5g-mux-t", 15), ("mc-2-5g-mux-f", 16), ("mc-1g-e2o-backup", 17), ("mc-e1-1sfp", 18), ("mc-e1-2sfp", 19), ("mc-100m-sfp", 20), ("mc-1g-o2o-backup", 21), ("mc-cwdm-4", 22), ("mc-cwdm-8", 23), ("mc-10g-oeo-2r", 24), ("mc-qca8334", 25), ("mc-e1t1", 26)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcType.setStatus('current')
if mibBuilder.loadTexts: mcType.setDescription("Center card's type")
mcTransceiverMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("bidi", 1), ("duplex-fiber", 2), ("sfp", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTransceiverMode.setStatus('current')
if mibBuilder.loadTexts: mcTransceiverMode.setDescription("Center card's optical transceiver mode. The 100M card supports bidi/duplex-fiber; the 1G card supports bidi/duplex-fiber/sfp. Once sfp is given, the following mcTransceiverDist should be ignored.")
mcTransceiverDist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 120))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTransceiverDist.setStatus('current')
if mibBuilder.loadTexts: mcTransceiverDist.setDescription("Center card's optical transceiver distance, 1 means 550m for duplex-fiber mode in case of 1G card, otherwise it represents the real distance (unit of km).")
mcPortState = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("locked", 1), ("unlocked", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcPortState.setStatus('current')
if mibBuilder.loadTexts: mcPortState.setDescription("Center card's port status, locked or unlocked")
mcTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcTransmitMode.setStatus('current')
if mibBuilder.loadTexts: mcTransmitMode.setDescription("Center card's transmit mode")
mcCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcCurWorkMode.setDescription("Center card's current work mode")
mcCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcCfgWorkMode.setDescription("Center card's configurable work mode")
mcLFPCfg = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcLFPCfg.setStatus('current')
if mibBuilder.loadTexts: mcLFPCfg.setDescription('Remote fault detect function, valid only on center MC card')
mcUpStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 11), Gauge32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcUpStream.setStatus('current')
if mibBuilder.loadTexts: mcUpStream.setDescription("Center card's up stream of MC")
mcDownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 12), Gauge32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcDownStream.setStatus('current')
if mibBuilder.loadTexts: mcDownStream.setDescription("Center card's down stream of MC")
mcTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTxlink.setStatus('current')
if mibBuilder.loadTexts: mcTxlink.setDescription("Center card's electrical port's link status")
mcFxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcFxlink.setStatus('current')
if mibBuilder.loadTexts: mcFxlink.setDescription("Center card's optical port's link status")
mcHWLFP = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcHWLFP.setStatus('current')
if mibBuilder.loadTexts: mcHWLFP.setDescription("Center card's HW LFP, not applicable for 1G card")
mcHWTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcHWTransmitMode.setStatus('current')
if mibBuilder.loadTexts: mcHWTransmitMode.setDescription("Center card's HW transmit mode, not applicable for 1G card")
mcHWWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcHWWorkMode.setDescription("Center card's HW work mode, not applicable for 1G card")
mcHWRmtCtrlMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcHWRmtCtrlMode.setStatus('current')
if mibBuilder.loadTexts: mcHWRmtCtrlMode.setDescription("Center card's HW remote control mode (only valid for local card). the disable mode indicates that all SET operations must be prohibited")
mcNtwSfpExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwSfpExist.setStatus('current')
if mibBuilder.loadTexts: mcNtwSfpExist.setDescription("Center 1G card's Network SFP indication")
mcAccSfpExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccSfpExist.setStatus('current')
if mibBuilder.loadTexts: mcAccSfpExist.setDescription("Center 1G card's Access SFP indication, applicable only for O2O type")
mcUtility = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("idle", 1), ("reset", 2), ("default", 3), ("set2hw", 4), ("not-support", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcUtility.setStatus('current')
if mibBuilder.loadTexts: mcUtility.setDescription('reset, default to factory, set to HW word, etc...')
mcRmtDetect = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("no-remote", 0), ("yes", 1), ("not-support", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtDetect.setStatus('current')
if mibBuilder.loadTexts: mcRmtDetect.setDescription('An identifier to indicate if there is a remote MC currently connecting to system or not')
mcRmtType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26))).clone(namedValues=NamedValues(("no-card", 0), ("ip113sr", 1), ("ip113f", 2), ("mc-1g-e2or", 3), ("mc-1g-o2or", 4), ("mc-4-25g-oeor", 5), ("mc-ip175dr", 6), ("mc-10g-oeor", 7), ("mc-10g-oeer", 8), ("mc-FANr", 9), ("mc-10g-oeo-1rr", 10), ("mc-2-5gr", 11), ("mc-40g-oeor", 12), ("mc-2-5g-tr", 13), ("mc-2-5g-fr", 14), ("mc-2-5g-mux-tr", 15), ("mc-2-5g-mux-fr", 16), ("mc-1g-e2o-backupr", 17), ("mc-e1-1sfpr", 18), ("mc-e1-2sfpr", 19), ("mc-100m-sfpr", 20), ("mc-1g-o2o-backupr", 21), ("mc-cwdmr-4", 22), ("mc-cwdmr-8", 23), ("mc-10g-oeo-2rr", 24), ("mc-qca8334r", 25), ("mc-e1t1r", 26)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtType.setStatus('current')
if mibBuilder.loadTexts: mcRmtType.setDescription("Remote card's type")
mcRmtTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtTransmitMode.setStatus('current')
if mibBuilder.loadTexts: mcRmtTransmitMode.setDescription("Remote card's transmit mode")
mcRmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtCurWorkMode.setDescription("Remote card's current work mode")
mcRmtCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtCfgWorkMode.setDescription("Remote card's configurable work mode")
mcRmtLFP = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtLFP.setStatus('current')
if mibBuilder.loadTexts: mcRmtLFP.setDescription("Remote card's LFP lamp state")
mcRmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtTxlink.setStatus('current')
if mibBuilder.loadTexts: mcRmtTxlink.setDescription("Remote card's electrical port status")
mcRmtHWLFP = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtHWLFP.setStatus('current')
if mibBuilder.loadTexts: mcRmtHWLFP.setDescription("Remote card's HW LFP, not applicable for 1G card")
mcRmtHWTransmitMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("cut-through", 1), ("store-forward", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtHWTransmitMode.setStatus('current')
if mibBuilder.loadTexts: mcRmtHWTransmitMode.setDescription("Remote card's HW transmit mode, not applicable for 1G card")
mcRmtHWWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 31), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcRmtHWWorkMode.setDescription("Remote card's HW work mode, not applicable for 1G card")
mcRmtLoopback = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 32), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmtLoopback.setDescription("Remote card's HW Loopback state")
mcRmtPwrDown = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 33), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("powerdown", 1), ("normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtPwrDown.setStatus('current')
if mibBuilder.loadTexts: mcRmtPwrDown.setDescription("Remote card's power down state")
mcRmtAccSfpExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 34), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtAccSfpExist.setStatus('current')
if mibBuilder.loadTexts: mcRmtAccSfpExist.setDescription("Remote 1G card's Access SFP indication, applicable only for O2O type")
mcRmtUtility = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 1, 1, 35), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("no-card", 0), ("idle", 1), ("reset", 2), ("default", 3), ("set2hw", 4), ("not-support", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtUtility.setStatus('current')
if mibBuilder.loadTexts: mcRmtUtility.setDescription("Remote card's reset, default to factory, set to HW word, etc...")
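# Illustrative read/write against mcCmTable (host, community and indices are
# placeholders): rows are indexed by mcShelfIdx.mcCardIdx, so for the card in
# shelf 1 / card slot 2:
#   snmpget -v2c -c public  192.0.2.1 1.3.6.1.4.1.6688.1.1.1.4.2.1.1.3.1.2
#   snmpset -v2c -c private 192.0.2.1 1.3.6.1.4.1.6688.1.1.1.4.2.1.1.6.1.2 i 1
# The first reads mcType (column 3); the second writes mcPortState (column 6),
# where integer 1 corresponds to locked(1) in its enumeration.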
mcCm1gSpecificObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2))
mcCm1gIpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1))
mcCm1gIpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1), )
if mibBuilder.loadTexts: mcCm1gIpTable.setStatus('current')
if mibBuilder.loadTexts: mcCm1gIpTable.setDescription('MC 1G Ip address table')
mcCm1gIpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mcCm1gIpEntry.setStatus('current')
if mibBuilder.loadTexts: mcCm1gIpEntry.setDescription('MC 1G Ip address entry definition')
mcLoOrRmtFg = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("local", 1), ("remote", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcLoOrRmtFg.setStatus('current')
if mibBuilder.loadTexts: mcLoOrRmtFg.setDescription('location index, local or remote')
mcIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 1, 1, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIpAddr.setStatus('current')
if mibBuilder.loadTexts: mcIpAddr.setDescription('The Ip address of the node')
mcCm1gSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2))
mcCm1gSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1), )
if mibBuilder.loadTexts: mcCm1gSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcCm1gSfpTable.setDescription('MC 1G SFP table')
mcCm1gSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mcCm1gSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcCm1gSfpEntry.setDescription('MC 1G SFP entry definition')
getSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
sfpCompliance = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpCompliance.setStatus('current')
if mibBuilder.loadTexts: sfpCompliance.setDescription('SFP compliance (one byte) if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
sfpConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpConnector.setStatus('current')
if mibBuilder.loadTexts: sfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
sfpTransCode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpTransCode.setStatus('current')
if mibBuilder.loadTexts: sfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit1: Copper Module bit2: MultiMode bit3: MultiMode others: unsupported')
sfpSmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpSmLength.setStatus('current')
if mibBuilder.loadTexts: sfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
sfpMmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpMmLength.setStatus('current')
if mibBuilder.loadTexts: sfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
sfpCopperLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: sfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
sfpBrSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: sfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
sfpWavelength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpWavelength.setStatus('current')
if mibBuilder.loadTexts: sfpWavelength.setDescription('SFP laser wavelength (one word)')
sfpTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpTemperature.setStatus('current')
if mibBuilder.loadTexts: sfpTemperature.setDescription('SFP temperature (one type, signed)')
sfpTranPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpTranPower.setStatus('current')
if mibBuilder.loadTexts: sfpTranPower.setDescription('SFP tx power (one type, signed)')
sfpRecvPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: sfpRecvPower.setDescription('SFP rx power (one type, signed)')
sfpVoltage = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 2, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sfpVoltage.setStatus('current')
if mibBuilder.loadTexts: sfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
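# Per the getSfpCmd description above, the diagnostic columns of mcCm1gSfpTable
# are only refreshed after writing getSfpCmd; a rough sequence for the local
# side of shelf 1 / card 2 (everything except the OIDs is an assumption):
#   snmpset -v2c -c private 192.0.2.1 1.3.6.1.4.1.6688.1.1.1.4.2.2.2.1.1.1.1.2.1 i 1
#   snmpget -v2c -c public  192.0.2.1 1.3.6.1.4.1.6688.1.1.1.4.2.2.2.1.1.12.1.2.1
# i.e. set getSfpCmd to local(1), then read sfpRecvPower for index
# mcShelfIdx.mcCardIdx.mcLoOrRmtFg = 1.2.1.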
mcCm1gAccSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3))
mcCm1gAccSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1), )
if mibBuilder.loadTexts: mcCm1gAccSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcCm1gAccSfpTable.setDescription('MC 1G Access SFP table')
mcCm1gAccSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mcCm1gAccSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcCm1gAccSfpEntry.setDescription('MC 1G Access SFP entry definition')
getAccSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getAccSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getAccSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
accsfpCompliance = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpCompliance.setStatus('current')
if mibBuilder.loadTexts: accsfpCompliance.setDescription('SFP compliance (one byte) if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
accsfpConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpConnector.setStatus('current')
if mibBuilder.loadTexts: accsfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
accsfpTransCode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpTransCode.setStatus('current')
if mibBuilder.loadTexts: accsfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit2: MultiMode bit3: MultiMode others: unsupported')
accsfpSmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpSmLength.setStatus('current')
if mibBuilder.loadTexts: accsfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
accsfpMmLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpMmLength.setStatus('current')
if mibBuilder.loadTexts: accsfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
accsfpCopperLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: accsfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
accsfpBrSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: accsfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
accsfpWavelength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpWavelength.setStatus('current')
if mibBuilder.loadTexts: accsfpWavelength.setDescription('SFP laser wavelength (one word)')
accsfpTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpTemperature.setStatus('current')
if mibBuilder.loadTexts: accsfpTemperature.setDescription('SFP temperature (one type, signed)')
accsfpTranPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpTranPower.setStatus('current')
if mibBuilder.loadTexts: accsfpTranPower.setDescription('SFP tx power (one type, signed)')
accsfpRecvPower = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: accsfpRecvPower.setDescription('SFP rx power (one type, signed)')
accsfpVoltage = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 2, 3, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: accsfpVoltage.setStatus('current')
if mibBuilder.loadTexts: accsfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
mcIP175DObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3))
mcIP175DCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1))
mcIP175DCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1, 1), )
if mibBuilder.loadTexts: mcIP175DCardTable.setStatus('current')
if mibBuilder.loadTexts: mcIP175DCardTable.setDescription('MC IP175D Configuration table')
mcIP175DCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcIP175DCardEntry.setStatus('current')
if mibBuilder.loadTexts: mcIP175DCardEntry.setDescription('MC Configuration entry definition')
mcIP175DVlanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Normal", 1), ("mode1", 2), ("mode2", 3), ("not-support", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DVlanMode.setStatus('current')
if mibBuilder.loadTexts: mcIP175DVlanMode.setDescription("Center card's vlan mode")
mcIP175DPortObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2))
mcIP175DPortTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1), )
if mibBuilder.loadTexts: mcIP175DPortTable.setStatus('current')
if mibBuilder.loadTexts: mcIP175DPortTable.setDescription('MC IP175D Configuration table')
mcIP175DPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcIP175DPortIdx"))
if mibBuilder.loadTexts: mcIP175DPortEntry.setStatus('current')
if mibBuilder.loadTexts: mcIP175DPortEntry.setDescription('MC Configuration entry definition')
mcIP175DPortIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("port1", 1), ("port2", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DPortIdx.setStatus('current')
if mibBuilder.loadTexts: mcIP175DPortIdx.setDescription('Port index')
mcIP175DCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DCurWorkMode.setDescription("Center card's port current work mode")
mcIP175DCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DCfgWorkMode.setDescription("Center card's port configurable work mode")
mcIP175DUpStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(64, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DUpStream.setStatus('current')
if mibBuilder.loadTexts: mcIP175DUpStream.setDescription("Center card's port up stream of MC")
mcIP175DDownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(64, 100000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DDownStream.setStatus('current')
if mibBuilder.loadTexts: mcIP175DDownStream.setDescription("Center card's port down stream of MC")
mcIP175DTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DTxlink.setStatus('current')
if mibBuilder.loadTexts: mcIP175DTxlink.setDescription("Center card's port 1 electrical port's link status")
mcIP175DRmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DRmtCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DRmtCurWorkMode.setDescription("Remote card's port 1 current work mode")
mcIP175DRmtCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcIP175DRmtCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcIP175DRmtCfgWorkMode.setDescription("Remote card's port1 configurable work mode")
mcIP175DRmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 3, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcIP175DRmtTxlink.setStatus('current')
if mibBuilder.loadTexts: mcIP175DRmtTxlink.setDescription("Remote card's port electrical port status")
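# --- Illustrative usage sketch (not produced by the MIB compiler) -----------
# mcIP175DUpStream and mcIP175DDownStream above are read-write Gauge32 columns
# (range 64..100000) indexed by shelf, card and port. The helper below writes
# both in a single SET PDU with the pysnmp high-level API. Host, community and
# the shelf/card/port values are placeholders; the MIB does not state the unit
# of the rate values, so they are passed through unchanged.
def _exampleSetIp175dRateLimits(host='192.0.2.1', community='private',
                                shelf=1, card=1, port=1, up=2048, down=2048):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Gauge32, setCmd)
    index = '%d.%d.%d' % (shelf, card, port)
    upOid = '1.3.6.1.4.1.6688.1.1.1.4.2.3.2.1.1.4.' + index
    downOid = '1.3.6.1.4.1.6688.1.1.1.4.2.3.2.1.1.5.' + index
    errorIndication, errorStatus, errorIndex, varBinds = next(
        setCmd(SnmpEngine(), CommunityData(community, mpModel=1),
               UdpTransportTarget((host, 161)), ContextData(),
               ObjectType(ObjectIdentity(upOid), Gauge32(up)),
               ObjectType(ObjectIdentity(downOid), Gauge32(down))))
    if errorIndication or errorStatus:
        raise RuntimeError(errorIndication or errorStatus.prettyPrint())
    return varBinds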
mc4_25G_OEOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4)).setLabel("mc4-25G-OEOObjects")
mc4_25G_OEOCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1)).setLabel("mc4-25G-OEOCardObjects")
mc4_25G_OEOCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1), ).setLabel("mc4-25G-OEOCardTable")
if mibBuilder.loadTexts: mc4_25G_OEOCardTable.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOCardTable.setDescription('MC 4.25G OEO Configuration table')
mc4_25G_OEOCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1), ).setLabel("mc4-25G-OEOCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc4_25G_OEOCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOCardEntry.setDescription('MC Configuration entry definition')
mc4_25G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mc4-25G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc4_25G_OEOCurSpdMode.setDescription("Center card's current speed mode")
mc4_25G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mc4-25G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc4_25G_OEOCfgSpdMode.setDescription("Center card's config speed mode")
mc4_25G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOLoopback.setDescription("card's Loopback state")
mc4_25G_OEOWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOWorkMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEOWorkMode.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOWorkMode.setDescription("card's Work Mode")
mc4_25G_OEONtwPD = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc4-25G-OEONtwPD").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEONtwPD.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEONtwPD.setDescription("Center card's network side PD status")
mc4_25G_OEOAccPD = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOAccPD").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOAccPD.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOAccPD.setDescription("Center card's access side PD status")
mc4_25G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mc4-25G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOHWSpdMode.setDescription("Center card's HW speed mode")
mc4_25G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOHWLoopback.setDescription("card's HW Loopback state")
mc4_25G_OEOHWWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mc4-25G-OEOHWWorkMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEOHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEOHWWorkMode.setDescription("card's HW Work Mode")
mc4_25G_OEO_Test_Lock = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Lock", 1), ("Unlock", 2)))).setLabel("mc4-25G-OEO-Test-Lock").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Lock.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Lock.setDescription('test result lock or unlock')
mc4_25G_OEO_Test_Error_Counter = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 11), Integer32()).setLabel("mc4-25G-OEO-Test-Error-Counter").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Error_Counter.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Error_Counter.setDescription('test result error counter')
mc4_25G_OEO_Test_Continue_Time = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 12), Integer32()).setLabel("mc4-25G-OEO-Test-Continue-Time").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Continue_Time.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Continue_Time.setDescription('test continue time, in seconds')
mc4_25G_OEO_Test_Result = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Pass", 1), ("Error", 2)))).setLabel("mc4-25G-OEO-Test-Result").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Result.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Test_Result.setDescription('test result')
mc4_25G_OEO_Start_Test = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Start", 1), ("Stop", 2)))).setLabel("mc4-25G-OEO-Start-Test").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEO_Start_Test.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Start_Test.setDescription('start test and stop test')
mc4_25G_OEO_Get_Test_Rst = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("Get", 1)))).setLabel("mc4-25G-OEO-Get-Test-Rst").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc4_25G_OEO_Get_Test_Rst.setStatus('current')
if mibBuilder.loadTexts: mc4_25G_OEO_Get_Test_Rst.setDescription('get test result')
mcRmt4_25G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mcRmt4-25G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt4_25G_OEOCurSpdMode.setDescription("Remote card's current speed mode")
mcRmt4_25G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mcRmt4-25G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt4_25G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt4_25G_OEOCfgSpdMode.setDescription("Remote card's config speed mode")
mcRmt4_25G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt4_25G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOLoopback.setDescription("card's Loopback state")
mcRmt4_25G_OEOWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOWorkMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt4_25G_OEOWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOWorkMode.setDescription("card's Work Mode")
mcRmt4_25G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("Infini", 1), ("STM16", 2), ("STM4", 3), ("STM1", 4), ("FCx4", 5), ("FCx2", 6), ("FCx1", 7), ("GE", 8), ("FE", 9), ("ESCOM", 10), ("not-support", 11)))).setLabel("mcRmt4-25G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWSpdMode.setDescription("Remote card's HW speed mode")
mcRmt4_25G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWLoopback.setDescription("card's HW Loopback state")
mcRmt4_25G_OEOHWWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 4, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("repeater", 1), ("retimer", 2), ("not-support", 3)))).setLabel("mcRmt4-25G-OEOHWWorkMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWWorkMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt4_25G_OEOHWWorkMode.setDescription("card's HW Work Mode")
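# --- Illustrative usage sketch (not produced by the MIB compiler) -----------
# The mc4-25G-OEO-Test-* objects defined above form a small BER-test control
# set: start/stop the test, latch a result snapshot, then read the latched
# values. The sequence below is inferred from the object descriptions and may
# differ from the vendor's intended procedure; host, community and the
# shelf/card index are placeholders.
def _exampleRun425gOeoTest(host='192.0.2.1', community='private',
                           shelf=1, card=1, runSeconds=10):
    import time
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd, getCmd)
    engine = SnmpEngine()
    auth = CommunityData(community, mpModel=1)
    target = UdpTransportTarget((host, 161))
    ctx = ContextData()
    def oid(col):
        # Column OID under mc4-25G-OEOCardEntry plus the shelf/card index.
        return '1.3.6.1.4.1.6688.1.1.1.4.2.4.1.1.1.%d.%d.%d' % (col, shelf, card)
    def snmpSet(col, value):
        errInd, errStat, errIdx, varBinds = next(
            setCmd(engine, auth, target, ctx,
                   ObjectType(ObjectIdentity(oid(col)), Integer32(value))))
        if errInd or errStat:
            raise RuntimeError(errInd or errStat.prettyPrint())
    def snmpGet(col):
        errInd, errStat, errIdx, varBinds = next(
            getCmd(engine, auth, target, ctx,
                   ObjectType(ObjectIdentity(oid(col)))))
        if errInd or errStat:
            raise RuntimeError(errInd or errStat.prettyPrint())
        return int(varBinds[0][1])
    snmpSet(14, 1)            # mc4-25G-OEO-Start-Test: Start(1)
    time.sleep(runSeconds)    # let the test accumulate errors for a while
    snmpSet(15, 1)            # mc4-25G-OEO-Get-Test-Rst: Get(1), latch a snapshot
    results = {'lock': snmpGet(10),      # Lock(1) / Unlock(2)
               'errors': snmpGet(11),    # error counter
               'seconds': snmpGet(12),   # test continue time
               'result': snmpGet(13)}    # Pass(1) / Error(2)
    snmpSet(14, 2)            # mc4-25G-OEO-Start-Test: Stop(2)
    return results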
mc10G_OEOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5)).setLabel("mc10G-OEOObjects")
mc10G_OEOCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1)).setLabel("mc10G-OEOCardObjects")
mc10G_OEOCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1), ).setLabel("mc10G-OEOCardTable")
if mibBuilder.loadTexts: mc10G_OEOCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOCardTable.setDescription('MC 10G OEO Configuration table')
mc10G_OEOCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1), ).setLabel("mc10G-OEOCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10G_OEOCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOCardEntry.setDescription('MC Configuration entry definition')
mc10G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mc10G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEOCurSpdMode.setDescription("Center card's current speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mc10G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mc10G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEOCfgSpdMode.setDescription("Center card's config speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mc10G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOLoopback.setDescription("card's Loopback state")
mc10G_OEOSFP1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEOSFP1").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOSFP1.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOSFP1.setDescription("Center card's SFP1 link status")
mc10G_OEOSFP2 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEOSFP2").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOSFP2.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOSFP2.setDescription("Center card's SFP2 link status")
mc10G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mc10G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOHWSpdMode.setDescription("Center card's HW speed mode")
mc10G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEOHWLoopback.setDescription("card's HW Loopback state")
mc10G_OEO_Test_Lock = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Lock", 1), ("Unlock", 2)))).setLabel("mc10G-OEO-Test-Lock").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Lock.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Lock.setDescription('test result lock or unlock')
mc10G_OEO_Test_Error_Counter = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 9), Integer32()).setLabel("mc10G-OEO-Test-Error-Counter").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Error_Counter.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Error_Counter.setDescription('test result error counter')
mc10G_OEO_Test_Continue_Time = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 10), Integer32()).setLabel("mc10G-OEO-Test-Continue-Time").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Continue_Time.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Continue_Time.setDescription('test continue time, in seconds')
mc10G_OEO_Test_Result = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Pass", 1), ("Error", 2)))).setLabel("mc10G-OEO-Test-Result").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_Test_Result.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Test_Result.setDescription('test result')
mc10G_OEO_Start_Test = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("Start", 1), ("Stop", 2)))).setLabel("mc10G-OEO-Start-Test").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO_Start_Test.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Start_Test.setDescription('start test and stop test')
mc10G_OEO_Get_Test_Rst = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("Get", 1)))).setLabel("mc10G-OEO-Get-Test-Rst").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO_Get_Test_Rst.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_Get_Test_Rst.setDescription('get test result')
mcRmt10G_OEOCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mcRmt10G-OEOCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt10G_OEOCurSpdMode.setDescription("Remote card's current speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mcRmt10G_OEOCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mcRmt10G-OEOCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt10G_OEOCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmt10G_OEOCfgSpdMode.setDescription("Remote card's config speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mcRmt10G_OEOLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt10G-OEOLoopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmt10G_OEOLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOLoopback.setDescription("card's Loopback state")
mcRmt10G_OEOHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 4))).clone(namedValues=NamedValues(("LAN", 1), ("WAN", 2), ("not-support", 4)))).setLabel("mcRmt10G-OEOHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOHWSpdMode.setDescription("Remote card's HW speed mode 10G LAN(10.3125G) and 10G WAN(9.95328G)")
mcRmt10G_OEOHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mcRmt10G-OEOHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOHWLoopback.setDescription("card's HW Loopback state")
mcRmt10G_OEOSFP1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mcRmt10G-OEOSFP1").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEOSFP1.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEOSFP1.setDescription("card's SFP1 link status")
mc10G_OEO_accType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO-accType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_accType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_accType.setDescription('Access side transceiver module type (XFP/SFP)')
mc10G_OEO_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO_ntwType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO_ntwType.setDescription('Network side transceiver module type (XFP/SFP)')
mcRmt10G_OEO_accType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mcRmt10G-OEO-accType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEO_accType.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEO_accType.setDescription("Remote card's access side transceiver module type (XFP/SFP)")
mcRmt10G_OEO_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 5, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mcRmt10G-OEO-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmt10G_OEO_ntwType.setStatus('current')
if mibBuilder.loadTexts: mcRmt10G_OEO_ntwType.setDescription("Remote card's network side transceiver module type (XFP/SFP)")
mc10G_OEEObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6)).setLabel("mc10G-OEEObjects")
mc10G_OEECardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1)).setLabel("mc10G-OEECardObjects")
mc10G_OEECardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1), ).setLabel("mc10G-OEECardTable")
if mibBuilder.loadTexts: mc10G_OEECardTable.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEECardTable.setDescription('MC 10G OEE Configuration table')
mc10G_OEECardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1), ).setLabel("mc10G-OEECardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10G_OEECardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEECardEntry.setDescription('MC Configuration entry definition')
mc10G_OEETxlink = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEETxlink").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEETxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEETxlink.setDescription("Center card's electrical port's link status")
mc10G_OEEFxlink = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEEFxlink").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEEFxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEEFxlink.setDescription("Center card's optical port's link status")
mc10G_OEECurSpd = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("m10G-Master", 7), ("m10G-Slave", 8), ("not-support", 9)))).setLabel("mc10G-OEECurSpd").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEECurSpd.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEECurSpd.setDescription("Local card's current speed")
mc10G_OEELoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEELoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEELoopMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEELoopMode.setDescription("card's Loopback state")
mc10G_OEESpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 7, 8))).clone(namedValues=NamedValues(("auto", 1), ("m10G-Master", 7), ("m10G-Slave", 8)))).setLabel("mc10G-OEESpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEESpdMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEESpdMode.setDescription("card's speed mode")
mc10G_OEEHWLoopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEEHWLoopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEEHWLoopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEEHWLoopback.setDescription("card's Loopback state")
mc10G_OEE_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEE-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEE_ntwType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEE_ntwType.setDescription('Network side transceiver module type (XFP/SFP)')
mc10G_OEE_checkResult = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 6, 1, 1, 1, 8), Integer32()).setLabel("mc10G-OEE-checkResult").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEE_checkResult.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEE_checkResult.setDescription('test result')
mcFanObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7))
mcFanCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1))
mcFanCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1, 1), )
if mibBuilder.loadTexts: mcFanCardTable.setStatus('current')
if mibBuilder.loadTexts: mcFanCardTable.setDescription('MC fan card table')
mcFanCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcFanCardEntry.setStatus('current')
if mibBuilder.loadTexts: mcFanCardEntry.setDescription('MC Configuration entry definition')
mcFanStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 7, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Normal", 1), ("Abnormal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcFanStatus.setStatus('mandatory')
if mibBuilder.loadTexts: mcFanStatus.setDescription("Center card's fan status")
mc40G_OEOObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8)).setLabel("mc40G-OEOObjects")
mc40G_OEOCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1)).setLabel("mc40G-OEOCardObjects")
mc40G_OEOCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1), ).setLabel("mc40G-OEOCardTable")
if mibBuilder.loadTexts: mc40G_OEOCardTable.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOCardTable.setDescription('MC 40G OEO Configuration table')
mc40G_OEOCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1), ).setLabel("mc40G-OEOCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc40G_OEOCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOCardEntry.setDescription('MC Configuration entry definition')
mc40G_OEOQsfp1Lane1_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane1-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane1_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane1_link.setDescription("Center card's Qsfp1 Lane1 link status")
mc40G_OEOQsfp1Lane2_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane2-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane2_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane2_link.setDescription("Center card's Qsfp1 Lane2 link status")
mc40G_OEOQsfp1Lane3_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane3-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane3_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane3_link.setDescription("Center card's Qsfp1 Lane3 link status")
mc40G_OEOQsfp1Lane4_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp1Lane4-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane4_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp1Lane4_link.setDescription("Center card's Qsfp1 Lane4 link status")
mc40G_OEOQsfp2Lane1_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane1-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane1_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane1_link.setDescription("Center card's Qsfp2 Lane1 link status")
mc40G_OEOQsfp2Lane2_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane2-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane2_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane2_link.setDescription("Center card's Qsfp2 Lane2 link status")
mc40G_OEOQsfp2Lane3_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane3-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane3_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane3_link.setDescription("Center card's Qsfp2 Lane3 link status")
mc40G_OEOQsfp2Lane4_link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc40G-OEOQsfp2Lane4-link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane4_link.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOQsfp2Lane4_link.setDescription("Center card's Qsfp2 Lane4 link status")
mc40G_OEOLane1LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane1LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane1LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane1LoopMode.setDescription("card's Lane1 Loopback state")
mc40G_OEOLane2LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane2LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane2LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane2LoopMode.setDescription("card's Lane2 Loopback state")
mc40G_OEOLane3LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane3LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane3LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane3LoopMode.setDescription("card's Lane3 Loopback state")
mc40G_OEOLane4LoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOLane4LoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLane4LoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLane4LoopMode.setDescription("card's Lane4 Loopback state")
mc40G_OEOLoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("all", 1), ("line-side-enable", 2), ("host-side-enable", 3), ("disable", 4), ("not-support", 5)))).setLabel("mc40G-OEOLoopMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOLoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOLoopMode.setDescription("card's Loopback state")
mc40G_OEOSpeedMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("no-card", 0), ("mc40GSpeed-1", 1), ("mc40GSpeed-2", 2), ("mc40GSpeed-3", 3), ("mc40GSpeed-4", 4), ("mc40GSpeed-5", 5), ("mc40GSpeed-6", 6), ("mc40GSpeed-7", 7), ("mc40GSpeed-8", 8), ("mc40GSpeed-9", 9), ("mc40GSpeed-10", 10), ("mc40GSpeed-11", 11), ("mc40GSpeed-12", 12), ("mc40GSpeed-13", 13), ("not-support", 14)))).setLabel("mc40G-OEOSpeedMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc40G_OEOSpeedMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOSpeedMode.setDescription('speed1: 1X40G: 10G LAN(10312.5Mbps) speed2: 1X40G: OTU3(10754.60325Mbps) speed3: 1X40G: OTU3e2(11145.83875Mbps) speed4: 4X10G: 10G LAN(10312.5Mbps) speed5: 4X10G: CPRI(9830.4 Mbps) speed6: 4X10G: OC-192/STM-64(9953.28Mbps) speed7: 4X10G: OC-192/STM-64(10664.228571427Mbps) speed8: 4X10G: OC-192/STM-64(10709.225316455Mbps) speed9: 4X10G: 10G Ethernet(11049.107142857Mbps) speed10: 4X10G: 10GFibreChannel(10518.750Mbps) speed11: 4X10G: 10GFibreChannel(11270.089285714Mbps) speed12: 4X10G: 10GFibreChannel(11317.642405063Mbps) speed13: 4X10G: 10GInfiniband(10000.00Mbps)')
mc40G_OEOHWLoopMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("line-side-enable", 1), ("host-side-enable", 2), ("disable", 3), ("not-support", 4)))).setLabel("mc40G-OEOHWLoopMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOHWLoopMode.setStatus('current')
if mibBuilder.loadTexts: mc40G_OEOHWLoopMode.setDescription("card's HW Loopback state")
mc40G_OEOHWSpeedMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 8, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("no-card", 0), ("mc40GSpeed-1", 1), ("mc40GSpeed-2", 2), ("mc40GSpeed-3", 3), ("mc40GSpeed-4", 4), ("mc40GSpeed-5", 5), ("mc40GSpeed-6", 6), ("mc40GSpeed-7", 7), ("mc40GSpeed-8", 8), ("mc40GSpeed-9", 9), ("mc40GSpeed-10", 10), ("mc40GSpeed-11", 11), ("mc40GSpeed-12", 12), ("not-support", 13)))).setLabel("mc40G-OEOHWSpeedMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc40G_OEOHWSpeedMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc40G_OEOHWSpeedMode.setDescription('speed1: 1X40G: 10G LAN(10312.5Mbps) speed2: 1X40G: OTU3(10754.60325Mbps) speed3: 1X40G: OTU3e2(11145.83875Mbps) speed4: 4X10G: 10G LAN(10312.5Mbps) speed5: 4X10G: CPRI(9830.4 Mbps) speed6: 4X10G: OC-192/STM-64(9953.28Mbps) speed7: 4X10G: OC-192/STM-64(10664.228571427Mbps) speed8: 4X10G: OC-192/STM-64(10709.225316455Mbps) speed9: 4X10G: 10G Ethernet(11049.107142857Mbps) speed10: 4X10G: 10GFibreChannel(10518.750Mbps) speed11: 4X10G: 10GFibreChannel(11270.089285714Mbps) speed12: 4X10G: 10GFibreChannel(11317.642405063Mbps)')
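# --- Convenience lookup (not produced by the MIB compiler) ------------------
# Nominal line rates, in Mbps, for the mc40G-OEOSpeedMode and
# mc40G-OEOHWSpeedMode enumerations, transcribed from the descriptions above;
# speed13 (10G Infiniband) exists only for the writable SpeedMode object.
_MC40G_SPEED_MBPS = {
    1: 10312.5,             # 1X40G: 10G LAN
    2: 10754.60325,         # 1X40G: OTU3
    3: 11145.83875,         # 1X40G: OTU3e2
    4: 10312.5,             # 4X10G: 10G LAN
    5: 9830.4,              # 4X10G: CPRI
    6: 9953.28,             # 4X10G: OC-192/STM-64
    7: 10664.228571427,     # 4X10G: OC-192/STM-64
    8: 10709.225316455,     # 4X10G: OC-192/STM-64
    9: 11049.107142857,     # 4X10G: 10G Ethernet
    10: 10518.750,          # 4X10G: 10G Fibre Channel
    11: 11270.089285714,    # 4X10G: 10G Fibre Channel
    12: 11317.642405063,    # 4X10G: 10G Fibre Channel
    13: 10000.00,           # 4X10G: 10G Infiniband (SpeedMode only)
}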
mcQsfpSpecificObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9))
mcNtwQSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1))
mcNtwQSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1), )
if mibBuilder.loadTexts: mcNtwQSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcNtwQSfpTable.setDescription('MC Ntw QSFP table')
mcNtwQSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcNtwQSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcNtwQSfpEntry.setDescription('MC Ntw QSFP entry definition')
getNtwQSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getNtwQSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getNtwQSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
qsfpNtwConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwConnector.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwConnector.setDescription('SFP connector type (one byte) 0x07: LC 0x0B: Optical Pigtail 0x0C: MPO 0x21: Copper Pigtail others: unsupported')
qsfpNtwTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTemperature.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTemperature.setDescription('SFP temperature (one type, signed)')
qsfpNtwTxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower1.setDescription('SFP tx power (one type, signed)')
qsfpNtwTxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower2.setDescription('SFP tx power (one type, signed)')
qsfpNtwTxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower3.setDescription('SFP tx power (one type, signed)')
qsfpNtwTxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwTxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwTxPower4.setDescription('SFP tx power (one type, signed)')
qsfpNtwRxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower1.setDescription('SFP rx power (one type, signed)')
qsfpNtwRxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower2.setDescription('SFP rx power (one type, signed)')
qsfpNtwRxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower3.setDescription('SFP rx power (one type, signed)')
qsfpNtwRxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 1, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpNtwRxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpNtwRxPower4.setDescription('SFP rx power (one type, signed)')
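# --- Convenience lookup (not produced by the MIB compiler) ------------------
# Connector codes reported by qsfpNtwConnector above and qsfpAccConnector
# below, transcribed from their descriptions; any other code is unsupported.
_QSFP_CONNECTOR_NAMES = {
    0x07: 'LC',
    0x0B: 'Optical Pigtail',
    0x0C: 'MPO',
    0x21: 'Copper Pigtail',
}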
mcAccQSfpObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2))
mcAccQSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1), )
if mibBuilder.loadTexts: mcAccQSfpTable.setStatus('current')
if mibBuilder.loadTexts: mcAccQSfpTable.setDescription('MC Acc QSFP table')
mcAccQSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcAccQSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mcAccQSfpEntry.setDescription('MC Acc QSFP entry definition')
getAccQSfpCmd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: getAccQSfpCmd.setStatus('current')
if mibBuilder.loadTexts: getAccQSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
qsfpAccConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccConnector.setStatus('current')
if mibBuilder.loadTexts: qsfpAccConnector.setDescription('SFP connector type (one byte) 0x07: LC 0x0B: Optical Pigtail 0x0C: MPO 0x21: Copper Pigtail others: unsupported')
qsfpAccTemperature = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTemperature.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTemperature.setDescription('SFP temperature (one type, signed)')
qsfpAccTxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower1.setDescription('SFP tx power (one type, signed)')
qsfpAccTxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower2.setDescription('SFP tx power (one type, signed)')
qsfpAccTxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower3.setDescription('SFP tx power (one type, signed)')
qsfpAccTxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccTxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpAccTxPower4.setDescription('SFP tx power (one type, signed)')
qsfpAccRxPower1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower1.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower1.setDescription('SFP rx power (one type, signed)')
qsfpAccRxPower2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower2.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower2.setDescription('SFP rx power (one type, signed)')
qsfpAccRxPower3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower3.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower3.setDescription('SFP rx power (one type, signed)')
qsfpAccRxPower4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 9, 2, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: qsfpAccRxPower4.setStatus('current')
if mibBuilder.loadTexts: qsfpAccRxPower4.setDescription('SFP rx power (one type, signed)')
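# --- Illustrative usage sketch (not produced by the MIB compiler) -----------
# Per the getAccQSfpCmd description, a manager should write that object first
# so the agent refreshes its cached QSFP data, then read the per-lane powers.
# Host, community and the shelf/card index are placeholders; the raw power
# readings are returned unscaled because the MIB does not define their units.
def _exampleReadAccQsfpLanePowers(host='192.0.2.1', community='private',
                                  shelf=1, card=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd, getCmd)
    def oid(col):
        # Column OID under mcAccQSfpEntry plus the shelf/card index.
        return '1.3.6.1.4.1.6688.1.1.1.4.2.9.2.1.1.%d.%d.%d' % (col, shelf, card)
    engine = SnmpEngine()
    auth = CommunityData(community, mpModel=1)
    target = UdpTransportTarget((host, 161))
    ctx = ContextData()
    # getAccQSfpCmd: local(1) asks the agent to refresh the local QSFP data.
    errInd, errStat, errIdx, varBinds = next(
        setCmd(engine, auth, target, ctx,
               ObjectType(ObjectIdentity(oid(1)), Integer32(1))))
    if errInd or errStat:
        raise RuntimeError(errInd or errStat.prettyPrint())
    # Columns 4..7 are qsfpAccTxPower1..4, columns 8..11 are qsfpAccRxPower1..4.
    powerCols = [ObjectType(ObjectIdentity(oid(col))) for col in range(4, 12)]
    errInd, errStat, errIdx, varBinds = next(
        getCmd(engine, auth, target, ctx, *powerCols))
    if errInd or errStat:
        raise RuntimeError(errInd or errStat.prettyPrint())
    values = [int(vb[1]) for vb in varBinds]
    return {'tx': values[:4], 'rx': values[4:]}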
mc2_5GMCObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10)).setLabel("mc2-5GMCObjects")
mc2_5GMCSFP3Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1)).setLabel("mc2-5GMCSFP3Objects")
mc2_5Cm1gSfpTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1), ).setLabel("mc2-5Cm1gSfpTable")
if mibBuilder.loadTexts: mc2_5Cm1gSfpTable.setStatus('current')
if mibBuilder.loadTexts: mc2_5Cm1gSfpTable.setDescription('MC 1G SFP table')
mc2_5Cm1gSfpEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1), ).setLabel("mc2-5Cm1gSfpEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mc2_5Cm1gSfpEntry.setStatus('current')
if mibBuilder.loadTexts: mc2_5Cm1gSfpEntry.setDescription('MC 1G SFP entry definition')
mc2_5g_getSfpCmd = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setLabel("mc2-5g-getSfpCmd").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc2_5g_getSfpCmd.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_getSfpCmd.setDescription('This command will get the updated sfp information. Please send this command prior to getting the following params, otherwise the history sfp information will be sent back.')
mc2_5g_sfpCompliance = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 2), Integer32()).setLabel("mc2-5g-sfpCompliance").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpCompliance.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpCompliance.setDescription('SFP compliance (one byte) if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
mc2_5g_sfpConnector = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 3), Integer32()).setLabel("mc2-5g-sfpConnector").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpConnector.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
mc2_5g_sfpTransCode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 4), Integer32()).setLabel("mc2-5g-sfpTransCode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpTransCode.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit2: MultiMode bit3: MultiMode others: unsupported')
mc2_5g_sfpSmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 5), Integer32()).setLabel("mc2-5g-sfpSmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpSmLength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
mc2_5g_sfpMmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 6), Integer32()).setLabel("mc2-5g-sfpMmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpMmLength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
mc2_5g_sfpCopperLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 7), Integer32()).setLabel("mc2-5g-sfpCopperLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
mc2_5g_sfpBrSpeed = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 8), Integer32()).setLabel("mc2-5g-sfpBrSpeed").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
mc2_5g_sfpWavelength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 9), Integer32()).setLabel("mc2-5g-sfpWavelength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpWavelength.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpWavelength.setDescription('SFP laser wavelength (one word)')
mc2_5g_sfpTemperature = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 10), Integer32()).setLabel("mc2-5g-sfpTemperature").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpTemperature.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpTemperature.setDescription('SFP temperature (one type, signed)')
mc2_5g_sfpTranPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 11), Integer32()).setLabel("mc2-5g-sfpTranPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpTranPower.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpTranPower.setDescription('SFP tx power (one type, signed)')
mc2_5g_sfpRecvPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 12), Integer32()).setLabel("mc2-5g-sfpRecvPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpRecvPower.setDescription('SFP rx power (one type, signed)')
mc2_5g_sfpVoltage = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 1, 1, 1, 13), Integer32()).setLabel("mc2-5g-sfpVoltage").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5g_sfpVoltage.setStatus('current')
if mibBuilder.loadTexts: mc2_5g_sfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
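# --- Example (not part of the generated MIB definitions) --------------------
# A minimal sketch of how a manager could read the 2.5G card SFP DDM objects
# defined above with the pysnmp hlapi.  The agent address, community string
# and the instance index (here '1.1.1') are placeholder assumptions, not
# values taken from this MIB.  As noted in the mc2-5g-sfpCompliance
# description, the temperature/power readings should be ignored when the
# compliance byte is 0.
def example_read_mc2_5g_sfp_ddm(host='192.0.2.1', community='public', index='1.1.1'):
    # Imported inside the function so that loading this compiled MIB module
    # stays free of side effects.
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    entry = '1.3.6.1.4.1.6688.1.1.1.4.2.10.1.1.1.'   # mc2-5g SFP entry OID (see above)
    columns = {'compliance': 2, 'temperature': 10, 'txPower': 11,
               'rxPower': 12, 'voltage': 13}
    readings = {}
    for name, col in columns.items():
        errInd, errStat, _, varBinds = next(
            getCmd(SnmpEngine(), CommunityData(community),
                   UdpTransportTarget((host, 161)), ContextData(),
                   ObjectType(ObjectIdentity(entry + '%d.%s' % (col, index)))))
        if errInd or errStat:
            raise RuntimeError('SNMP error reading %s: %s' % (name, errInd or errStat.prettyPrint()))
        readings[name] = int(varBinds[0][1])
    if readings['compliance'] == 0:
        # DDM not supported by this SFP; only the compliance byte is meaningful.
        readings = {'compliance': 0}
    return readings
# ----------------------------------------------------------------------------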
mc2_5GMCCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2)).setLabel("mc2-5GMCCardObjects")
mc2_5GMCCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1), ).setLabel("mc2-5GMCCardTable")
if mibBuilder.loadTexts: mc2_5GMCCardTable.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCCardTable.setDescription('MC 2-5GMC Configuration table')
mc2_5GMCCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1), ).setLabel("mc2-5GMCCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc2_5GMCCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCCardEntry.setDescription('MC Configuration entry definition')
mc2_5GMCSfp3Exist = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setLabel("mc2-5GMCSfp3Exist").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCSfp3Exist.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCSfp3Exist.setDescription("Center 1G card's SFP3 indication")
mc2_5GMCPort1link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc2-5GMCPort1link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCPort1link.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCPort1link.setDescription("Center card's electrical port1's link status")
mc2_5GMCPort2link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc2-5GMCPort2link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCPort2link.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCPort2link.setDescription("Center card's electrical port2's link status")
mc2_5GMCPort3link = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 10, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc2-5GMCPort3link").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc2_5GMCPort3link.setStatus('current')
if mibBuilder.loadTexts: mc2_5GMCPort3link.setDescription("Center card's electrical port3's link status")
mcE1Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11))
mcE1CardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1))
mcE1CardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1), )
if mibBuilder.loadTexts: mcE1CardTable.setStatus('current')
if mibBuilder.loadTexts: mcE1CardTable.setDescription('MC E1 + Eth Configuration table')
mcE1CardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcE1CardEntry.setStatus('current')
if mibBuilder.loadTexts: mcE1CardEntry.setDescription('MC Configuration entry definition')
mcE1Txlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Txlink.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1Txlink.setDescription("Center card's electrical port's link status")
mcE1TxCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1TxCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1TxCurWorkMode.setDescription("Center card's current work mode")
mcE1SFP1Link = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1SFP1Link.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1SFP1Link.setDescription("Center card's SFP1 port's link status")
mcE1Port1LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port1LOS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1LOS.setDescription("card's E1 Port1 Los state")
mcE1Port1AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port1AIS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1AIS.setDescription("card's E1 Port1 AIS state")
mcE1Port1CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port1CV.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1CV.setDescription("card's E1 Port1 CV state")
mcE1Port2LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port2LOS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2LOS.setDescription("card's E1 Port2 Los state")
mcE1Port2AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port2AIS.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2AIS.setDescription("card's E1 Port2 AIS state")
mcE1Port2CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1Port2CV.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2CV.setDescription("card's E1 Port2 CV state")
mcE1Port1Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1Port1Loop.setStatus('current')
if mibBuilder.loadTexts: mcE1Port1Loop.setDescription("card's Port1 Loopback state")
mcE1Port2Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1Port2Loop.setStatus('current')
if mibBuilder.loadTexts: mcE1Port2Loop.setDescription("card's Port2 Loopback state")
mcRmtE1Txlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Txlink.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtE1Txlink.setDescription("Remote card's electrical port's link status")
mcRmtE1TxCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1TxCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtE1TxCurWorkMode.setDescription("Remote card's current work mode")
mcRmtE1SFP1Link = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1SFP1Link.setStatus('mandatory')
if mibBuilder.loadTexts: mcRmtE1SFP1Link.setDescription("Remote card's SFP1 port's link status")
mcRmtE1Port1LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port1LOS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1LOS.setDescription("Remote card's E1 Port1 Los state")
mcRmtE1Port1AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port1AIS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1AIS.setDescription("Remote card's E1 Port1 AIS state")
mcRmtE1Port1CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port1CV.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1CV.setDescription("Remote card's E1 Port1 CV state")
mcRmtE1Port2LOS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port2LOS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2LOS.setDescription("Remote card's E1 Port2 Los state")
mcRmtE1Port2AIS = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port2AIS.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2AIS.setDescription("Remote card's E1 Port2 AIS state")
mcRmtE1Port2CV = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("alarm", 1), ("e1normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRmtE1Port2CV.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2CV.setDescription("Remote card's E1 Port2 CV state")
mcRmtE1Port1Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtE1Port1Loop.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port1Loop.setDescription("Remote card's Port1 Loopback state")
mcRmtE1Port2Loop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 11, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("external", 1), ("internal", 2), ("disabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcRmtE1Port2Loop.setStatus('current')
if mibBuilder.loadTexts: mcRmtE1Port2Loop.setDescription("Remote card's Port2 Loopback state")
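# --- Example (not part of the generated MIB definitions) --------------------
# A minimal sketch of reading the local E1 port 1 alarm columns (LOS/AIS/CV)
# and optionally driving the port 1 loopback control defined above.  Host,
# community and the 'shelf.card' index '1.1' are placeholder assumptions;
# loopback values follow the NamedValues above (external=1, internal=2,
# disabled=3).
def example_mc_e1_port1(host='192.0.2.1', community='public', index='1.1', loopback=None):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, getCmd, setCmd)
    entry = '1.3.6.1.4.1.6688.1.1.1.4.2.11.1.1.1.'   # mcE1CardEntry OID (see above)
    auth = (SnmpEngine(), CommunityData(community),
            UdpTransportTarget((host, 161)), ContextData())
    alarms = {}
    for name, col in (('LOS', 4), ('AIS', 5), ('CV', 6)):
        errInd, errStat, _, varBinds = next(
            getCmd(*auth, ObjectType(ObjectIdentity(entry + '%d.%s' % (col, index)))))
        if errInd or errStat:
            raise RuntimeError('SNMP error reading %s: %s' % (name, errInd or errStat.prettyPrint()))
        alarms[name] = int(varBinds[0][1])          # 1=alarm, 2=normal, 3=not supported
    if loopback is not None:
        errInd, errStat, _, _ = next(
            setCmd(*auth, ObjectType(ObjectIdentity(entry + '10.%s' % index),
                                     Integer32(loopback))))   # column 10 = mcE1Port1Loop
        if errInd or errStat:
            raise RuntimeError('SNMP set failed: %s' % (errInd or errStat.prettyPrint()))
    return alarms
# ----------------------------------------------------------------------------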
mc1GE2OObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12))
mc1GE2OCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1))
mc1GE2OCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1), )
if mibBuilder.loadTexts: mc1GE2OCardTable.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OCardTable.setDescription('MC E2O Fiber backup Configuration table')
mc1GE2OCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc1GE2OCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OCardEntry.setDescription('MC Configuration entry definition')
mc1GE2OPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2OPort1SFPlink.setDescription("Center card's port1 SFP's link status")
mc1GE2OPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2OPort2SFPlink.setDescription("Center card's port2 SFP's link status")
mc1GE2OTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OTxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2OTxlink.setDescription("Center card's electrical port's link status")
mc1GE2OPortPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc1GE2OPortPri.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPortPri.setDescription("Center card's Port Pri state")
mc1GE2OPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPort1SFPExist.setDescription('E2O Port1 SFP indication')
mc1GE2OPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPort2SFPExist.setDescription('E2O Port2 SFP indication')
mc1GE2OPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2OPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GE2OPortHWPri.setDescription("Center card's Port Hardware Pri state")
mc1GE2ORmtPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPlink.setDescription("Remote card's port1 SFP's link status")
mc1GE2ORmtPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPlink.setDescription("Remote card's port2 SFP's link status")
mc1GE2ORmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtTxlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GE2ORmtTxlink.setDescription("Remote card's electrical port's link status")
mc1GE2ORmtPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2ORmtPort1SFPExist.setDescription('E2O Port1 SFP indication')
mc1GE2ORmtPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GE2ORmtPort2SFPExist.setDescription('E2O Port2 SFP indication')
mc1GE2ORmtPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 12, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GE2ORmtPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GE2ORmtPortHWPri.setDescription("Remote card's Port Hardware Pri state")
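# --- Example (not part of the generated MIB definitions) --------------------
# A minimal sketch of selecting the primary fiber port on the 1G E2O backup
# card via mc1GE2OPortPri (column 4; Port1=1, Port2=2) and then reading the
# hardware priority reported by column 7.  Host, community and the
# 'shelf.card' index are placeholder assumptions.
def example_mc1g_e2o_set_primary(host='192.0.2.1', community='public',
                                 index='1.1', primary_port=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, getCmd, setCmd)
    entry = '1.3.6.1.4.1.6688.1.1.1.4.2.12.1.1.1.'   # mc1GE2OCardEntry OID (see above)
    auth = (SnmpEngine(), CommunityData(community),
            UdpTransportTarget((host, 161)), ContextData())
    errInd, errStat, _, _ = next(
        setCmd(*auth, ObjectType(ObjectIdentity(entry + '4.' + index),
                                 Integer32(primary_port))))
    if errInd or errStat:
        raise RuntimeError('SNMP set failed: %s' % (errInd or errStat.prettyPrint()))
    errInd, errStat, _, varBinds = next(
        getCmd(*auth, ObjectType(ObjectIdentity(entry + '7.' + index))))
    if errInd or errStat:
        raise RuntimeError('SNMP get failed: %s' % (errInd or errStat.prettyPrint()))
    return int(varBinds[0][1])                       # hardware priority: 1, 2 or 3 (not supported)
# ----------------------------------------------------------------------------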
mc1GO2OObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13))
mc1GO2OCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1))
mc1GO2OCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1), )
if mibBuilder.loadTexts: mc1GO2OCardTable.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OCardTable.setDescription('MC O2O Fiber backup Configuration table')
mc1GO2OCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc1GO2OCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OCardEntry.setDescription('MC Configuration entry definition')
mc1GO2OPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2OPort1SFPlink.setDescription("Center card's port1 SFP's link status")
mc1GO2OPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2OPort2SFPlink.setDescription("Center card's port2 SFP's link status")
mc1GO2OPort3SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort3SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2OPort3SFPlink.setDescription("Center card's port3 SFP's link status")
mc1GO2OPortPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc1GO2OPortPri.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPortPri.setDescription("Center card's Port Pri state")
mc1GO2OPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort1SFPExist.setDescription('O2O Port1 SFP indication')
mc1GO2OPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort2SFPExist.setDescription('O2O Port2 SFP indication')
mc1GO2OPort3SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort3SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort3SFPExist.setDescription('O2O Port3 SFP indication')
mc1GO2OPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPortHWPri.setDescription("Local card's Port Hardware Pri state")
mc1GO2OPort3HWSpd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("M100", 1), ("M1000", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2OPort3HWSpd.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OPort3HWSpd.setDescription("Local card's Port3 Hardware Speed state")
mc1GO2ORmtPort1SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPlink.setDescription("Remote card's port1 SFP's link status")
mc1GO2ORmtPort2SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPlink.setDescription("Remote card's port2 SFP's link status")
mc1GO2ORmtPort3SFPlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPlink.setStatus('mandatory')
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPlink.setDescription("Remote card's port3 SFP's link status")
mc1GO2ORmtPort1SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort1SFPExist.setDescription('O2O Port1 SFP indication')
mc1GO2ORmtPort2SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("no-card", 0), ("inserted", 1), ("removed", 2), ("na", 3), ("support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort2SFPExist.setDescription('O2O Port2 SFP indication')
mc1GO2ORmtPort3SFPExist = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("inserted", 1), ("removed", 2), ("na", 3), ("not-support", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPExist.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort3SFPExist.setDescription("Remote card's SFP3 indication")
mc1GO2ORmtPortHWPri = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Port1", 1), ("Port2", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPortHWPri.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPortHWPri.setDescription("Remote card's Port Hardware Pri state")
mc1GO2ORmtPort3HWSpd = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("M100", 1), ("M1000", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1GO2ORmtPort3HWSpd.setStatus('current')
if mibBuilder.loadTexts: mc1GO2ORmtPort3HWSpd.setDescription("Remote card's Port3 Hardware Speed state")
mc1GO2OSFP3Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2))
mc1GO2OSfp3Table = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1), )
if mibBuilder.loadTexts: mc1GO2OSfp3Table.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OSfp3Table.setDescription('MC 1G SFP table')
mc1GO2OSfp3Entry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcLoOrRmtFg"))
if mibBuilder.loadTexts: mc1GO2OSfp3Entry.setStatus('current')
if mibBuilder.loadTexts: mc1GO2OSfp3Entry.setDescription('MC 1G O2O SFP3 entry definition')
mc1go2o_getSfpCmd = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("na", 0), ("local", 1), ("remote", 2)))).setLabel("mc1go2o-getSfpCmd").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc1go2o_getSfpCmd.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_getSfpCmd.setDescription('This command triggers a refresh of the SFP information. Send it prior to reading the following parameters; otherwise the previously cached SFP information will be returned.')
mc1go2o_sfpCompliance = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 2), Integer32()).setLabel("mc1go2o-sfpCompliance").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpCompliance.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpCompliance.setDescription('SFP compliance (one byte); if 0 then the attributes of sfpTemperature/sfpTranPower/sfpRecvPower should be ignored')
mc1go2o_sfpConnector = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 3), Integer32()).setLabel("mc1go2o-sfpConnector").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpConnector.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpConnector.setDescription('SFP connector type (one byte) 0x01: SC 0x07: LC 0x22: RJ45 others: unsupported')
mc1go2o_sfpTransCode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 4), Integer32()).setLabel("mc1go2o-sfpTransCode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpTransCode.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpTransCode.setDescription('SFP transceiver code (one byte) bit0: SingleMode bit2: MultiMode bit3: MultiMode others: unsupported')
mc1go2o_sfpSmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 5), Integer32()).setLabel("mc1go2o-sfpSmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpSmLength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpSmLength.setDescription('SFP link length for SingleMode, units of km. (one byte) applicable only when sfpTransCode is SingleMode')
mc1go2o_sfpMmLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 6), Integer32()).setLabel("mc1go2o-sfpMmLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpMmLength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpMmLength.setDescription('SFP link length for MultiMode, units of 10m (one byte) applicable only when sfpTransCode is MultiMode')
mc1go2o_sfpCopperLength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 7), Integer32()).setLabel("mc1go2o-sfpCopperLength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpCopperLength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpCopperLength.setDescription('SFP link length for Copper, units of m (one byte) applicable only when sfpConnector is RJ45')
mc1go2o_sfpBrSpeed = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 8), Integer32()).setLabel("mc1go2o-sfpBrSpeed").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpBrSpeed.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpBrSpeed.setDescription('SFP nominal signalling rate, units of 100Mbit/s (one byte)')
mc1go2o_sfpWavelength = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 9), Integer32()).setLabel("mc1go2o-sfpWavelength").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpWavelength.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpWavelength.setDescription('SFP laser wavelength (one word)')
mc1go2o_sfpTemperature = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 10), Integer32()).setLabel("mc1go2o-sfpTemperature").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpTemperature.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpTemperature.setDescription('SFP temperature (one type, signed)')
mc1go2o_sfpTranPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 11), Integer32()).setLabel("mc1go2o-sfpTranPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpTranPower.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpTranPower.setDescription('SFP tx power (one type, signed)')
mc1go2o_sfpRecvPower = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 12), Integer32()).setLabel("mc1go2o-sfpRecvPower").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpRecvPower.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpRecvPower.setDescription('SFP rx power (one type, signed)')
mc1go2o_sfpVoltage = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 13, 2, 1, 1, 13), Integer32()).setLabel("mc1go2o-sfpVoltage").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc1go2o_sfpVoltage.setStatus('current')
if mibBuilder.loadTexts: mc1go2o_sfpVoltage.setDescription('SFP voltage, units of 0.1mV (one word)')
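# --- Example (not part of the generated MIB definitions) --------------------
# The mc1go2o-getSfpCmd description above says a refresh command must be sent
# before the SFP3 DDM values are read, otherwise cached data is returned.
# This is a minimal sketch of that write-then-read sequence with the pysnmp
# hlapi; host, community and the 'shelf.card.localOrRemote' index '1.1.1' are
# placeholder assumptions.
def example_mc1g_o2o_refresh_and_read_sfp3(host='192.0.2.1', community='public',
                                           index='1.1.1', side=1):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, getCmd, setCmd)
    entry = '1.3.6.1.4.1.6688.1.1.1.4.2.13.2.1.1.'   # mc1GO2OSfp3Entry OID (see above)
    auth = (SnmpEngine(), CommunityData(community),
            UdpTransportTarget((host, 161)), ContextData())
    # Step 1: trigger the refresh (column 1 = mc1go2o-getSfpCmd; local=1, remote=2).
    errInd, errStat, _, _ = next(
        setCmd(*auth, ObjectType(ObjectIdentity(entry + '1.' + index), Integer32(side))))
    if errInd or errStat:
        raise RuntimeError('refresh failed: %s' % (errInd or errStat.prettyPrint()))
    # Step 2: read the now-updated DDM columns (temperature, tx and rx power).
    values = {}
    for name, col in (('temperature', 10), ('txPower', 11), ('rxPower', 12)):
        errInd, errStat, _, varBinds = next(
            getCmd(*auth, ObjectType(ObjectIdentity(entry + '%d.%s' % (col, index)))))
        if errInd or errStat:
            raise RuntimeError('read of %s failed: %s' % (name, errInd or errStat.prettyPrint()))
        values[name] = int(varBinds[0][1])
    return values
# ----------------------------------------------------------------------------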
mc10GOEO1RObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14))
mc10GOEO1RCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1))
mc10GOEO1RCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1), )
if mibBuilder.loadTexts: mc10GOEO1RCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO1RCardTable.setDescription('MC 10G OEO 1R Configuration table')
mc10GOEO1RCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10GOEO1RCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO1RCardEntry.setDescription('MC Configuration entry definition')
mcAccXFP1WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunability.setDescription("XFP1's wavelength tunability")
mcAccXFP1WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1WaveLengthTunable.setDescription("XFP1's wavelength tunable status")
mcAccXFP1WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcAccXFP1WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1WaveLength.setDescription("XFP1's wavelength")
mcNtwXFP2WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunability.setDescription("XFP2's wavelength tunability")
mcNtwXFP2WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2WaveLengthTunable.setDescription("XFP2's wavelength tunable status")
mcNtwXFP2WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcNtwXFP2WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2WaveLength.setDescription("XFP2's wavelength")
mcAccXFP1TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcAccXFP1TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mcAccXFP1TunableType.setDescription("XFP1's wavelength tunable type")
mcNtwXFP2TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 14, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcNtwXFP2TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mcNtwXFP2TunableType.setDescription("XFP2's wavelength tunable type")
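# --- Example (not part of the generated MIB definitions) --------------------
# A minimal sketch of tuning the access-side XFP wavelength on the 10G OEO 1R
# card: check tunability (column 1), write the target wavelength (column 3)
# and poll the tunable status (column 2) until it reports Completed(2).
# Host, community, the 'shelf.card' index and the target value are placeholder
# assumptions; the unit/encoding expected by mcAccXFP1WaveLength is not
# specified in this MIB and must be taken from the device documentation.
def example_tune_acc_xfp1(host='192.0.2.1', community='public', index='1.1',
                          wavelength=1550, timeout_s=30):
    import time
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, getCmd, setCmd)
    entry = '1.3.6.1.4.1.6688.1.1.1.4.2.14.1.1.1.'   # mc10GOEO1RCardEntry OID (see above)
    auth = (SnmpEngine(), CommunityData(community),
            UdpTransportTarget((host, 161)), ContextData())
    def _get(col):
        errInd, errStat, _, varBinds = next(
            getCmd(*auth, ObjectType(ObjectIdentity(entry + '%d.%s' % (col, index)))))
        if errInd or errStat:
            raise RuntimeError(str(errInd or errStat.prettyPrint()))
        return int(varBinds[0][1])
    if _get(1) != 1:                                  # column 1: 1 = Supported
        raise RuntimeError('XFP1 does not support wavelength tuning')
    errInd, errStat, _, _ = next(
        setCmd(*auth, ObjectType(ObjectIdentity(entry + '3.' + index),
                                 Integer32(wavelength))))
    if errInd or errStat:
        raise RuntimeError('set failed: %s' % (errInd or errStat.prettyPrint()))
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        if _get(2) == 2:                              # column 2: 2 = Completed
            return True
        time.sleep(1)
    return False
# ----------------------------------------------------------------------------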
mc10GOEO3RObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15))
mc10GOEO3RCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1))
mc10GOEO3RCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1), )
if mibBuilder.loadTexts: mc10GOEO3RCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO3RCardTable.setDescription('MC 10G OEO 3R tunable wavelength Configuration table')
mc10GOEO3RCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10GOEO3RCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10GOEO3RCardEntry.setDescription('MC Configuration entry definition')
accXFP1WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accXFP1WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1WaveLengthTunability.setDescription("XFP1's wavelength tunability")
accXFP1WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accXFP1WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1WaveLengthTunable.setDescription("XFP1's wavelength tunable status")
accXFP1WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: accXFP1WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1WaveLength.setDescription("XFP1's wavelength")
ntwXFP2WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunability.setDescription("XFP2's wavelength tunability")
ntwXFP2WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2WaveLengthTunable.setDescription("XFP2's wavelength tunable status")
ntwXFP2WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ntwXFP2WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2WaveLength.setDescription("XFP2's wavelength")
accXFP1TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accXFP1TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: accXFP1TunableType.setDescription("XFP1's wavelength tunable type")
ntwXFP2TunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 15, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntwXFP2TunableType.setStatus('mandatory')
if mibBuilder.loadTexts: ntwXFP2TunableType.setDescription("XFP2's wavelength tunable type")
mcCWDMObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16))
mcCWDMCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1))
mcCWDMCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1), )
if mibBuilder.loadTexts: mcCWDMCardTable.setStatus('current')
if mibBuilder.loadTexts: mcCWDMCardTable.setDescription('MC CWDM table')
mcCWDMCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcCWDMCardEntry.setStatus('current')
if mibBuilder.loadTexts: mcCWDMCardEntry.setDescription('MC Configuration entry definition')
cwdmWavelengthCount = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelengthCount.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelengthCount.setDescription('wavelength count')
cwdmWavelength1 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength1.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength1.setDescription('CWDM Card wavelength 1')
cwdmWavelength2 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength2.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength2.setDescription('CWDM Card wavelength 2')
cwdmWavelength3 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength3.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength3.setDescription('CWDM Card wavelength 3')
cwdmWavelength4 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength4.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength4.setDescription('CWDM Card wavelength 4')
cwdmWavelength5 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength5.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength5.setDescription('CWDM Card wavelength 5')
cwdmWavelength6 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength6.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength6.setDescription('CWDM Card wavelength 6')
cwdmWavelength7 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength7.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength7.setDescription('CWDM Card wavelength 7')
cwdmWavelength8 = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 16, 1, 1, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cwdmWavelength8.setStatus('mandatory')
if mibBuilder.loadTexts: cwdmWavelength8.setDescription('CWDM Card wavelength 8')
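# --- Example (not part of the generated MIB definitions) --------------------
# A minimal sketch of listing the wavelengths advertised by a CWDM card:
# read cwdmWavelengthCount (column 1) and then cwdmWavelength1..8
# (columns 2..9, DisplayString).  Host, community and the 'shelf.card'
# index are placeholder assumptions.
def example_read_cwdm_wavelengths(host='192.0.2.1', community='public', index='1.1'):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    entry = '1.3.6.1.4.1.6688.1.1.1.4.2.16.1.1.1.'   # mcCWDMCardEntry OID (see above)
    auth = (SnmpEngine(), CommunityData(community),
            UdpTransportTarget((host, 161)), ContextData())
    def _get(col):
        errInd, errStat, _, varBinds = next(
            getCmd(*auth, ObjectType(ObjectIdentity(entry + '%d.%s' % (col, index)))))
        if errInd or errStat:
            raise RuntimeError(str(errInd or errStat.prettyPrint()))
        return varBinds[0][1]
    count = int(_get(1))
    # Only the first 'count' wavelength strings are expected to be meaningful.
    return [_get(col).prettyPrint() for col in range(2, 2 + min(count, 8))]
# ----------------------------------------------------------------------------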
mc10G_OEO2RObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17)).setLabel("mc10G-OEO2RObjects")
mc10G_OEO2RCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1)).setLabel("mc10G-OEO2RCardObjects")
mc10G_OEO2RCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1), ).setLabel("mc10G-OEO2RCardTable")
if mibBuilder.loadTexts: mc10G_OEO2RCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RCardTable.setDescription('MC 10G OEO 2R Configuration table')
mc10G_OEO2RCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1), ).setLabel("mc10G-OEO2RCardEntry").setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10G_OEO2RCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RCardEntry.setDescription('MC Configuration entry definition')
mc10G_OEO2RCurSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Speed85", 1), ("Speed103to117", 2), ("Speed995to113", 3), ("not-support", 4)))).setLabel("mc10G-OEO2RCurSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RCurSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2RCurSpdMode.setDescription("Center card's current speed mode 1: 8.5G 2: 10.3G-11.7G 3: 9.95G-11.3G ")
mc10G_OEO2RCfgSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Speed85", 1), ("Speed103to117", 2), ("Speed995to113", 3), ("not-support", 4)))).setLabel("mc10G-OEO2RCfgSpdMode").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO2RCfgSpdMode.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2RCfgSpdMode.setDescription("Center card's configured speed mode 1: 8.5G 2: 10.3G-11.7G 3: 9.95G-11.3G")
mc10G_OEO2RSFP1Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP1Loopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO2RSFP1Loopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RSFP1Loopback.setDescription("card's SFP1 Loopback state")
mc10G_OEO2RSFP2Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP2Loopback").setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10G_OEO2RSFP2Loopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RSFP2Loopback.setDescription("card's SFP2 Loopback state")
mc10G_OEO2RSFP1 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP1").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RSFP1.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RSFP1.setDescription("Center card's SFP1 link status")
mc10G_OEO2RSFP2 = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RSFP2").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RSFP2.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RSFP2.setDescription("Center card's SFP2 link status")
mc10G_OEO2RHWSpdMode = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Speed85", 1), ("Speed103to117", 2), ("Speed995to113", 3), ("not-support", 4)))).setLabel("mc10G-OEO2RHWSpdMode").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RHWSpdMode.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RHWSpdMode.setDescription("Center card's hardware speed mode 1: 8.5G 2: 10.3G-11.7G 3: 9.95G-11.3G")
mc10G_OEO2RHWSFP1Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RHWSFP1Loopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RHWSFP1Loopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RHWSFP1Loopback.setDescription("card's HW Loopback state")
mc10G_OEO2RHWSFP2Loopback = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setLabel("mc10G-OEO2RHWSFP2Loopback").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RHWSFP2Loopback.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2RHWSFP2Loopback.setDescription("card's HW Loopback state")
mc10G_OEO2RVersion = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 10), DisplayString()).setLabel("mc10G-OEO2RVersion").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2RVersion.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2RVersion.setDescription('MC version')
mc10GXFP1WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunability.setDescription("XFP1's wavelength tunability")
mc10GXFP1WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP1WaveLengthTunable.setDescription("XFP1's wavelength tunable status")
mc10GXFP1WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 13), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10GXFP1WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP1WaveLength.setDescription("XFP1's wavelength")
mc10GXFP2WaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunability.setDescription("XFP2's wavelength tunability")
mc10GXFP2WaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP2WaveLengthTunable.setDescription("XFP2's wavelength tunable status")
mc10GXFP2WaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 16), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mc10GXFP2WaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: mc10GXFP2WaveLength.setDescription("XFP2's wavelength")
mc10G_OEO2R_accType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO2R-accType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2R_accType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2R_accType.setDescription('Access-side transceiver type (XFP or SFP)')
mc10G_OEO2R_ntwType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("XFP", 1), ("SFP", 2), ("unknow", 3)))).setLabel("mc10G-OEO2R-ntwType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2R_ntwType.setStatus('current')
if mibBuilder.loadTexts: mc10G_OEO2R_ntwType.setDescription('Network-side transceiver type (XFP or SFP)')
mc10G_OEO2R_accTunableType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setLabel("mc10G-OEO2R-accTunableType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2R_accTunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2R_accTunableType.setDescription("XFP1's wavelength tunable type")
mc10G_OEO2R_ntwTunableType = MibScalar((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 17, 1, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setLabel("mc10G-OEO2R-ntwTunableType").setMaxAccess("readonly")
if mibBuilder.loadTexts: mc10G_OEO2R_ntwTunableType.setStatus('mandatory')
if mibBuilder.loadTexts: mc10G_OEO2R_ntwTunableType.setDescription("XFP2's wavelength tunable type")
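# --- Example (not part of the generated MIB definitions) --------------------
# A minimal sketch of changing the 10G OEO 2R speed mode: write the desired
# mode to mc10G-OEO2RCfgSpdMode (column 2; Speed85=1, Speed103to117=2,
# Speed995to113=3) and read back mc10G-OEO2RCurSpdMode (column 1).  Host,
# community and the 'shelf.card' index are placeholder assumptions.
def example_set_oeo2r_speed_mode(host='192.0.2.1', community='public',
                                 index='1.1', mode=3):
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, getCmd, setCmd)
    entry = '1.3.6.1.4.1.6688.1.1.1.4.2.17.1.1.1.'   # mc10G-OEO2RCardEntry OID (see above)
    auth = (SnmpEngine(), CommunityData(community),
            UdpTransportTarget((host, 161)), ContextData())
    errInd, errStat, _, _ = next(
        setCmd(*auth, ObjectType(ObjectIdentity(entry + '2.' + index), Integer32(mode))))
    if errInd or errStat:
        raise RuntimeError('set failed: %s' % (errInd or errStat.prettyPrint()))
    errInd, errStat, _, varBinds = next(
        getCmd(*auth, ObjectType(ObjectIdentity(entry + '1.' + index))))
    if errInd or errStat:
        raise RuntimeError('get failed: %s' % (errInd or errStat.prettyPrint()))
    return int(varBinds[0][1])                        # current speed mode as reported by the card
# ----------------------------------------------------------------------------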
mcQCA8334Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18))
mcQCA8334CardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1))
mcQCA8334CardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1, 1), )
if mibBuilder.loadTexts: mcQCA8334CardTable.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334CardTable.setDescription('MC QCA8334 Configuration table')
mcQCA8334CardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcQCA8334CardEntry.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334CardEntry.setDescription('MC Configuration entry definition')
mcQCA8334VlanMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("Normal", 1), ("mode1", 2), ("mode2", 3), ("not-support", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334VlanMode.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334VlanMode.setDescription("Center card's vlan mode")
mcQCA8334PortObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2))
mcQCA8334PortTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1), )
if mibBuilder.loadTexts: mcQCA8334PortTable.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334PortTable.setDescription('MC QCA8334 Configuration table')
mcQCA8334PortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"), (0, "XXX-MIB", "mcQCA8334PortIdx"))
if mibBuilder.loadTexts: mcQCA8334PortEntry.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334PortEntry.setDescription('MC Configuration entry definition')
mcQCA8334PortIdx = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("port1", 1), ("port2", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334PortIdx.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334PortIdx.setDescription('Port index')
mcQCA8334CurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334CurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcQCA8334CurWorkMode.setDescription("Center card's port current work mode")
mcQCA8334CfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334CfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcQCA8334CfgWorkMode.setDescription("Center card's port configurable work mode")
mcQCA8334UpStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 4), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(32, 1000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334UpStream.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334UpStream.setDescription("Center card's port upstream rate of MC")
mcQCA8334DownStream = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 5), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(32, 1000000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334DownStream.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334DownStream.setDescription("Center card's port downstream rate of MC")
mcQCA8334Txlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334Txlink.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334Txlink.setDescription("Center card's port 1 electrical link status")
mcQCA8334RmtCurWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334RmtCurWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcQCA8334RmtCurWorkMode.setDescription("Remote card's port 1 current work mode")
mcQCA8334RmtCfgWorkMode = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("no-card", 0), ("mAuto", 1), ("m100-full", 2), ("m100-half", 3), ("m10-full", 4), ("m10-half", 5), ("m1G-full", 6), ("not-support", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcQCA8334RmtCfgWorkMode.setStatus('mandatory')
if mibBuilder.loadTexts: mcQCA8334RmtCfgWorkMode.setDescription("Remote card's port1 configurable work mode")
mcQCA8334RmtTxlink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 18, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("no-card", 0), ("up", 1), ("down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcQCA8334RmtTxlink.setStatus('current')
if mibBuilder.loadTexts: mcQCA8334RmtTxlink.setDescription("Remote card's electrical port link status")
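# --- mcE1T1 group ----------------------------------------------------------
# E1/T1 interface card: line type, link state, loss-of-signal and AIS
# alarms, Tx/Fx loopback controls and coding type, duplicated once for the
# local (center) card and once for the remote card.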
mcE1T1Objects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19))
mcE1T1CardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1))
mcE1T1CardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1), )
if mibBuilder.loadTexts: mcE1T1CardTable.setStatus('current')
if mibBuilder.loadTexts: mcE1T1CardTable.setDescription('MC E1T1 Configuration table')
mcE1T1CardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcE1T1CardEntry.setStatus('current')
if mibBuilder.loadTexts: mcE1T1CardEntry.setDescription('MC Configuration entry definition')
mcE1T1Type = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("E1", 1), ("T1", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1Type.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1T1Type.setDescription("Center card's current line type (E1 or T1)")
mcE1T1FLink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Up", 1), ("Down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1FLink.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1T1FLink.setDescription("Center card's current link")
mcE1T1FLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1FLossAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1FLossAlarm.setDescription('')
mcE1T1TLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1TLossAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1TLossAlarm.setDescription('')
mcE1T1AISAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1AISAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1AISAlarm.setDescription('')
mcE1T1TLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1TLoop.setStatus('current')
if mibBuilder.loadTexts: mcE1T1TLoop.setDescription('Tx Loopback state')
mcE1T1FLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1FLoop.setStatus('current')
if mibBuilder.loadTexts: mcE1T1FLoop.setDescription('Fx Loopback state')
mcE1T1CodeType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("E1-HDB3-Or-T1-B8ZS", 1), ("AMI", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1CodeType.setStatus('current')
if mibBuilder.loadTexts: mcE1T1CodeType.setDescription('coding type')
mcE1T1Version = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1Version.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1T1Version.setDescription('MC version')
mcE1T1RmtFLink = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Up", 1), ("Down", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1RmtFLink.setStatus('mandatory')
if mibBuilder.loadTexts: mcE1T1RmtFLink.setDescription("Remote card's current link")
mcE1T1RmtFLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1RmtFLossAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtFLossAlarm.setDescription('')
mcE1T1RmtTLossAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1RmtTLossAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtTLossAlarm.setDescription('')
mcE1T1RmtAISAlarm = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Alarm", 1), ("Normal", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcE1T1RmtAISAlarm.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtAISAlarm.setDescription('')
mcE1T1RmtTLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1RmtTLoop.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtTLoop.setDescription('Tx Loopback state')
mcE1T1RmtFLoop = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1RmtFLoop.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtFLoop.setDescription('Fx Loopback state')
mcE1T1RmtCodeType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 19, 1, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("E1-HDB3-Or-T1-B8ZS", 1), ("AMI", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcE1T1RmtCodeType.setStatus('current')
if mibBuilder.loadTexts: mcE1T1RmtCodeType.setDescription('coding type')
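# --- mc10GOEEXFPTunable group ----------------------------------------------
# 10G OEE card with a tunable XFP: tunability flag, tuning-in-progress
# status, the wavelength itself (read-write) and the tuning type
# (by channel or by wavelength).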
mc10GOEEXFPTunableObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20))
mc10GOEEXFPTunableCardObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1))
mc10GOEEXFPTunableCardTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1), )
if mibBuilder.loadTexts: mc10GOEEXFPTunableCardTable.setStatus('current')
if mibBuilder.loadTexts: mc10GOEEXFPTunableCardTable.setDescription('MC 10G OEE tunable wavelength Configuration table')
mc10GOEEXFPTunableCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mc10GOEEXFPTunableCardEntry.setStatus('current')
if mibBuilder.loadTexts: mc10GOEEXFPTunableCardEntry.setDescription('MC Configuration entry definition')
xfpWaveLengthTunability = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Supported", 1), ("Unsupported", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: xfpWaveLengthTunability.setStatus('mandatory')
if mibBuilder.loadTexts: xfpWaveLengthTunability.setDescription("XFP's wavelength tunability")
xfpWaveLengthTunable = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("Doing", 1), ("Completed", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: xfpWaveLengthTunable.setStatus('mandatory')
if mibBuilder.loadTexts: xfpWaveLengthTunable.setDescription("XFP's wavelength tunable status")
xfpWaveLength = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: xfpWaveLength.setStatus('mandatory')
if mibBuilder.loadTexts: xfpWaveLength.setDescription("XFP's wavelength")
xfpTunableType = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 2, 20, 1, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("channel", 1), ("wavelength", 2), ("not-support", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: xfpTunableType.setStatus('mandatory')
if mibBuilder.loadTexts: xfpTunableType.setDescription("XFP's wavelength tunable type")
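# A minimal sketch (not part of the generated MIB) of how a manager could
# read the tunable wavelength with pysnmp's hlapi.  The agent address, the
# community string and the (shelf, card) index values 1, 3 are assumptions
# used purely for illustration; 'XXX-MIB' must be resolvable by the MIB
# resolver (e.g. via ObjectIdentity(...).addMibSource(...)):
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, getCmd)
#   errInd, errStat, errIdx, varBinds = next(getCmd(
#       SnmpEngine(), CommunityData('public'),
#       UdpTransportTarget(('192.0.2.10', 161)), ContextData(),
#       ObjectType(ObjectIdentity('XXX-MIB', 'xfpWaveLength', 1, 3))))
#   # varBinds[0] then holds the current wavelength of the selected card.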
mcPmObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3))
mcPmTable = MibTable((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1), )
if mibBuilder.loadTexts: mcPmTable.setStatus('current')
if mibBuilder.loadTexts: mcPmTable.setDescription('MC Performance table')
mcPmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1), ).setIndexNames((0, "XXX-MIB", "mcShelfIdx"), (0, "XXX-MIB", "mcCardIdx"))
if mibBuilder.loadTexts: mcPmEntry.setStatus('current')
if mibBuilder.loadTexts: mcPmEntry.setDescription('MC Performance entry definition')
mcRxByteHi = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRxByteHi.setStatus('current')
if mibBuilder.loadTexts: mcRxByteHi.setDescription('The total number of received bytes (high)')
mcRxByteLo = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcRxByteLo.setStatus('current')
if mibBuilder.loadTexts: mcRxByteLo.setDescription('The total number of received bytes (low)')
mcTxByteHi = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTxByteHi.setStatus('current')
if mibBuilder.loadTexts: mcTxByteHi.setDescription('The total number of transmit bytes (high)')
mcTxByteLo = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mcTxByteLo.setStatus('current')
if mibBuilder.loadTexts: mcTxByteLo.setDescription('The total number of transmit bytes (low)')
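# The byte counters above are exposed as 32-bit high/low halves.  The helper
# below is an illustrative addition (not produced by the MIB compiler)
# showing how a manager would reassemble the two halves after fetching both
# columns:
def _mc_pm_total_bytes(hi, lo):
    """Combine a ...Hi/...Lo counter pair into a single 64-bit value."""
    return (int(hi) << 32) | int(lo)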
mcPmRest = MibTableColumn((1, 3, 6, 1, 4, 1, 6688, 1, 1, 1, 4, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("idle", 1), ("reset", 2), ("not-support", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mcPmRest.setStatus('current')
if mibBuilder.loadTexts: mcPmRest.setDescription('Reset the performance counters (write reset(2) to clear)')
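# --- Notifications (traps) -------------------------------------------------
# Everything below 1.3.6.1.4.1.6688.1.1.2 is a NotificationType.  The first
# block covers shelf-level events: shelf detected/lost, PSU A/B on/off and
# fan on/off.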
shelf_Detected = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 1)).setLabel("shelf-Detected").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_Detected.setStatus('current')
if mibBuilder.loadTexts: shelf_Detected.setDescription('A slave shelf is detected (shelf traps use numbers 1~19)')
shelf_Lost = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 2)).setLabel("shelf-Lost").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_Lost.setStatus('current')
if mibBuilder.loadTexts: shelf_Lost.setDescription('A shelf is lost')
shelf_psuA_On = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 3)).setLabel("shelf-psuA-On").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_psuA_On.setStatus('current')
if mibBuilder.loadTexts: shelf_psuA_On.setDescription('PSU A is detected')
shelf_psuA_Off = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 4)).setLabel("shelf-psuA-Off").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_psuA_Off.setStatus('current')
if mibBuilder.loadTexts: shelf_psuA_Off.setDescription('PSU A is lost')
shelf_psuB_On = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 5)).setLabel("shelf-psuB-On").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_psuB_On.setStatus('current')
if mibBuilder.loadTexts: shelf_psuB_On.setDescription('PSU B is detected')
shelf_psuB_Off = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 6)).setLabel("shelf-psuB-Off").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_psuB_Off.setStatus('current')
if mibBuilder.loadTexts: shelf_psuB_Off.setDescription('PSU B is lost')
shelf_fan_On = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 7)).setLabel("shelf-fan-On").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_fan_On.setStatus('current')
if mibBuilder.loadTexts: shelf_fan_On.setDescription('Shelf fan is detected')
shelf_fan_Off = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 8)).setLabel("shelf-fan-Off").setObjects(("XXX-MIB", "shelfIdx"))
if mibBuilder.loadTexts: shelf_fan_Off.setStatus('current')
if mibBuilder.loadTexts: shelf_fan_Off.setDescription('Shelf fan is lost')
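# Card-level notifications follow: card detected/lost first, then the
# media-converter events (link up/down, module inserted/removed, remote
# power-down) for the various card types.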
card_Detected = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 20)).setLabel("card-Detected").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_Detected.setStatus('current')
if mibBuilder.loadTexts: card_Detected.setDescription('A card is detected (card presence traps use numbers 20~29)')
card_Lost = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 21)).setLabel("card-Lost").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_Lost.setStatus('current')
if mibBuilder.loadTexts: card_Lost.setDescription('A card is lost')
card_MC_Co_Tx_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 30)).setLabel("card-MC-Co-Tx-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Up.setDescription('The tx link of mc in center side is up (port/link traps use numbers 30 and above)')
card_MC_Co_Tx_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 31)).setLabel("card-MC-Co-Tx-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Down.setDescription('The tx link of mc in center side is broken')
card_MC_Co_Fx_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 32)).setLabel("card-MC-Co-Fx-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Fx_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Fx_Up.setDescription('The fx link of mc in center side is up')
card_MC_Co_Fx_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 33)).setLabel("card-MC-Co-Fx-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Fx_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Fx_Down.setDescription('The fx link of mc in center side is broken')
card_MC_Rmt_Tx_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 34)).setLabel("card-MC-Rmt-Tx-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up.setDescription('The tx link of mc in customer side is up')
card_MC_Rmt_Tx_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 35)).setLabel("card-MC-Rmt-Tx-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down.setDescription('The tx link of mc in customer side is broken')
card_MC_Rmt_PwrDown = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 36)).setLabel("card-MC-Rmt-PwrDown").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_PwrDown.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_PwrDown.setDescription('Remote mc power down detected')
card_MC_Co_Ntw_SFP_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 37)).setLabel("card-MC-Co-Ntw-SFP-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Inserted.setDescription('Local network port SFP inserted')
card_MC_Co_Ntw_SFP_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 38)).setLabel("card-MC-Co-Ntw-SFP-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Ntw_SFP_Removed.setDescription('Local network port SFP removed')
card_MC_Co_Acc_SFP_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 39)).setLabel("card-MC-Co-Acc-SFP-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Inserted.setDescription('Local access port SFP inserted')
card_MC_Co_Acc_SFP_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 40)).setLabel("card-MC-Co-Acc-SFP-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Acc_SFP_Removed.setDescription('Local access port SFP removed')
card_MC_Rmt_Acc_SFP_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 41)).setLabel("card-MC-Rmt-Acc-SFP-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Inserted.setDescription('Remote access port SFP inserted')
card_MC_Rmt_Acc_SFP_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 42)).setLabel("card-MC-Rmt-Acc-SFP-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Acc_SFP_Removed.setDescription('Remote access port SFP removed')
card_MC_Co_Tx_Up1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 43)).setLabel("card-MC-Co-Tx-Up1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Up1.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Up1.setDescription('The tx1 link of mc in center side is up')
card_MC_Co_Tx_Down1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 44)).setLabel("card-MC-Co-Tx-Down1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Down1.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Down1.setDescription('The tx1 link of mc in center side is broken')
card_MC_Co_Tx_Up2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 45)).setLabel("card-MC-Co-Tx-Up2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Up2.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Up2.setDescription('The tx2 link of mc in center side is up')
card_MC_Co_Tx_Down2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 46)).setLabel("card-MC-Co-Tx-Down2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Tx_Down2.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Tx_Down2.setDescription('The tx2 link of mc in center side is broken')
card_MC_Rmt_Tx_Up1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 47)).setLabel("card-MC-Rmt-Tx-Up1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up1.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up1.setDescription('The tx1 link of mc in customer side is up')
card_MC_Rmt_Tx_Down1 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 48)).setLabel("card-MC-Rmt-Tx-Down1").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down1.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down1.setDescription('The tx1 link of mc in customer side is broken')
card_MC_Rmt_Tx_Up2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 49)).setLabel("card-MC-Rmt-Tx-Up2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up2.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Up2.setDescription('The tx2 link of mc in customer side is up')
card_MC_Rmt_Tx_Down2 = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 50)).setLabel("card-MC-Rmt-Tx-Down2").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down2.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_Tx_Down2.setDescription('The tx2 link of mc in customer side is broken')
card_MC_Co_SFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 51)).setLabel("card-MC-Co-SFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP1_Inserted.setDescription('Local SFP1 inserted')
card_MC_Co_SFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 52)).setLabel("card-MC-Co-SFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP1_Removed.setDescription('Local SFP1 removed')
card_MC_Co_SFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 53)).setLabel("card-MC-Co-SFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP2_Inserted.setDescription('Local SFP2 inserted')
card_MC_Co_SFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 54)).setLabel("card-MC-Co-SFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP2_Removed.setDescription('Local SFP2 removed')
card_MC_Co_SFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 55)).setLabel("card-MC-Co-SFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP1_Up.setDescription('The SFP1 link of mc in center side is up')
card_MC_Co_SFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 56)).setLabel("card-MC-Co-SFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP1_Down.setDescription('The SFP1 link of mc in center side is broken')
card_MC_Co_SFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 57)).setLabel("card-MC-Co-SFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP2_Up.setDescription('The SFP2 link of mc in center side is up')
card_MC_Co_SFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 58)).setLabel("card-MC-Co-SFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP2_Down.setDescription('The SFP2 link of mc in center side is broken')
card_MC_Rmt_SFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 59)).setLabel("card-MC-Rmt-SFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Inserted.setDescription('Remote SFP1 inserted')
card_MC_Rmt_SFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 60)).setLabel("card-MC-Rmt-SFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Removed.setDescription('Remote SFP1 removed')
card_MC_Rmt_SFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 61)).setLabel("card-MC-Rmt-SFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Up.setDescription('The SFP1 link of mc in customer side is up')
card_MC_Rmt_SFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 62)).setLabel("card-MC-Rmt-SFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP1_Down.setDescription('The SFP1 link of mc in customer side is broken')
card_MC_Co_SFPSFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 63)).setLabel("card-MC-Co-SFPSFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Inserted.setDescription('Local SFP+1 inserted')
card_MC_Co_SFPSFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 64)).setLabel("card-MC-Co-SFPSFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Removed.setDescription('Local SFP+1 removed')
card_MC_Co_SFPSFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 65)).setLabel("card-MC-Co-SFPSFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Inserted.setDescription('Local SFP+2 inserted')
card_MC_Co_SFPSFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 66)).setLabel("card-MC-Co-SFPSFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Removed.setDescription('Local SFP+2 removed')
card_MC_Rmt_SFPSFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 67)).setLabel("card-MC-Rmt-SFPSFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Inserted.setDescription('Remote SFP+1 inserted')
card_MC_Rmt_SFPSFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 68)).setLabel("card-MC-Rmt-SFPSFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Removed.setDescription('Remote SFP+1 removed')
card_MC_Co_XFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 69)).setLabel("card-MC-Co-XFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP1_Inserted.setDescription('Local XFP1 inserted')
card_MC_Co_XFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 70)).setLabel("card-MC-Co-XFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP1_Removed.setDescription('Local XFP1 removed')
card_MC_Co_XFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 71)).setLabel("card-MC-Co-XFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP2_Inserted.setDescription('Local XFP2 inserted')
card_MC_Co_XFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 72)).setLabel("card-MC-Co-XFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP2_Removed.setDescription('Local XFP2 removed')
card_MC_Rmt_XFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 73)).setLabel("card-MC-Rmt-XFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Inserted.setDescription('Remote XFP1 inserted')
card_MC_Rmt_XFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 74)).setLabel("card-MC-Rmt-XFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Removed.setDescription('Remote XFP1 removed')
card_MC_Co_SFPSFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 75)).setLabel("card-MC-Co-SFPSFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Up.setDescription('The SFP+1 link of mc in center side is up')
card_MC_Co_SFPSFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 76)).setLabel("card-MC-Co-SFPSFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP1_Down.setDescription('The SFP+1 link of mc in center side is broken')
card_MC_Co_SFPSFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 77)).setLabel("card-MC-Co-SFPSFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Up.setDescription('The SFP+2 link of mc in center side is up')
card_MC_Co_SFPSFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 78)).setLabel("card-MC-Co-SFPSFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFPSFP2_Down.setDescription('The SFP+2 link of mc in center side is broken')
card_MC_Rmt_SFPSFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 79)).setLabel("card-MC-Rmt-SFPSFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Up.setDescription('The SFP+1 link of mc in customer side is up')
card_MC_Rmt_SFPSFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 80)).setLabel("card-MC-Rmt-SFPSFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFPSFP1_Down.setDescription('The SFP+1 link of mc in customer side is broken')
card_MC_Co_XFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 81)).setLabel("card-MC-Co-XFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP1_Up.setDescription('The XFP1 link of mc in center side is up')
card_MC_Co_XFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 82)).setLabel("card-MC-Co-XFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP1_Down.setDescription('The XFP1 link of mc in center side is broken')
card_MC_Co_XFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 83)).setLabel("card-MC-Co-XFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP2_Up.setDescription('The XFP2 link of mc in center side is up')
card_MC_Co_XFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 84)).setLabel("card-MC-Co-XFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_XFP2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_XFP2_Down.setDescription('The XFP2 link of mc in center side is broken')
card_MC_Rmt_XFP1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 85)).setLabel("card-MC-Rmt-XFP1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Up.setDescription('The XFP1 link of mc in customer side is up')
card_MC_Rmt_XFP1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 86)).setLabel("card-MC-Rmt-XFP1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_XFP1_Down.setDescription('The XFP1 link of mc in customer side is broken')
card_MC_Co_SFP3_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 87)).setLabel("card-MC-Co-SFP3-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Inserted.setDescription('Local SFP3 inserted')
card_MC_Co_SFP3_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 88)).setLabel("card-MC-Co-SFP3-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Removed.setDescription('Local SFP3 removed')
card_MC_Co_Port1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 89)).setLabel("card-MC-Co-Port1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port1_Up.setDescription('The Port1 link of mc in center side is up')
card_MC_Co_Port1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 90)).setLabel("card-MC-Co-Port1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port1_Down.setDescription('The Port1 link of mc in center side is broken')
card_MC_Co_Port2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 91)).setLabel("card-MC-Co-Port2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port2_Up.setDescription('The Port2 link of mc in center side is up')
card_MC_Co_Port2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 92)).setLabel("card-MC-Co-Port2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port2_Down.setDescription('The Port2 link of mc in center side is broken')
card_MC_Co_Port3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 93)).setLabel("card-MC-Co-Port3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port3_Up.setDescription('The Port3 link of mc in center side is up')
card_MC_Co_Port3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 94)).setLabel("card-MC-Co-Port3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_Port3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_Port3_Down.setDescription('The Port3 link of mc in center side is broken')
card_MC_FAN_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 100)).setLabel("card-MC-FAN-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_FAN_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_FAN_Normal.setDescription('Fan card works normally')
card_MC_FAN_Abnormal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 101)).setLabel("card-MC-FAN-Abnormal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_FAN_Abnormal.setStatus('current')
if mibBuilder.loadTexts: card_MC_FAN_Abnormal.setDescription('Fan card works abnormally')
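# QSFP notifications: module insertion/removal for QSFP1/QSFP2 and per-lane
# (lane1..lane4) link up/down events on the center side.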
card_MC_Co_QSFP1_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 102)).setLabel("card-MC-Co-QSFP1-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Inserted.setDescription('Local QSFP1 inserted')
card_MC_Co_QSFP1_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 103)).setLabel("card-MC-Co-QSFP1-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Removed.setDescription('Local QSFP1 removed')
card_MC_Co_QSFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 104)).setLabel("card-MC-Co-QSFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Inserted.setDescription('Local QSFP2 inserted')
card_MC_Co_QSFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 105)).setLabel("card-MC-Co-QSFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Removed.setDescription('Local QSFP2 removed')
card_MC_Co_QSFP1_Lane1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 106)).setLabel("card-MC-Co-QSFP1-Lane1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Up.setDescription('The QSFP1 Lane1 link of mc in center side is up')
card_MC_Co_QSFP1_Lane1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 107)).setLabel("card-MC-Co-QSFP1-Lane1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane1_Down.setDescription('The QSFP1 lane1 link of mc in center side is broken')
card_MC_Co_QSFP1_Lane2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 108)).setLabel("card-MC-Co-QSFP1-Lane2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Up.setDescription('The QSFP1 Lane2 link of mc in center side is up')
card_MC_Co_QSFP1_Lane2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 109)).setLabel("card-MC-Co-QSFP1-Lane2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane2_Down.setDescription('The QSFP1 lane2 link of mc in center side is broken')
card_MC_Co_QSFP1_Lane3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 110)).setLabel("card-MC-Co-QSFP1-Lane3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Up.setDescription('The QSFP1 Lane3 link of mc in center side is up')
card_MC_Co_QSFP1_Lane3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 111)).setLabel("card-MC-Co-QSFP1-Lane3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane3_Down.setDescription('The QSFP1 lane3 link of mc in center side is broken')
card_MC_Co_QSFP1_Lane4_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 112)).setLabel("card-MC-Co-QSFP1-Lane4-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Up.setDescription('The QSFP1 Lane4 link of mc in center side is up')
card_MC_Co_QSFP1_Lane4_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 113)).setLabel("card-MC-Co-QSFP1-Lane4-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP1_Lane4_Down.setDescription('The QSFP1 lane4 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane1_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 114)).setLabel("card-MC-Co-QSFP2-Lane1-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Up.setDescription('The QSFP2 Lane1 link of mc in center side is up')
card_MC_Co_QSFP2_Lane1_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 115)).setLabel("card-MC-Co-QSFP2-Lane1-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane1_Down.setDescription('The QSFP2 lane1 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 116)).setLabel("card-MC-Co-QSFP2-Lane2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Up.setDescription('The QSFP2 Lane2 link of mc in center side is up')
card_MC_Co_QSFP2_Lane2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 117)).setLabel("card-MC-Co-QSFP2-Lane2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane2_Down.setDescription('The QSFP2 lane2 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 118)).setLabel("card-MC-Co-QSFP2-Lane3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Up.setDescription('The QSFP2 Lane3 link of mc in center side is up')
card_MC_Co_QSFP2_Lane3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 119)).setLabel("card-MC-Co-QSFP2-Lane3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane3_Down.setDescription('The QSFP2 lane3 link of mc in center side is broken')
card_MC_Co_QSFP2_Lane4_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 120)).setLabel("card-MC-Co-QSFP2-Lane4-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Up.setDescription('The QSFP2 Lane4 link of mc in center side is up')
card_MC_Co_QSFP2_Lane4_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 121)).setLabel("card-MC-Co-QSFP2-Lane4-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_QSFP2_Lane4_Down.setDescription('The QSFP2 lane4 link of mc in center side is broken')
card_MC_Rmt_SFP2_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 122)).setLabel("card-MC-Rmt-SFP2-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Inserted.setDescription('Remote SFP2 inserted')
card_MC_Rmt_SFP2_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 123)).setLabel("card-MC-Rmt-SFP2-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Removed.setDescription('Remote SFP2 removed')
card_MC_Rmt_SFP3_Inserted = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 124)).setLabel("card-MC-Rmt-SFP3-Inserted").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Inserted.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Inserted.setDescription('Remote SFP3 inserted')
card_MC_Rmt_SFP3_Removed = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 125)).setLabel("card-MC-Rmt-SFP3-Removed").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Removed.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Removed.setDescription('Remote SFP3 removed')
card_MC_Rmt_SFP2_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 126)).setLabel("card-MC-Rmt-SFP2-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Up.setDescription('The SFP2 link of mc in customer side is up')
card_MC_Rmt_SFP2_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 127)).setLabel("card-MC-Rmt-SFP2-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP2_Down.setDescription('The SFP2 link of mc in customer side is broken')
card_MC_Rmt_SFP3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 128)).setLabel("card-MC-Rmt-SFP3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Up.setDescription('The SFP3 link of mc in customer side is up')
card_MC_Rmt_SFP3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 129)).setLabel("card-MC-Rmt-SFP3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Rmt_SFP3_Down.setDescription('The SFP3 link of mc in customer side is broken')
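# E1/T1 port alarm notifications: LOS, AIS and CV alarm/normal transitions
# per port, reported separately for the center side and the customer
# (remote) side.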
card_MC_E1_Co_Port1_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 130)).setLabel("card-MC-E1-Co-Port1-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Alarm.setDescription('Port1 LOS alarm in center side')
card_MC_E1_Co_Port1_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 131)).setLabel("card-MC-E1-Co-Port1-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_LOS_Normal.setDescription('Port1 LOS normal in center side')
card_MC_E1_Co_Port1_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 132)).setLabel("card-MC-E1-Co-Port1-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Alarm.setDescription('Port1 AIS alarm in center side')
card_MC_E1_Co_Port1_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 133)).setLabel("card-MC-E1-Co-Port1-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_AIS_Normal.setDescription('Port1 AIS normal in center side')
card_MC_E1_Co_Port1_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 134)).setLabel("card-MC-E1-Co-Port1-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Alarm.setDescription('Port1 CV alarm in center side')
card_MC_E1_Co_Port1_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 135)).setLabel("card-MC-E1-Co-Port1-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port1_CV_Normal.setDescription('Port1 CV normal in center side')
card_MC_E1_Co_Port2_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 136)).setLabel("card-MC-E1-Co-Port2-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Alarm.setDescription('Port2 LOS alarm in center side')
card_MC_E1_Co_Port2_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 137)).setLabel("card-MC-E1-Co-Port2-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_LOS_Normal.setDescription('Port2 LOS normal in center side')
card_MC_E1_Co_Port2_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 138)).setLabel("card-MC-E1-Co-Port2-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Alarm.setDescription('Port2 AIS alarm in center side')
card_MC_E1_Co_Port2_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 139)).setLabel("card-MC-E1-Co-Port2-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_AIS_Normal.setDescription('Port2 AIS normal in center side')
card_MC_E1_Co_Port2_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 140)).setLabel("card-MC-E1-Co-Port2-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Alarm.setDescription('Port2 CV alarm in center side')
card_MC_E1_Co_Port2_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 141)).setLabel("card-MC-E1-Co-Port2-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Co_Port2_CV_Normal.setDescription('Port2 CV normal in center side')
card_MC_E1_Rmt_Port1_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 142)).setLabel("card-MC-E1-Rmt-Port1-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Alarm.setDescription('Port1 LOS alarm in customer side')
card_MC_E1_Rmt_Port1_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 143)).setLabel("card-MC-E1-Rmt-Port1-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_LOS_Normal.setDescription('Port1 LOS normal in customer side')
card_MC_E1_Rmt_Port1_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 144)).setLabel("card-MC-E1-Rmt-Port1-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Alarm.setDescription('Port1 AIS alarm in customer side')
card_MC_E1_Rmt_Port1_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 145)).setLabel("card-MC-E1-Rmt-Port1-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_AIS_Normal.setDescription('Port1 AIS normal in customer side')
card_MC_E1_Rmt_Port1_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 146)).setLabel("card-MC-E1-Rmt-Port1-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Alarm.setDescription('Port1 CV alarm in customer side')
card_MC_E1_Rmt_Port1_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 147)).setLabel("card-MC-E1-Rmt-Port1-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port1_CV_Normal.setDescription('Port1 CV normal in customer side')
card_MC_E1_Rmt_Port2_LOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 148)).setLabel("card-MC-E1-Rmt-Port2-LOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Alarm.setDescription('Port2 LOS alarm in customer side')
card_MC_E1_Rmt_Port2_LOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 149)).setLabel("card-MC-E1-Rmt-Port2-LOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_LOS_Normal.setDescription('Port2 LOS normal in customer side')
card_MC_E1_Rmt_Port2_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 150)).setLabel("card-MC-E1-Rmt-Port2-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Alarm.setDescription('Port2 AIS alarm in customer side')
card_MC_E1_Rmt_Port2_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 151)).setLabel("card-MC-E1-Rmt-Port2-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_AIS_Normal.setDescription('Port2 AIS normal in customer side')
card_MC_E1_Rmt_Port2_CV_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 152)).setLabel("card-MC-E1-Rmt-Port2-CV-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Alarm.setDescription('Port2 CV alarm in customer side')
card_MC_E1_Rmt_Port2_CV_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 153)).setLabel("card-MC-E1-Rmt-Port2-CV-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1_Rmt_Port2_CV_Normal.setDescription('Port2 CV normal in customer side')
card_MC_Co_SFP3_Up = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 154)).setLabel("card-MC-Co-SFP3-Up").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Up.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Up.setDescription('The SFP3 link of mc in center side is up')
card_MC_Co_SFP3_Down = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 155)).setLabel("card-MC-Co-SFP3-Down").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_Co_SFP3_Down.setStatus('current')
if mibBuilder.loadTexts: card_MC_Co_SFP3_Down.setDescription('The SFP3 link of mc in center side is broken')
card_MC_E1T1_Co_TXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 156)).setLabel("card-MC-E1T1-Co-TXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Alarm.setDescription('Tx LOS alarm in center side')
card_MC_E1T1_Co_TXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 157)).setLabel("card-MC-E1T1-Co-TXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_TXLOS_Normal.setDescription('Tx LOS normal in center side')
card_MC_E1T1_Co_FXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 158)).setLabel("card-MC-E1T1-Co-FXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Alarm.setDescription('Fx LOS alarm in center side')
card_MC_E1T1_Co_FXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 159)).setLabel("card-MC-E1T1-Co-FXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_FXLOS_Normal.setDescription('Fx LOS normal in center side')
card_MC_E1T1_Co_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 160)).setLabel("card-MC-E1T1-Co-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Alarm.setDescription('AIS alarm in center side')
card_MC_E1T1_Co_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 161)).setLabel("card-MC-E1T1-Co-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Co_AIS_Normal.setDescription('AIS normal in center side')
card_MC_E1T1_Rmt_TXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 162)).setLabel("card-MC-E1T1-Rmt-TXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Alarm.setDescription('Tx LOS alarm in customer side')
card_MC_E1T1_Rmt_TXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 163)).setLabel("card-MC-E1T1-Rmt-TXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_TXLOS_Normal.setDescription('Tx LOS normal in customer side')
card_MC_E1T1_Rmt_FXLOS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 164)).setLabel("card-MC-E1T1-Rmt-FXLOS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Alarm.setDescription('Fx LOS alarm in customer side')
card_MC_E1T1_Rmt_FXLOS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 165)).setLabel("card-MC-E1T1-Rmt-FXLOS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_FXLOS_Normal.setDescription('Fx LOS normal in customer side')
card_MC_E1T1_Rmt_AIS_Alarm = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 166)).setLabel("card-MC-E1T1-Rmt-AIS-Alarm").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Alarm.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Alarm.setDescription('AIS alarm in customer side')
card_MC_E1T1_Rmt_AIS_Normal = NotificationType((1, 3, 6, 1, 4, 1, 6688, 1, 1, 2, 167)).setLabel("card-MC-E1T1-Rmt-AIS-Normal").setObjects(("XXX-MIB", "shelfIdx"), ("XXX-MIB", "slotIdx"))
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Normal.setStatus('current')
if mibBuilder.loadTexts: card_MC_E1T1_Rmt_AIS_Normal.setDescription('AIS normal in customer side')
mibBuilder.exportSymbols("XXX-MIB", mc40G_OEOLane2LoopMode=mc40G_OEOLane2LoopMode, company=company, mcFanCardObjects=mcFanCardObjects, mc10G_OEECurSpd=mc10G_OEECurSpd, card_MC_Co_QSFP1_Lane2_Up=card_MC_Co_QSFP1_Lane2_Up, mc4_25G_OEONtwPD=mc4_25G_OEONtwPD, mcCm1gSfpEntry=mcCm1gSfpEntry, card_MC_Co_QSFP1_Lane1_Down=card_MC_Co_QSFP1_Lane1_Down, mc10G_OEO2RObjects=mc10G_OEO2RObjects, mcTransceiverDist=mcTransceiverDist, mc2_5GMCCardObjects=mc2_5GMCCardObjects, mcPortState=mcPortState, mc1GE2OPort2SFPExist=mc1GE2OPort2SFPExist, shelfNum=shelfNum, mcIP175DCurWorkMode=mcIP175DCurWorkMode, card_MC_Rmt_Tx_Up1=card_MC_Rmt_Tx_Up1, mc1go2o_sfpTransCode=mc1go2o_sfpTransCode, sysName=sysName, psuB=psuB, ntwXFP2WaveLength=ntwXFP2WaveLength, mcAccXFP1WaveLengthTunability=mcAccXFP1WaveLengthTunability, mc10G_OEO_Test_Error_Counter=mc10G_OEO_Test_Error_Counter, mc2_5g_sfpWavelength=mc2_5g_sfpWavelength, mc2_5g_sfpCompliance=mc2_5g_sfpCompliance, card_MC_E1_Co_Port1_CV_Alarm=card_MC_E1_Co_Port1_CV_Alarm, mcIP175DCardTable=mcIP175DCardTable, mc4_25G_OEO_Test_Result=mc4_25G_OEO_Test_Result, mc40G_OEOHWLoopMode=mc40G_OEOHWLoopMode, mcE1T1RmtFLink=mcE1T1RmtFLink, ntwXFP2WaveLengthTunability=ntwXFP2WaveLengthTunability, card_MC_E1T1_Co_TXLOS_Normal=card_MC_E1T1_Co_TXLOS_Normal, mcCmEntry=mcCmEntry, mc1go2o_sfpCompliance=mc1go2o_sfpCompliance, shelfIdx=shelfIdx, trapHost1=trapHost1, mcCm1gSfpObjects=mcCm1gSfpObjects, volA=volA, getNtwQSfpCmd=getNtwQSfpCmd, mc1GO2OPortHWPri=mc1GO2OPortHWPri, card_MC_Co_SFP3_Up=card_MC_Co_SFP3_Up, mcE1Port2Loop=mcE1Port2Loop, xfpWaveLengthTunable=xfpWaveLengthTunable, rmtCardNum=rmtCardNum, shelf_fan_Off=shelf_fan_Off, mcDownStream=mcDownStream, mcE1SFP1Link=mcE1SFP1Link, mc10G_OEOCardEntry=mc10G_OEOCardEntry, mcAccXFP1WaveLengthTunable=mcAccXFP1WaveLengthTunable, nmuType=nmuType, mc10G_OEO_Get_Test_Rst=mc10G_OEO_Get_Test_Rst, mc1GE2ORmtPort1SFPlink=mc1GE2ORmtPort1SFPlink, slotObjects=slotObjects, mcIP175DObjects=mcIP175DObjects, card_MC_Co_Tx_Up1=card_MC_Co_Tx_Up1, mcRmtE1Port1CV=mcRmtE1Port1CV, mcRmtHWTransmitMode=mcRmtHWTransmitMode, mcIP175DTxlink=mcIP175DTxlink, mc10G_OEOCurSpdMode=mc10G_OEOCurSpdMode, mc10G_OEELoopMode=mc10G_OEELoopMode, mcE1T1FLossAlarm=mcE1T1FLossAlarm, shelf_psuA_On=shelf_psuA_On, mcQCA8334RmtCfgWorkMode=mcQCA8334RmtCfgWorkMode, shelfTable=shelfTable, ipaddr=ipaddr, mc1GE2OPort1SFPlink=mc1GE2OPort1SFPlink, mc10G_OEO_accType=mc10G_OEO_accType, card_MC_E1_Rmt_Port1_AIS_Normal=card_MC_E1_Rmt_Port1_AIS_Normal, sfpVoltage=sfpVoltage, mc10GOEO1RCardTable=mc10GOEO1RCardTable, mc10GXFP2WaveLengthTunable=mc10GXFP2WaveLengthTunable, mcRmtE1Port1Loop=mcRmtE1Port1Loop, mc4_25G_OEOCardEntry=mc4_25G_OEOCardEntry, card_MC_E1_Rmt_Port1_AIS_Alarm=card_MC_E1_Rmt_Port1_AIS_Alarm, mcRmt4_25G_OEOHWWorkMode=mcRmt4_25G_OEOHWWorkMode, mcCm1gIpEntry=mcCm1gIpEntry, cwdmWavelength5=cwdmWavelength5, mcE1Port1AIS=mcE1Port1AIS, accXFP1TunableType=accXFP1TunableType, mcRmtPwrDown=mcRmtPwrDown, mc10G_OEOObjects=mc10G_OEOObjects, card_MC_Rmt_Tx_Down=card_MC_Rmt_Tx_Down, mc10G_OEEHWLoopback=mc10G_OEEHWLoopback, card_MC_Co_Port3_Down=card_MC_Co_Port3_Down, card_MC_Co_SFP3_Down=card_MC_Co_SFP3_Down, mcQCA8334RmtTxlink=mcQCA8334RmtTxlink, card_MC_E1T1_Co_AIS_Normal=card_MC_E1T1_Co_AIS_Normal, mcRmtE1TxCurWorkMode=mcRmtE1TxCurWorkMode, qsfpAccRxPower4=qsfpAccRxPower4, mcRmt10G_OEOLoopback=mcRmt10G_OEOLoopback, card_MC_E1_Rmt_Port2_CV_Normal=card_MC_E1_Rmt_Port2_CV_Normal, card_MC_E1_Co_Port2_CV_Normal=card_MC_E1_Co_Port2_CV_Normal, mcTxByteLo=mcTxByteLo, mcCardIdx=mcCardIdx, 
card_MC_Rmt_SFP3_Up=card_MC_Rmt_SFP3_Up, card_MC_E1T1_Co_FXLOS_Alarm=card_MC_E1T1_Co_FXLOS_Alarm, mcRmt4_25G_OEOHWSpdMode=mcRmt4_25G_OEOHWSpdMode, card_MC_Co_QSFP1_Lane4_Up=card_MC_Co_QSFP1_Lane4_Up, mcUtility=mcUtility, mc4_25G_OEOObjects=mc4_25G_OEOObjects, mc10GOEO1RCardEntry=mc10GOEO1RCardEntry, getAccSfpCmd=getAccSfpCmd, mc1GO2OPort3SFPExist=mc1GO2OPort3SFPExist, mcRmtCfgWorkMode=mcRmtCfgWorkMode, mcNtwQSfpObjects=mcNtwQSfpObjects, mcShelfIdx=mcShelfIdx, mc40G_OEOCardEntry=mc40G_OEOCardEntry, mcE1T1CardTable=mcE1T1CardTable, accsfpRecvPower=accsfpRecvPower, mc10GXFP1WaveLengthTunability=mc10GXFP1WaveLengthTunability, cwdmWavelength7=cwdmWavelength7, cwdmWavelength1=cwdmWavelength1, mc40G_OEOQsfp2Lane3_link=mc40G_OEOQsfp2Lane3_link, mcE1Port2CV=mcE1Port2CV, mcAccQSfpObjects=mcAccQSfpObjects, mcRmtE1Port1AIS=mcRmtE1Port1AIS, mcIP175DCardEntry=mcIP175DCardEntry, accsfpTemperature=accsfpTemperature, mc10G_OEO2R_accTunableType=mc10G_OEO2R_accTunableType, card_MC_Co_QSFP2_Lane2_Up=card_MC_Co_QSFP2_Lane2_Up, mc4_25G_OEO_Test_Lock=mc4_25G_OEO_Test_Lock, mcRmtUtility=mcRmtUtility, mc2_5GMCSfp3Exist=mc2_5GMCSfp3Exist, mc4_25G_OEO_Start_Test=mc4_25G_OEO_Start_Test, qsfpAccTemperature=qsfpAccTemperature, card_MC_Rmt_Tx_Down2=card_MC_Rmt_Tx_Down2, sfpConnector=sfpConnector, mcFanObjects=mcFanObjects, card_MC_Co_SFP2_Inserted=card_MC_Co_SFP2_Inserted, mc1GE2OPortPri=mc1GE2OPortPri, mc1GO2OPort1SFPExist=mc1GO2OPort1SFPExist, card_MC_Rmt_SFP3_Down=card_MC_Rmt_SFP3_Down, mcRmt10G_OEOCfgSpdMode=mcRmt10G_OEOCfgSpdMode, mc1go2o_sfpWavelength=mc1go2o_sfpWavelength, card_MC_Co_SFP1_Removed=card_MC_Co_SFP1_Removed, mc40G_OEOQsfp1Lane1_link=mc40G_OEOQsfp1Lane1_link, card_MC_Rmt_XFP1_Up=card_MC_Rmt_XFP1_Up, card_MC_E1T1_Co_TXLOS_Alarm=card_MC_E1T1_Co_TXLOS_Alarm, sfpCopperLength=sfpCopperLength, mc2_5GMCCardTable=mc2_5GMCCardTable, mc2_5g_getSfpCmd=mc2_5g_getSfpCmd, mcE1T1RmtTLossAlarm=mcE1T1RmtTLossAlarm, trapHost3=trapHost3, mcRmtE1Port2LOS=mcRmtE1Port2LOS, card_MC_Co_SFPSFP1_Removed=card_MC_Co_SFPSFP1_Removed, card_MC_E1T1_Co_FXLOS_Normal=card_MC_E1T1_Co_FXLOS_Normal, mc1GO2OPort3SFPlink=mc1GO2OPort3SFPlink, mc1go2o_getSfpCmd=mc1go2o_getSfpCmd, sfpCompliance=sfpCompliance, card_MC_Co_QSFP2_Lane2_Down=card_MC_Co_QSFP2_Lane2_Down, mc10GXFP1WaveLength=mc10GXFP1WaveLength, mc10GOEO3RCardTable=mc10GOEO3RCardTable, mc1GO2ORmtPortHWPri=mc1GO2ORmtPortHWPri, card_MC_Rmt_Tx_Up2=card_MC_Rmt_Tx_Up2, mcRmtHWLFP=mcRmtHWLFP, card_MC_Co_QSFP1_Lane1_Up=card_MC_Co_QSFP1_Lane1_Up, accsfpSmLength=accsfpSmLength, mcIP175DPortObjects=mcIP175DPortObjects, mcType=mcType, accXFP1WaveLength=accXFP1WaveLength, mc40G_OEOQsfp1Lane3_link=mc40G_OEOQsfp1Lane3_link, mc10GXFP2WaveLengthTunability=mc10GXFP2WaveLengthTunability, card_MC_E1T1_Rmt_TXLOS_Normal=card_MC_E1T1_Rmt_TXLOS_Normal, mcRmtHWWorkMode=mcRmtHWWorkMode, mcQCA8334UpStream=mcQCA8334UpStream, card_MC_Rmt_SFP1_Down=card_MC_Rmt_SFP1_Down, mc4_25G_OEOHWLoopback=mc4_25G_OEOHWLoopback, mc10G_OEEFxlink=mc10G_OEEFxlink, mcE1Port1LOS=mcE1Port1LOS, mc2_5GMCPort1link=mc2_5GMCPort1link, qsfpAccRxPower3=qsfpAccRxPower3, mcNtwSfpExist=mcNtwSfpExist, mcNtwXFP2WaveLength=mcNtwXFP2WaveLength, mcRmt10G_OEOHWSpdMode=mcRmt10G_OEOHWSpdMode, cwdmWavelengthCount=cwdmWavelengthCount, card_MC_Rmt_SFPSFP1_Up=card_MC_Rmt_SFPSFP1_Up, mc10G_OEOHWSpdMode=mc10G_OEOHWSpdMode, mc40G_OEOQsfp2Lane4_link=mc40G_OEOQsfp2Lane4_link, mcNtwXFP2WaveLengthTunable=mcNtwXFP2WaveLengthTunable, mc2_5GMCSFP3Objects=mc2_5GMCSFP3Objects, mcHWTransmitMode=mcHWTransmitMode, slotIdx=slotIdx, qsfpNtwTxPower1=qsfpNtwTxPower1, 
mcHWLFP=mcHWLFP, mcE1T1RmtAISAlarm=mcE1T1RmtAISAlarm, volB=volB, mcRmtCurWorkMode=mcRmtCurWorkMode, mc1GE2OCardTable=mc1GE2OCardTable, accsfpWavelength=accsfpWavelength, mcAccQSfpEntry=mcAccQSfpEntry, mcTxByteHi=mcTxByteHi, mc10G_OEO2RHWSFP1Loopback=mc10G_OEO2RHWSFP1Loopback, sysContact=sysContact, slotEntry=slotEntry, mcCurWorkMode=mcCurWorkMode, card_MC_E1_Rmt_Port2_AIS_Alarm=card_MC_E1_Rmt_Port2_AIS_Alarm, mcIP175DUpStream=mcIP175DUpStream, mcRmtDetect=mcRmtDetect, mc10G_OEO_Test_Lock=mc10G_OEO_Test_Lock, mc2_5g_sfpTranPower=mc2_5g_sfpTranPower, mc2_5g_sfpBrSpeed=mc2_5g_sfpBrSpeed, mc40G_OEOSpeedMode=mc40G_OEOSpeedMode, mc1GO2OSfp3Table=mc1GO2OSfp3Table, mc1GE2ORmtPort2SFPlink=mc1GE2ORmtPort2SFPlink, mcCm1gAccSfpObjects=mcCm1gAccSfpObjects, rmtCardType=rmtCardType, card_MC_Co_SFP2_Removed=card_MC_Co_SFP2_Removed, card_MC_Co_Tx_Down1=card_MC_Co_Tx_Down1, card_MC_Co_QSFP1_Lane2_Down=card_MC_Co_QSFP1_Lane2_Down, card_MC_Co_QSFP2_Lane4_Down=card_MC_Co_QSFP2_Lane4_Down, mcRxByteHi=mcRxByteHi, card_MC_Rmt_SFP3_Inserted=card_MC_Rmt_SFP3_Inserted, mcE1CardEntry=mcE1CardEntry, card_Detected=card_Detected, card_MC_Co_XFP2_Up=card_MC_Co_XFP2_Up, card_MC_Rmt_SFP2_Up=card_MC_Rmt_SFP2_Up, trapHost2=trapHost2, mc2_5Cm1gSfpTable=mc2_5Cm1gSfpTable, mcCWDMCardEntry=mcCWDMCardEntry, mcE1T1TLoop=mcE1T1TLoop, mc1GO2OPortPri=mc1GO2OPortPri, mcLFPCfg=mcLFPCfg, qsfpAccRxPower1=qsfpAccRxPower1, temperature=temperature, qsfpAccTxPower3=qsfpAccTxPower3, card_MC_Co_SFP1_Down=card_MC_Co_SFP1_Down, mc4_25G_OEOCurSpdMode=mc4_25G_OEOCurSpdMode, mc10G_OEECardEntry=mc10G_OEECardEntry, mcFanStatus=mcFanStatus, mc2_5GMCPort3link=mc2_5GMCPort3link, systemMIB=systemMIB, mcTxlink=mcTxlink, mcCmTable=mcCmTable, mcRmtTxlink=mcRmtTxlink, mc40G_OEOQsfp1Lane4_link=mc40G_OEOQsfp1Lane4_link, sfpTranPower=sfpTranPower, coCardType=coCardType, mcFanCardEntry=mcFanCardEntry, cwdmWavelength3=cwdmWavelength3, card_MC_E1_Co_Port1_AIS_Normal=card_MC_E1_Co_Port1_AIS_Normal, mc1GE2OPortHWPri=mc1GE2OPortHWPri, mcE1T1TLossAlarm=mcE1T1TLossAlarm, mcQCA8334DownStream=mcQCA8334DownStream, mcIP175DPortEntry=mcIP175DPortEntry, mcE1T1Version=mcE1T1Version, card_MC_Co_QSFP1_Lane3_Down=card_MC_Co_QSFP1_Lane3_Down, card_MC_E1_Rmt_Port1_CV_Normal=card_MC_E1_Rmt_Port1_CV_Normal, card_MC_Rmt_SFP2_Removed=card_MC_Rmt_SFP2_Removed, card_MC_Co_SFPSFP1_Down=card_MC_Co_SFPSFP1_Down, accsfpBrSpeed=accsfpBrSpeed, cwdmWavelength8=cwdmWavelength8, mcRmt10G_OEOCurSpdMode=mcRmt10G_OEOCurSpdMode, mc10GOEEXFPTunableCardObjects=mc10GOEEXFPTunableCardObjects, mc1GE2ORmtPortHWPri=mc1GE2ORmtPortHWPri, mc10G_OEO_Test_Continue_Time=mc10G_OEO_Test_Continue_Time)
mibBuilder.exportSymbols("XXX-MIB", mc4_25G_OEO_Test_Error_Counter=mc4_25G_OEO_Test_Error_Counter, mc1GO2OPort1SFPlink=mc1GO2OPort1SFPlink, card_MC_E1_Rmt_Port1_LOS_Normal=card_MC_E1_Rmt_Port1_LOS_Normal, mc1GO2OSFP3Objects=mc1GO2OSFP3Objects, xfpWaveLength=xfpWaveLength, card_MC_Co_XFP1_Removed=card_MC_Co_XFP1_Removed, card_MC_Rmt_SFP2_Inserted=card_MC_Rmt_SFP2_Inserted, mc4_25G_OEOWorkMode=mc4_25G_OEOWorkMode, card_MC_Co_XFP2_Inserted=card_MC_Co_XFP2_Inserted, mc2_5g_sfpSmLength=mc2_5g_sfpSmLength, card_MC_Co_Port2_Down=card_MC_Co_Port2_Down, card_MC_Rmt_Acc_SFP_Removed=card_MC_Rmt_Acc_SFP_Removed, card_MC_Co_QSFP2_Removed=card_MC_Co_QSFP2_Removed, mc10GOEEXFPTunableCardTable=mc10GOEEXFPTunableCardTable, card_MC_Co_SFPSFP2_Down=card_MC_Co_SFPSFP2_Down, mcQCA8334PortIdx=mcQCA8334PortIdx, card_MC_E1T1_Rmt_AIS_Alarm=card_MC_E1T1_Rmt_AIS_Alarm, mcIP175DPortTable=mcIP175DPortTable, mc1go2o_sfpRecvPower=mc1go2o_sfpRecvPower, mc10G_OEO2R_ntwType=mc10G_OEO2R_ntwType, card_MC_Rmt_Tx_Up=card_MC_Rmt_Tx_Up, mcCm1gAccSfpTable=mcCm1gAccSfpTable, mcE1T1RmtFLoop=mcE1T1RmtFLoop, card_MC_E1_Co_Port1_CV_Normal=card_MC_E1_Co_Port1_CV_Normal, mc10GXFP1WaveLengthTunable=mc10GXFP1WaveLengthTunable, card_MC_Co_Tx_Up2=card_MC_Co_Tx_Up2, mcRmt10G_OEOHWLoopback=mcRmt10G_OEOHWLoopback, mc10G_OEECardObjects=mc10G_OEECardObjects, mcRmt4_25G_OEOWorkMode=mcRmt4_25G_OEOWorkMode, card_MC_Co_Port1_Up=card_MC_Co_Port1_Up, mc10GXFP2WaveLength=mc10GXFP2WaveLength, mc10G_OEOCardTable=mc10G_OEOCardTable, mc10G_OEE_ntwType=mc10G_OEE_ntwType, mcRmt10G_OEO_ntwType=mcRmt10G_OEO_ntwType, card_MC_Co_SFPSFP1_Up=card_MC_Co_SFPSFP1_Up, mc1GO2OPort2SFPlink=mc1GO2OPort2SFPlink, mcE1T1AISAlarm=mcE1T1AISAlarm, mcLoOrRmtFg=mcLoOrRmtFg, mc10G_OEOCfgSpdMode=mc10G_OEOCfgSpdMode, mcE1T1CardObjects=mcE1T1CardObjects, card_MC_Co_SFP3_Removed=card_MC_Co_SFP3_Removed, mcE1T1FLink=mcE1T1FLink, nmuObjects=nmuObjects, mc1go2o_sfpCopperLength=mc1go2o_sfpCopperLength, mc1GO2OPort3HWSpd=mc1GO2OPort3HWSpd, mc1GO2ORmtPort3SFPExist=mc1GO2ORmtPort3SFPExist, mc10G_OEO2R_ntwTunableType=mc10G_OEO2R_ntwTunableType, shelf_psuB_On=shelf_psuB_On, mcPmTable=mcPmTable, mcCm1gSpecificObjects=mcCm1gSpecificObjects, mc4_25G_OEO_Get_Test_Rst=mc4_25G_OEO_Get_Test_Rst, mcE1T1CardEntry=mcE1T1CardEntry, mc4_25G_OEO_Test_Continue_Time=mc4_25G_OEO_Test_Continue_Time, mcQsfpSpecificObjects=mcQsfpSpecificObjects, mc10GOEO3RCardObjects=mc10GOEO3RCardObjects, card_MC_E1_Co_Port2_AIS_Alarm=card_MC_E1_Co_Port2_AIS_Alarm, mc1GE2OCardObjects=mc1GE2OCardObjects, mc2_5g_sfpRecvPower=mc2_5g_sfpRecvPower, height2HU=height2HU, mc2_5g_sfpMmLength=mc2_5g_sfpMmLength, mc1GE2OObjects=mc1GE2OObjects, mc1GO2OObjects=mc1GO2OObjects, mcCm1gIpTable=mcCm1gIpTable, mcNtwQSfpEntry=mcNtwQSfpEntry, mc40G_OEOLane1LoopMode=mc40G_OEOLane1LoopMode, mc1go2o_sfpConnector=mc1go2o_sfpConnector, card_MC_Co_SFPSFP2_Up=card_MC_Co_SFPSFP2_Up, mcE1Port1CV=mcE1Port1CV, shelfEntry=shelfEntry, sfpBrSpeed=sfpBrSpeed, mcRmtE1SFP1Link=mcRmtE1SFP1Link, card_MC_Co_Acc_SFP_Removed=card_MC_Co_Acc_SFP_Removed, mc10G_OEOSFP2=mc10G_OEOSFP2, card_MC_E1_Co_Port2_LOS_Normal=card_MC_E1_Co_Port2_LOS_Normal, mcUpStream=mcUpStream, mc4_25G_OEOCardObjects=mc4_25G_OEOCardObjects, card_MC_Co_SFP2_Up=card_MC_Co_SFP2_Up, mc2_5g_sfpTransCode=mc2_5g_sfpTransCode, card_MC_E1_Rmt_Port2_LOS_Normal=card_MC_E1_Rmt_Port2_LOS_Normal, mc10G_OEO_Start_Test=mc10G_OEO_Start_Test, accXFP1WaveLengthTunable=accXFP1WaveLengthTunable, mcIpAddr=mcIpAddr, mc40G_OEOCardTable=mc40G_OEOCardTable, mc1GO2ORmtPort3SFPlink=mc1GO2ORmtPort3SFPlink, 
mcRmt4_25G_OEOLoopback=mcRmt4_25G_OEOLoopback, mcE1T1Type=mcE1T1Type, qsfpNtwRxPower3=qsfpNtwRxPower3, mc10G_OEOCardObjects=mc10G_OEOCardObjects, shelfName=shelfName, card_Lost=card_Lost, accsfpCopperLength=accsfpCopperLength, card_MC_E1_Co_Port1_LOS_Alarm=card_MC_E1_Co_Port1_LOS_Alarm, mcIP175DVlanMode=mcIP175DVlanMode, card_MC_E1T1_Rmt_TXLOS_Alarm=card_MC_E1T1_Rmt_TXLOS_Alarm, mc10G_OEESpdMode=mc10G_OEESpdMode, card_MC_Co_Fx_Up=card_MC_Co_Fx_Up, rmtCardDesc=rmtCardDesc, mcAccXFP1TunableType=mcAccXFP1TunableType, card_MC_Rmt_SFPSFP1_Removed=card_MC_Rmt_SFPSFP1_Removed, mcNtwXFP2WaveLengthTunability=mcNtwXFP2WaveLengthTunability, card_MC_Co_QSFP1_Removed=card_MC_Co_QSFP1_Removed, card_MC_Co_Ntw_SFP_Removed=card_MC_Co_Ntw_SFP_Removed, mcPmRest=mcPmRest, card_MC_Co_Acc_SFP_Inserted=card_MC_Co_Acc_SFP_Inserted, mcE1T1RmtFLossAlarm=mcE1T1RmtFLossAlarm, mc10G_OEO2RCardTable=mc10G_OEO2RCardTable, card_MC_E1_Rmt_Port2_CV_Alarm=card_MC_E1_Rmt_Port2_CV_Alarm, card_MC_Co_Port3_Up=card_MC_Co_Port3_Up, mc1GE2OPort2SFPlink=mc1GE2OPort2SFPlink, mc10GOEO3RObjects=mc10GOEO3RObjects, card_MC_E1_Co_Port1_AIS_Alarm=card_MC_E1_Co_Port1_AIS_Alarm, mc10G_OEO_Test_Result=mc10G_OEO_Test_Result, card_MC_Co_SFPSFP1_Inserted=card_MC_Co_SFPSFP1_Inserted, mc4_25G_OEOHWWorkMode=mc4_25G_OEOHWWorkMode, card_MC_Co_Tx_Down=card_MC_Co_Tx_Down, mcCWDMObjects=mcCWDMObjects, mcHWRmtCtrlMode=mcHWRmtCtrlMode, mcCfgWorkMode=mcCfgWorkMode, mcQCA8334PortEntry=mcQCA8334PortEntry, mcE1CardTable=mcE1CardTable, mc40G_OEOQsfp1Lane2_link=mc40G_OEOQsfp1Lane2_link, mc10G_OEO2RCurSpdMode=mc10G_OEO2RCurSpdMode, PYSNMP_MODULE_ID=company, mc10G_OEOLoopback=mc10G_OEOLoopback, mcE1T1FLoop=mcE1T1FLoop, card_MC_Rmt_SFP1_Up=card_MC_Rmt_SFP1_Up, mcRmt4_25G_OEOCfgSpdMode=mcRmt4_25G_OEOCfgSpdMode, card_MC_Rmt_PwrDown=card_MC_Rmt_PwrDown, mc1go2o_sfpMmLength=mc1go2o_sfpMmLength, mcQCA8334CfgWorkMode=mcQCA8334CfgWorkMode, mcRmtType=mcRmtType, mcQCA8334VlanMode=mcQCA8334VlanMode, mc4_25G_OEOHWSpdMode=mc4_25G_OEOHWSpdMode, card_MC_Co_QSFP2_Lane1_Up=card_MC_Co_QSFP2_Lane1_Up, mcE1Port2LOS=mcE1Port2LOS, mc1GO2OCardObjects=mc1GO2OCardObjects, mcRmt4_25G_OEOHWLoopback=mcRmt4_25G_OEOHWLoopback, mcRmt10G_OEO_accType=mcRmt10G_OEO_accType, mc2_5GMCObjects=mc2_5GMCObjects, mcTransceiverMode=mcTransceiverMode, mc40G_OEOObjects=mc40G_OEOObjects, mcCm1gAccSfpEntry=mcCm1gAccSfpEntry, mcIP175DCfgWorkMode=mcIP175DCfgWorkMode, mc1GE2ORmtTxlink=mc1GE2ORmtTxlink, mcE1T1RmtTLoop=mcE1T1RmtTLoop, qsfpNtwTxPower2=qsfpNtwTxPower2, mc1GO2OPort2SFPExist=mc1GO2OPort2SFPExist, ntwXFP2WaveLengthTunable=ntwXFP2WaveLengthTunable, qsfpNtwRxPower2=qsfpNtwRxPower2, card_MC_Co_XFP1_Inserted=card_MC_Co_XFP1_Inserted, mc10G_OEOSFP1=mc10G_OEOSFP1, mcQCA8334CardObjects=mcQCA8334CardObjects, card_MC_Co_QSFP1_Lane3_Up=card_MC_Co_QSFP1_Lane3_Up, mcAccQSfpTable=mcAccQSfpTable, mc10G_OEO_ntwType=mc10G_OEO_ntwType, mc40G_OEOLane3LoopMode=mc40G_OEOLane3LoopMode, card_MC_Rmt_SFP3_Removed=card_MC_Rmt_SFP3_Removed, card_MC_Co_QSFP2_Lane3_Down=card_MC_Co_QSFP2_Lane3_Down, psuA=psuA, ipProduct=ipProduct, mcFanCardTable=mcFanCardTable, mcRmtE1Port2CV=mcRmtE1Port2CV, mc40G_OEOQsfp2Lane2_link=mc40G_OEOQsfp2Lane2_link, qsfpAccRxPower2=qsfpAccRxPower2, mc10G_OEO2RCfgSpdMode=mc10G_OEO2RCfgSpdMode, card_MC_E1_Rmt_Port2_LOS_Alarm=card_MC_E1_Rmt_Port2_LOS_Alarm, card_MC_E1T1_Rmt_FXLOS_Normal=card_MC_E1T1_Rmt_FXLOS_Normal, mc2_5g_sfpCopperLength=mc2_5g_sfpCopperLength, mcCmObjects=mcCmObjects, mc10GOEEXFPTunableObjects=mc10GOEEXFPTunableObjects, mc10GOEO1RObjects=mc10GOEO1RObjects, 
mc40G_OEOHWSpeedMode=mc40G_OEOHWSpeedMode, getAccQSfpCmd=getAccQSfpCmd, mcQCA8334CardTable=mcQCA8334CardTable, mc10G_OEO2R_accType=mc10G_OEO2R_accType, card_MC_E1_Co_Port1_LOS_Normal=card_MC_E1_Co_Port1_LOS_Normal, card_MC_Rmt_SFPSFP1_Inserted=card_MC_Rmt_SFPSFP1_Inserted, mc2_5g_sfpTemperature=mc2_5g_sfpTemperature, mcRmtE1Txlink=mcRmtE1Txlink, accsfpTranPower=accsfpTranPower, mcE1T1CodeType=mcE1T1CodeType, sfpRecvPower=sfpRecvPower, card_MC_Co_Fx_Down=card_MC_Co_Fx_Down, card_MC_Co_QSFP2_Inserted=card_MC_Co_QSFP2_Inserted, mcRmtLFP=mcRmtLFP, card_MC_Rmt_SFP1_Inserted=card_MC_Rmt_SFP1_Inserted, accsfpTransCode=accsfpTransCode, sysLocation=sysLocation, qsfpNtwRxPower4=qsfpNtwRxPower4, card_MC_E1_Rmt_Port1_LOS_Alarm=card_MC_E1_Rmt_Port1_LOS_Alarm, sfpSmLength=sfpSmLength, cwdmWavelength6=cwdmWavelength6, mcE1TxCurWorkMode=mcE1TxCurWorkMode, mc1GE2ORmtPort1SFPExist=mc1GE2ORmtPort1SFPExist, mcCm1gIpObjects=mcCm1gIpObjects, mcRmtE1Port2Loop=mcRmtE1Port2Loop, mc10G_OEO2RCardEntry=mc10G_OEO2RCardEntry, card_MC_Co_SFP1_Up=card_MC_Co_SFP1_Up, qsfpAccTxPower2=qsfpAccTxPower2, card_MC_Rmt_XFP1_Removed=card_MC_Rmt_XFP1_Removed, qsfpAccTxPower1=qsfpAccTxPower1, qsfpAccConnector=qsfpAccConnector, mcRmtE1Port2AIS=mcRmtE1Port2AIS, card_MC_E1T1_Rmt_FXLOS_Alarm=card_MC_E1T1_Rmt_FXLOS_Alarm, card_MC_Co_QSFP1_Inserted=card_MC_Co_QSFP1_Inserted, card_MC_FAN_Normal=card_MC_FAN_Normal, mcNtwQSfpTable=mcNtwQSfpTable, mc10G_OEE_checkResult=mc10G_OEE_checkResult, card_MC_E1T1_Rmt_AIS_Normal=card_MC_E1T1_Rmt_AIS_Normal, mc2_5GMCCardEntry=mc2_5GMCCardEntry, mcE1Txlink=mcE1Txlink, mcQCA8334CardEntry=mcQCA8334CardEntry, mcRmtLoopback=mcRmtLoopback, mcQCA8334CurWorkMode=mcQCA8334CurWorkMode, card_MC_Rmt_SFP2_Down=card_MC_Rmt_SFP2_Down, card_MC_Rmt_SFP1_Removed=card_MC_Rmt_SFP1_Removed, mcRmtE1Port1LOS=mcRmtE1Port1LOS, card_MC_Co_SFP1_Inserted=card_MC_Co_SFP1_Inserted, qsfpNtwRxPower1=qsfpNtwRxPower1, mc10G_OEO2RSFP2=mc10G_OEO2RSFP2, slotTable=slotTable, mc10G_OEECardTable=mc10G_OEECardTable, mc40G_OEOLoopMode=mc40G_OEOLoopMode, mc1go2o_sfpTranPower=mc1go2o_sfpTranPower, mc1GO2ORmtPort2SFPlink=mc1GO2ORmtPort2SFPlink, mc10G_OEO2RCardObjects=mc10G_OEO2RCardObjects, mcCm1gSfpTable=mcCm1gSfpTable, mc1go2o_sfpTemperature=mc1go2o_sfpTemperature, mc1GO2ORmtPort2SFPExist=mc1GO2ORmtPort2SFPExist, card_MC_E1_Co_Port2_CV_Alarm=card_MC_E1_Co_Port2_CV_Alarm, mc1GE2OPort1SFPExist=mc1GE2OPort1SFPExist, accsfpConnector=accsfpConnector, coCardNum=coCardNum, mc1GO2OCardTable=mc1GO2OCardTable, gateway=gateway, qsfpNtwTemperature=qsfpNtwTemperature, card_MC_Rmt_XFP1_Inserted=card_MC_Rmt_XFP1_Inserted, mcIP175DRmtCurWorkMode=mcIP175DRmtCurWorkMode, mc2_5g_sfpConnector=mc2_5g_sfpConnector, mcE1T1RmtCodeType=mcE1T1RmtCodeType, mc1GO2ORmtPort1SFPExist=mc1GO2ORmtPort1SFPExist, mc4_25G_OEOCfgSpdMode=mc4_25G_OEOCfgSpdMode, card_MC_Co_SFP2_Down=card_MC_Co_SFP2_Down, sfpMmLength=sfpMmLength, mc10GOEEXFPTunableCardEntry=mc10GOEEXFPTunableCardEntry, mcIP175DPortIdx=mcIP175DPortIdx, sfpWavelength=sfpWavelength, shelf_psuA_Off=shelf_psuA_Off, card_MC_Co_Port1_Down=card_MC_Co_Port1_Down, mcRmtAccSfpExist=mcRmtAccSfpExist, mcAccXFP1WaveLength=mcAccXFP1WaveLength, card_MC_Co_QSFP2_Lane4_Up=card_MC_Co_QSFP2_Lane4_Up, card_MC_Co_QSFP2_Lane3_Up=card_MC_Co_QSFP2_Lane3_Up, mcE1CardObjects=mcE1CardObjects)
mibBuilder.exportSymbols("XXX-MIB", ntwXFP2TunableType=ntwXFP2TunableType, card_MC_Co_Ntw_SFP_Inserted=card_MC_Co_Ntw_SFP_Inserted, card_MC_E1_Rmt_Port2_AIS_Normal=card_MC_E1_Rmt_Port2_AIS_Normal, mc2_5GMCPort2link=mc2_5GMCPort2link, mc1GE2ORmtPort2SFPExist=mc1GE2ORmtPort2SFPExist, getSfpCmd=getSfpCmd, mc10G_OEO2RSFP1=mc10G_OEO2RSFP1, card_MC_Co_SFPSFP2_Removed=card_MC_Co_SFPSFP2_Removed, mc1GO2ORmtPort1SFPlink=mc1GO2ORmtPort1SFPlink, mcPmObjects=mcPmObjects, card_MC_Co_Tx_Up=card_MC_Co_Tx_Up, qsfpAccTxPower4=qsfpAccTxPower4, shelf_Lost=shelf_Lost, mcPmEntry=mcPmEntry, mc2_5Cm1gSfpEntry=mc2_5Cm1gSfpEntry, mc1GO2OSfp3Entry=mc1GO2OSfp3Entry, cwdmWavelength4=cwdmWavelength4, xfpWaveLengthTunability=xfpWaveLengthTunability, card_MC_FAN_Abnormal=card_MC_FAN_Abnormal, mc10G_OEO2RSFP2Loopback=mc10G_OEO2RSFP2Loopback, subnet=subnet, card_MC_Co_Port2_Up=card_MC_Co_Port2_Up, card_MC_Co_XFP2_Removed=card_MC_Co_XFP2_Removed, shelf_fan_On=shelf_fan_On, mcQCA8334PortObjects=mcQCA8334PortObjects, mcE1Port1Loop=mcE1Port1Loop, mc10G_OEETxlink=mc10G_OEETxlink, mc10G_OEOHWLoopback=mc10G_OEOHWLoopback, mcRmt4_25G_OEOCurSpdMode=mcRmt4_25G_OEOCurSpdMode, coCardDesc=coCardDesc, nmuConfig=nmuConfig, mc1GO2OCardEntry=mc1GO2OCardEntry, mc1go2o_sfpBrSpeed=mc1go2o_sfpBrSpeed, fan=fan, mc10GOEO3RCardEntry=mc10GOEO3RCardEntry, card_MC_Co_XFP1_Down=card_MC_Co_XFP1_Down, card_MC_Rmt_XFP1_Down=card_MC_Rmt_XFP1_Down, mcE1Objects=mcE1Objects, card_MC_Rmt_Tx_Down1=card_MC_Rmt_Tx_Down1, mcHWWorkMode=mcHWWorkMode, card_MC_Rmt_SFPSFP1_Down=card_MC_Rmt_SFPSFP1_Down, card_MC_Co_QSFP2_Lane1_Down=card_MC_Co_QSFP2_Lane1_Down, mc4_25G_OEOAccPD=mc4_25G_OEOAccPD, cwdmWavelength2=cwdmWavelength2, trapHost4=trapHost4, mcQCA8334PortTable=mcQCA8334PortTable, mcCWDMCardObjects=mcCWDMCardObjects, mcTransmitMode=mcTransmitMode, sfpTemperature=sfpTemperature, qsfpNtwConnector=qsfpNtwConnector, mc4_25G_OEOCardTable=mc4_25G_OEOCardTable, mc40G_OEOLane4LoopMode=mc40G_OEOLane4LoopMode, mcQCA8334Objects=mcQCA8334Objects, mc40G_OEOCardObjects=mc40G_OEOCardObjects, shelf_psuB_Off=shelf_psuB_Off, mcIP175DRmtTxlink=mcIP175DRmtTxlink, mc2_5g_sfpVoltage=mc2_5g_sfpVoltage, mcCWDMCardTable=mcCWDMCardTable, mcRmtTransmitMode=mcRmtTransmitMode, mcFxlink=mcFxlink, mcRxByteLo=mcRxByteLo, mc1GE2OTxlink=mc1GE2OTxlink, mc1go2o_sfpVoltage=mc1go2o_sfpVoltage, mc10GOEO1RCardObjects=mc10GOEO1RCardObjects, card_MC_E1_Rmt_Port1_CV_Alarm=card_MC_E1_Rmt_Port1_CV_Alarm, card_MC_Co_XFP1_Up=card_MC_Co_XFP1_Up, mc10G_OEEObjects=mc10G_OEEObjects, card_MC_E1T1_Co_AIS_Alarm=card_MC_E1T1_Co_AIS_Alarm, mc1go2o_sfpSmLength=mc1go2o_sfpSmLength, shelf_Detected=shelf_Detected, card_MC_Co_SFPSFP2_Inserted=card_MC_Co_SFPSFP2_Inserted, card_MC_Rmt_Acc_SFP_Inserted=card_MC_Rmt_Acc_SFP_Inserted, mcIP175DCardObjects=mcIP175DCardObjects, card_MC_Co_XFP2_Down=card_MC_Co_XFP2_Down, mcQCA8334Txlink=mcQCA8334Txlink, mcE1Port2AIS=mcE1Port2AIS, card_MC_Co_Tx_Down2=card_MC_Co_Tx_Down2, sfpTransCode=sfpTransCode, mc4_25G_OEOLoopback=mc4_25G_OEOLoopback, mcNtwXFP2TunableType=mcNtwXFP2TunableType, accsfpVoltage=accsfpVoltage, mcRmt10G_OEOSFP1=mcRmt10G_OEOSFP1, mc1GO2ORmtPort3HWSpd=mc1GO2ORmtPort3HWSpd, mc10G_OEO2RHWSFP2Loopback=mc10G_OEO2RHWSFP2Loopback, qsfpNtwTxPower3=qsfpNtwTxPower3, mc10G_OEO2RVersion=mc10G_OEO2RVersion, accsfpMmLength=accsfpMmLength, mc10G_OEO2RHWSpdMode=mc10G_OEO2RHWSpdMode, mc10G_OEO2RSFP1Loopback=mc10G_OEO2RSFP1Loopback, mc1GE2OCardEntry=mc1GE2OCardEntry, mcQCA8334RmtCurWorkMode=mcQCA8334RmtCurWorkMode, xfpTunableType=xfpTunableType, 
card_MC_Co_QSFP1_Lane4_Down=card_MC_Co_QSFP1_Lane4_Down, mcIP175DDownStream=mcIP175DDownStream, cardObjects=cardObjects, alarmMIB=alarmMIB, accXFP1WaveLengthTunability=accXFP1WaveLengthTunability, qsfpNtwTxPower4=qsfpNtwTxPower4, mcE1T1Objects=mcE1T1Objects, mcAccSfpExist=mcAccSfpExist, card_MC_E1_Co_Port2_AIS_Normal=card_MC_E1_Co_Port2_AIS_Normal, card_MC_E1_Co_Port2_LOS_Alarm=card_MC_E1_Co_Port2_LOS_Alarm, mcIP175DRmtCfgWorkMode=mcIP175DRmtCfgWorkMode, mc40G_OEOQsfp2Lane1_link=mc40G_OEOQsfp2Lane1_link, accsfpCompliance=accsfpCompliance, card_MC_Co_SFP3_Inserted=card_MC_Co_SFP3_Inserted)
| 141.884835 | 10,350 | 0.750826 |
d9e551f94d290cc9b470d1fddfc0e91666dab7ba | 444 | py | Python | setup.py | zhanghang1989/notedown | b0fa1eac88d1cd7fa2261d6c454f82669e6f552b | [
"BSD-2-Clause"
] | null | null | null | setup.py | zhanghang1989/notedown | b0fa1eac88d1cd7fa2261d6c454f82669e6f552b | [
"BSD-2-Clause"
] | null | null | null | setup.py | zhanghang1989/notedown | b0fa1eac88d1cd7fa2261d6c454f82669e6f552b | [
"BSD-2-Clause"
] | null | null | null | from setuptools import setup
# create __version__
exec(open('./_version.py').read())
setup(
name="notedown",
version=__version__,
description="Convert markdown to IPython notebook.",
author="Aaron O'Leary",
author_email='[email protected]',
url='http://github.com/aaren/notedown',
install_requires=['ipython', ],
entry_points={
'console_scripts': [
'notedown = notedown:cli',
],
}
)
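# Usage sketch (assumes a standard pip install of this package): the
# console_scripts entry point above exposes notedown:cli as a `notedown`
# command, so the converter can be invoked as e.g.
#     notedown input.md > output.ipynb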
| 22.2 | 56 | 0.628378 |
d9e5c18f6a37dd4a96dd21f7ddefb31b197848dd | 2,853 | py | Python | multithreaded_webcrawler.py | the-muses-ltd/Multithreaded-Webcrawler-Cassandra- | eee68faf3c6ecb548edd0e96ce445dcd366fb735 | [
"MIT"
] | null | null | null | multithreaded_webcrawler.py | the-muses-ltd/Multithreaded-Webcrawler-Cassandra- | eee68faf3c6ecb548edd0e96ce445dcd366fb735 | [
"MIT"
] | null | null | null | multithreaded_webcrawler.py | the-muses-ltd/Multithreaded-Webcrawler-Cassandra- | eee68faf3c6ecb548edd0e96ce445dcd366fb735 | [
"MIT"
] | null | null | null | # This is a reusable web crawler architecture that can be adapted to scrape any website.
# RESULTS:
# Roughly 24 seconds per thousand courses scraped for ThreadPoolExecutor vs 63s for unthreaded script.
# This is a very basic implementation of multithreading in order to show the proof of concept, but is a good base to build off of.
import requests
from bs4 import BeautifulSoup
import csv
from concurrent.futures import ProcessPoolExecutor, as_completed, ThreadPoolExecutor
import time
import logging
from mitopencourseware_crawler_worker import mit_crawler
# Exports data to a formatted CSV file; this will be replaced with multithreaded API calls to the Cassandra Prisma Database,
# or, in production on the cloud, the data will be sent to the S3 temporary store to be picked up by the AWS Lambda function, which will push it to the Cassandra Database.
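# Minimal sketch of the ThreadPoolExecutor pattern whose timings are quoted above.
# The real worker lives in mitopencourseware_crawler_worker.mit_crawler; the
# fetch_course() helper and the list of course URLs here are assumptions for
# illustration only.
def fetch_course(url):
    # Fetch one course page and pull a trivial field out of the HTML.
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    soup = BeautifulSoup(response.text, 'html.parser')
    title = soup.title.string.strip() if soup.title and soup.title.string else ''
    return url, title

def crawl_concurrently(course_urls, max_workers=16, out_path='courses.csv'):
    # Submit every URL to the pool, collect results as they complete,
    # then dump everything to a CSV file in one go.
    rows = []
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = {executor.submit(fetch_course, url): url for url in course_urls}
        for future in as_completed(futures):
            try:
                rows.append(future.result())
            except Exception as exc:
                logging.warning('Failed to scrape %s: %s', futures[future], exc)
    with open(out_path, 'w', newline='') as handle:
        csv.writer(handle).writerows(rows)
    return rows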
| 42.58209 | 164 | 0.667368 |
d9e62b20786a73ca86ccde01bde160623cc32657 | 3,710 | py | Python | genyrator/entities/Template.py | jumblesale/genyrator | c4429f689e92e8447b0b944e7d9b434f99cae51d | [
"MIT"
] | 1 | 2020-07-01T16:54:39.000Z | 2020-07-01T16:54:39.000Z | genyrator/entities/Template.py | jumblesale/genyrator | c4429f689e92e8447b0b944e7d9b434f99cae51d | [
"MIT"
] | 10 | 2018-11-16T15:04:21.000Z | 2021-06-01T22:27:38.000Z | genyrator/entities/Template.py | jumblesale/genyrator | c4429f689e92e8447b0b944e7d9b434f99cae51d | [
"MIT"
] | 2 | 2018-08-08T10:42:35.000Z | 2019-07-25T11:56:06.000Z | from typing import List, Optional, NewType, Tuple, NamedTuple, Type
import attr
from jinja2 import Template as JinjaTemplate, StrictUndefined
from genyrator.entities.Entity import Entity
from genyrator.path import create_relative_path
OutPath = NewType('OutPath', Tuple[List[str], str])
Import = NamedTuple('Import',
[('module_name', str),
('imports', List[str]), ])
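# Illustrative example of the helper types above (names and values are assumptions):
#     Import(module_name='app.models', imports=['User', 'Order'])
#     out_path: OutPath = OutPath((['app', 'resources'], 'user.py'))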
| 24.090909 | 73 | 0.616712 |
d9e78859b4482aaef1db18210493138799d91b2f | 1,969 | py | Python | MIDI Remote Scripts/Push2/mode_collector.py | aarkwright/ableton_devices | fe5df3bbd64ccbc136bba722ba1e131a02969798 | [
"MIT"
] | null | null | null | MIDI Remote Scripts/Push2/mode_collector.py | aarkwright/ableton_devices | fe5df3bbd64ccbc136bba722ba1e131a02969798 | [
"MIT"
] | null | null | null | MIDI Remote Scripts/Push2/mode_collector.py | aarkwright/ableton_devices | fe5df3bbd64ccbc136bba722ba1e131a02969798 | [
"MIT"
] | null | null | null | # uncompyle6 version 3.3.5
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.7.3 (default, Apr 24 2019, 15:29:51) [MSC v.1915 64 bit (AMD64)]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\Push2\mode_collector.py
# Compiled at: 2018-11-30 15:48:11
from __future__ import absolute_import, print_function, unicode_literals
from ableton.v2.base import listenable_property, listens, EventObject | 37.865385 | 124 | 0.742509 |
d9e7a46d631c672aae25d04f18b75876427b787e | 817 | py | Python | src/topicModel.py | daidaotong/SingleView | db3249ca5afba97f750495cccbc185de88bf2287 | [
"MIT"
] | null | null | null | src/topicModel.py | daidaotong/SingleView | db3249ca5afba97f750495cccbc185de88bf2287 | [
"MIT"
] | null | null | null | src/topicModel.py | daidaotong/SingleView | db3249ca5afba97f750495cccbc185de88bf2287 | [
"MIT"
] | null | null | null | from gensim import corpora, models, similarities, matutils, utils
from gensim.models import KeyedVectors
import numpy as np
#Word2vec Experiment
testString = ['PAST_MEDICAL_HISTORY','PAST_SURGICAL_HISTORY','PHYSICAL_EXAMINATION']
'''
word_vectors = KeyedVectors.load_word2vec_format('~/Downloads/GoogleNews-vectors-negative300.bin', binary=True)
#model.save("file.txt")
print word_vectors.most_similar(positive=['woman', 'king'], negative=['man'])
print "******************************************************"
print word_vectors.similarity('woman', 'man')
#print word_vectors.most_similar(positive=['san_francisco'])
print word_vectors.most_similar(positive=['SURGICAL'])
#word_vectors.similarity(testString[0],testString[1])
'''
a = [1, 4, 3, 6, 3, 6]
print(a[:-1])
# print(list(zip(a[:-1], a[1:])))
print(np.random.randn(3, 2))
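# Sketch of the similarity check hinted at in the commented-out block above.
# It assumes a loaded gensim KeyedVectors model (e.g. the GoogleNews vectors)
# is passed in by the caller; terms missing from the vocabulary are skipped.
def section_similarities(vectors, terms=testString):
    # Compare consecutive section headers, mirroring the zip(a[:-1], a[1:]) idiom above.
    return {(first, second): vectors.similarity(first, second)
            for first, second in zip(terms[:-1], terms[1:])
            if first in vectors and second in vectors}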
| 35.521739 | 111 | 0.71481 |
d9e8867f9d8fa5dbea3f62a0b298eac5f535d37a | 9,499 | py | Python | src/bots/test/test_inputs.py | drewbitt/lightnovel-crawler | fa9546ad9dcff49c75296b0b8772f6578689adcc | [
"Apache-2.0"
] | 1 | 2019-03-10T13:02:23.000Z | 2019-03-10T13:02:23.000Z | src/bots/test/test_inputs.py | drewbitt/lightnovel-crawler | fa9546ad9dcff49c75296b0b8772f6578689adcc | [
"Apache-2.0"
] | null | null | null | src/bots/test/test_inputs.py | drewbitt/lightnovel-crawler | fa9546ad9dcff49c75296b0b8772f6578689adcc | [
"Apache-2.0"
] | null | null | null | from base64 import decodebytes as b64decode  # decodestring was removed in Python 3.9; decodebytes behaves the same
allowed_failures = [
'https://ranobelib.me/',
'https://www.aixdzs.com/',
'https://webnovelindonesia.com/',
b64decode("aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS8=".encode()).decode()
]
test_user_inputs = {
b64decode("aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS8=".encode()).decode(): [
b64decode(
"aHR0cHM6Ly9jb21yYWRlbWFvLmNvbS9ub3ZlbC90c3VydWdpLW5vLWpvb3UtdG8tcmFrdWluLW5vLWtvLw==".encode()).decode()
],
'https://novelsrock.com/': [
'https://novelsrock.com/novel/the-returner/',
'kuro'
],
'http://gravitytales.com/': [
'http://gravitytales.com/posts/novel/a-dragons-curiosity'
],
'http://novelfull.com/': [
'http://novelfull.com/dungeon-defense.html',
'Sinister Ex Girlfriend',
],
'http://www.machinenoveltranslation.com/': [
'http://www.machinenoveltranslation.com/a-thought-through-eternity',
],
'http://zenithnovels.com/': [
'http://zenithnovels.com/infinity-armament/',
],
'https://anythingnovel.com/': [
'https://anythingnovel.com/novel/king-of-gods/',
],
'https://boxnovel.com/': [
'https://boxnovel.com/novel/the-rest-of-my-life-is-for-you/',
'cultivation chat',
],
'https://crescentmoon.blog/': [
'https://crescentmoon.blog/dark-blue-and-moonlight/',
],
'https://litnet.com/': [
'https://litnet.com/en/book/candy-lips-1-b106232',
'candy lips',
],
'https://lnmtl.com/': [
'https://lnmtl.com/novel/the-strongest-dan-god',
],
'https://m.chinesefantasynovels.com/': [
'https://m.chinesefantasynovels.com/3838/',
],
'https://m.novelspread.com/': [
'https://m.novelspread.com/novel/the-legend-of-the-concubine-s-daughter-minglan',
],
'https://m.romanticlovebooks.com/': [
'https://m.romanticlovebooks.com/xuanhuan/207.html',
],
'http://www.tiknovel.com/': [
'http://www.tiknovel.com/book/index?id=717',
],
'https://www.wuxiaworld.co/': [
'sword',
],
'https://m.wuxiaworld.co/': [
'https://m.wuxiaworld.co/Reincarnation-Of-The-Strongest-Sword-God/',
],
'https://meionovel.id/': [
'https://meionovel.id/novel/the-legendary-mechanic/',
],
'https://mtled-novels.com/': [
'https://mtled-novels.com/novels/great-ruler/',
'great ruler'
],
'https://bestlightnovel.com/': [
'https://bestlightnovel.com/novel_888103800',
'martial'
],
'https://novelplanet.com/': [
'https://novelplanet.com/Novel/Returning-from-the-Immortal-World',
'immortal'
],
'https://www.volarenovels.com/': [
'https://www.volarenovels.com/novel/adorable-creature-attacks',
],
'https://webnovel.online/': [
'https://webnovel.online/full-marks-hidden-marriage-pick-up-a-son-get-a-free-husband',
],
'https://www.idqidian.us/': [
'https://www.idqidian.us/novel/peerless-martial-god/'
],
'https://www.novelall.com/': [
'https://www.novelall.com/novel/Virtual-World-Close-Combat-Mage.html',
'combat'
],
'https://www.novelspread.com/': [
'https://www.novelspread.com/novel/the-legend-of-the-concubine-s-daughter-minglan'
],
'https://www.readlightnovel.org/': [
'https://www.readlightnovel.org/top-furious-doctor-soldier'
],
'https://www.romanticlovebooks.com/': [
'https://www.romanticlovebooks.com/xianxia/251.html'
],
'https://www.royalroad.com/': [
'https://www.royalroad.com/fiction/21220/mother-of-learning',
'mother'
],
'https://www.scribblehub.com/': [
'https://www.scribblehub.com/series/73550/modern-life-of-the-exalted-immortal/',
'cultivation'
],
'https://www.webnovel.com/': [
'https://www.webnovel.com/book/8212987205006305/Trial-Marriage-Husband%3A-Need-to-Work-Hard',
'martial',
],
'https://www.worldnovel.online/': [
'https://www.worldnovel.online/novel/solo-leveling/',
],
'https://www.wuxiaworld.co/': [
'https://www.wuxiaworld.co/Reincarnation-Of-The-Strongest-Sword-God/',
'sword'
],
'https://rewayat.club/': [
'https://rewayat.club/novel/almighty-sword-domain/'
],
'https://www.wuxiaworld.com/': [
'https://www.wuxiaworld.com/novel/martial-god-asura',
'martial',
],
'https://creativenovels.com/': [
'https://creativenovels.com/novel/eternal-reverence/',
],
'https://www.tapread.com/': [
'https://www.tapread.com/book/detail/80',
],
'http://www.tapread.com/': [
'http://www.tapread.com/book/detail/80',
],
'https://readnovelfull.com/': [
'https://readnovelfull.com/lord-of-all-realms.html',
'cultivation'
],
'https://myoniyonitranslations.com/': [
'https://myoniyonitranslations.com/top-management/',
'https://myoniyonitranslations.com/category/god-of-tennis',
],
'https://babelnovel.com/': [
'https://babelnovel.com/books/ceo-let-me-go',
'dazzle Good'
],
'https://wuxiaworld.online/': [
'https://wuxiaworld.online/trial-marriage-husband-need-to-work-hard',
'cultivation',
],
'https://www.novelv.com/': [
'https://www.novelv.com/0/349/'
],
'http://fullnovel.live/': [
'http://fullnovel.live/novel-a-will-eternal',
'will eternal',
],
'https://www.noveluniverse.com/': [
'https://www.noveluniverse.com/index/novel/info/id/15.html'
],
'https://novelraw.blogspot.com/': [
'https://novelraw.blogspot.com/2019/03/dragon-king-son-in-law-mtl.html'
],
'https://light-novel.online/': [
'https://light-novel.online/great-tyrannical-deity',
'tyrannical'
],
'https://www.rebirth.online/': [
'https://www.rebirth.online/novel/upside-down'
],
'https://www.jieruihao.cn/': [
'https://www.jieruihao.cn/novel/against-the-gods/',
],
'https://www.wattpad.com/': [
'https://www.wattpad.com/story/87505567-loving-mr-jerkface-%E2%9C%94%EF%B8%8F'
],
'https://novelgo.id/': [
'https://novelgo.id/novel/the-mightiest-leveling-system/'
],
'https://yukinovel.me/': [
'https://yukinovel.me/novel/the-second-coming-of-avarice/',
],
'https://www.asianhobbyist.com/': [
'https://www.asianhobbyist.com/series/that-time-i-got-reincarnated-as-a-slime/'
],
'https://kisslightnovels.info/': [
'https://kisslightnovels.info/novel/solo-leveling/'
],
'https://novelonlinefull.com/': [
'https://novelonlinefull.com/novel/abo1520855001564322110'
],
'https://www.machine-translation.org/': [
'https://www.machine-translation.org/novel/bace21c9b10d34e9/world-of-cultivation.html'
],
'https://www.fanfiction.net/': [
'https://www.fanfiction.net/s/7268451/1/Facebook-For-wizards'
],
'https://www.mtlnovel.com/': [
'https://www.mtlnovel.com/trapped-in-a-typical-idol-drama/'
],
'https://wordexcerpt.com/': [
'https://wordexcerpt.com/series/transmigration-raising-the-child-of-the-male-lead-boss/'
],
'https://www.translateindo.com/': [
'https://www.translateindo.com/demon-wang-golden-status-favoured-fei/'
],
'https://ranobelib.me/': [
'https://ranobelib.me/sozvezdie-klinka'
],
'https://novelringan.com/': [
'https://novelringan.com/series/the-most-loving-marriage-in-history-master-mus-pampered-wife/'
],
'https://wuxiaworld.site/': [
'https://wuxiaworld.site/novel/only-i-level-up/'
],
'https://id.mtlnovel.com/': [
'https://id.mtlnovel.com/the-strongest-plane-becomes-god/'
],
'https://www.shinsori.com/': [
'https://www.shinsori.com/akuyaku-reijou-ni-nanka-narimasen/'
],
'https://www.flying-lines.com/': [
'https://www.flying-lines.com/novel/one-useless-rebirth'
],
'https://book.qidian.com/': [
'https://book.qidian.com/info/1016597088'
],
'https://kiss-novel.com/': [
'https://kiss-novel.com/the-first-order'
],
'https://www.machine-translation.org/': [
'https://www.machine-translation.org/novel/a5eee127d75da0d2/long-live-summons.html'
],
'https://www.aixdzs.com/': [
'https://www.aixdzs.com/d/66/66746/'
],
'https://webnovelonline.com/': [
'https://webnovelonline.com/novel/the_anarchic_consort'
],
'https://4scanlation.com/': [
'https://4scanlation.com/tensei-shitara-slime-datta-ken-wn/'
],
'https://listnovel.com/': [
'https://listnovel.com/novel/my-sassy-crown-princess/'
],
'https://tomotranslations.com/': [
'https://tomotranslations.com/this-hero-is-invincible-but-too-cautious/'
],
'https://www.wuxialeague.com/': [
'https://www.wuxialeague.com/novel/245/'
],
'http://liberspark.com/': [
'http://liberspark.com/novel/black-irons-glory'
],
'https://webnovelindonesia.com/': [
'https://webnovelindonesia.com/nv/almighty-student'
],
'http://tiknovel.com/': [
'http://tiknovel.com/book/index?id=717'
],
'http://boxnovel.org/': [
'http://boxnovel.org/novel/martial-god-asura'
]
}
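# Illustrative sketch (not part of the original test data): a test harness can
# walk the mapping above while honouring allowed_failures.
def iter_test_cases():
    for source_url, user_inputs in test_user_inputs.items():
        if source_url in allowed_failures:
            continue
        for user_input in user_inputs:
            yield source_url, user_input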
| 34.922794 | 117 | 0.596694 |
d9ea76a8227b5405cef7b2e6991bcba1911971f4 | 5,819 | py | Python | wikisourcesort.py | ostropunk/wikisourcesort | 3af2d086df0818a75b3e6c34550e2cc1382911a5 | [
"MIT"
] | null | null | null | wikisourcesort.py | ostropunk/wikisourcesort | 3af2d086df0818a75b3e6c34550e2cc1382911a5 | [
"MIT"
] | null | null | null | wikisourcesort.py | ostropunk/wikisourcesort | 3af2d086df0818a75b3e6c34550e2cc1382911a5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import re
# In[2]:
# In[3]:
def textreader(text):
'''Opens textfile and returns the content as a string'''
with open(text, 'rt', encoding="utf8") as wiki:
txtstring = wiki.read()
return txtstring
# In[44]:
def replace_from_dict(text, dictionary):
'''Replaces words in text with new words in dictionary'''
for word in dictionary:
text = text.replace(word, dictionary[word])
return text
# In[172]:
def get_ref(text):
'''
Finds references between the <ref>- and </ref>-tags
and returns them as a list of strings
'''
ref = re.findall("\<ref.+?\<\/ref\>", text)
return ref
# In[171]:
def getrefurl(ref):
'''Finds the reference url in references and returns it as a string'''
url = re.search("http.+?(?=\s|\|title=|\|titel|\}\})", ref)
url = url.group()
return url
# In[30]:
def get_domain_name(url):
'''
Finds the domain name of the reference url and
returns that name as a string.
'''
domain_name = re.search('(?<=\/\/).+?(?=\/)', url)
domain_name = domain_name.group()
if domain_name.startswith('www.'):
domain_name = domain_name.replace('www.', '')
return domain_name
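# Illustrative example of the two helpers above (the citation string is an
# assumption, not taken from real article data):
#     ref = '<ref>{{webbref |url=https://www.example.com/page |titel=Exempel}}</ref>'
#     getrefurl(ref)                    -> 'https://www.example.com/page'
#     get_domain_name(getrefurl(ref))   -> 'example.com'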
# In[32]:
# In[36]:
def create_ref_dict(refs):
'''
Takes a list of references, extracts the reference url and name,
and returns a dictionary sorted on the referenceurl as key.
'''
ref_dict = {}
ref_counts = {}
for ref in refs:
ref_dict, ref_counts = update_ref_dict(ref, ref_dict, ref_counts)
return ref_dict
# In[79]:
def get_ref_tag(text):
    '''
    Finds self-closing <ref name="..."/> tags in the text
    and returns them as a set of strings
    '''
ref = re.findall("\<ref name\=.+?\/\>", text)
#ref = re.findall("\<ref.+?\<\/ref\>|\<ref name\=.+?\/\>", text)
#ref = re.findall("\<ref.+?(?!\"\s\/\>)\<\/ref>", text)
#ref = re.findall("\<ref.+?\<\/ref\>", text)
return set(ref)
# In[130]:
def get_spec_ref(text, ref_tag):
    '''
    Finds the full <ref name="ref_tag">...</ref> reference matching
    the given tag name and returns it as a string
    '''
#ref = re.findall("\<ref name\=.+?\/\>", text)
#ref = re.findall("\<ref.+?\<\/ref\>|\<ref name\=.+?\/\>", text)
#ref = re.findall("\<ref.+?(?!\"\s\/\>)\<\/ref>", text)
ref = re.findall(f'\<ref name\=\"{ref_tag}\"\>.+?\<\/ref\>', text)
ref = ref[0]
return ref
# In[115]:
# In[136]:
# In[49]:
# In[66]:
# In[169]:
def reference_sorter(text):
'''
Does a bunch of stuff that should be broken out in different functions.
'''
references = get_ref(text)
reference_dict = create_ref_dict(references)
reference_list = []
reference_text = '== Referenser ==\n<references>\n'
    text = text.replace('== Källor ==', '== Referenser ==')
text = text.replace('<references/>', '')
for entry in reference_dict:
for reference in reference_dict[entry]['refs']:
text = text.replace(reference, '<ref name="{}" />'.format(reference_dict[entry]['refname']))
reference_list.append('<ref name="{}">{}</ref>'.format(reference_dict[entry]['refname'], entry))
for reference in reference_list:
reference_text += reference +'\n'
reference_text += '</references>'
text = re.split('== Referenser ==', text)
text = text[0] + reference_text + text[-1]
return text
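# Rough usage sketch, chaining the helpers above (the file name is an assumption):
#     cleaned = reference_sorter(textreader('article_wikitext.txt'))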
# In[134]:
# In[173]:
if __name__ == "__main__":
main()
| 23.75102 | 104 | 0.598385 |
d9ea7ffbac1c307ae6a48a478a94b12a44b81de1 | 3,325 | py | Python | backend/radar/engine/body_objects.py | me-anton/radar-app | cc7d1e876e0ce9b6173b6d7b484d5553e247166e | [
"MIT"
] | null | null | null | backend/radar/engine/body_objects.py | me-anton/radar-app | cc7d1e876e0ce9b6173b6d7b484d5553e247166e | [
"MIT"
] | null | null | null | backend/radar/engine/body_objects.py | me-anton/radar-app | cc7d1e876e0ce9b6173b6d7b484d5553e247166e | [
"MIT"
] | null | null | null | import logging
import json
from dataclasses import dataclass
from redis import Redis
from typing import Iterable, Tuple, List, Iterator, Union, Dict
from typing_extensions import TypedDict
from backend import settings
from caching.scripts import RedisScriptsPool
from share.metaclasses import Singleton
from radar.models import AlienBody
from radar.validation import validate_body_str_profile
logger = logging.getLogger(__name__)
BodiesUpdate = TypedDict('BodiesUpdate', {'dropped_keys': List[str],
'new_records': Dict[str, str]})
| 33.25 | 76 | 0.657143 |
d9eb0ee449a6b916e969b15c42a07550484f36ad | 959 | py | Python | djangocms_baseplugins/spacer/cms_plugins.py | benzkji/djangocms-baseplugins | 7f041a030ed93dcdec70e4ca777b841846b8f2f2 | [
"MIT"
] | 2 | 2019-04-14T01:31:22.000Z | 2020-03-05T13:06:57.000Z | djangocms_baseplugins/spacer/cms_plugins.py | benzkji/djangocms-baseplugins | 7f041a030ed93dcdec70e4ca777b841846b8f2f2 | [
"MIT"
] | 32 | 2017-04-04T09:28:06.000Z | 2021-08-18T16:23:02.000Z | djangocms_baseplugins/spacer/cms_plugins.py | bnzk/djangocms-baseplugins | 7f041a030ed93dcdec70e4ca777b841846b8f2f2 | [
"MIT"
] | null | null | null | # coding: utf-8
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django import forms
from django.utils.translation import ugettext_lazy as _
from djangocms_baseplugins.baseplugin import defaults
from djangocms_baseplugins.baseplugin.cms_plugins import BasePluginMixin
from djangocms_baseplugins.baseplugin.utils import get_fields_from_fieldsets, get_baseplugin_widgets
from . import conf
from .models import Spacer
plugin_pool.register_plugin(SpacerPlugin)
| 29.96875 | 100 | 0.788321 |
d9ec253823566d98d214c4860b8c8d8ac8c80515 | 2,188 | py | Python | python_utilities/plotting/util.py | sdaxen/python_utilities | 7b9d6cc21bfc31be83629d2ac02b27e886ebc2bb | [
"MIT"
] | 2 | 2020-04-13T20:17:36.000Z | 2020-05-12T01:13:12.000Z | python_utilities/plotting/util.py | sethaxen/python_utilities | 7b9d6cc21bfc31be83629d2ac02b27e886ebc2bb | [
"MIT"
] | 5 | 2015-10-20T22:57:51.000Z | 2017-09-07T01:10:23.000Z | python_utilities/plotting/util.py | sethaxen/python_utilities | 7b9d6cc21bfc31be83629d2ac02b27e886ebc2bb | [
"MIT"
] | 3 | 2015-08-17T17:55:41.000Z | 2018-09-19T13:56:42.000Z | """Utility functions for plotting.
Author: Seth Axen
E-mail: [email protected]"""
from collections import deque
import numpy as np
def rgb_to_hsv(rgb):
"""Convert RGB colors to HSV colors."""
r, g, b = tuple(map(float, rgb))
if any([r > 1, g > 1, b > 1]):
r /= 255.
g /= 255.
b /= 255.
mmax = max(r, g, b)
mmin = min(r, g, b)
c = mmax - mmin
if (c == 0.):
hp = 0.
elif (mmax == r):
hp = ((g - b) / c) % 6
elif (mmax == g):
hp = ((b - r) / c) + 2
elif (mmax == b):
hp = ((r - g) / c) + 4
h = 60 * hp
v = mmax
if (c == 0):
s = 0
else:
s = c / v
return (h, s, v)
def hsv_to_rgb(hsv):
"""Convert HSV colors to RGB colors."""
h, s, v = tuple(map(float, hsv))
c = v * s
m = v - c
hp = h / 60.
x = c * (1. - abs((hp % 2) - 1.))
hp = int(hp)
rgb = deque((c + m, x + m, m))
if (hp % 2):
rgb.reverse()
rgb.rotate((hp - 3) / 2)
else:
rgb.rotate(hp / 2)
return tuple(rgb)
def rgb_to_yuv(rgb):
"""Convert RGB colors to Y'UV colors, useful for comparison."""
rgbv = np.array(rgb).reshape(3, 1)
if np.any(rgbv > 1.):
rgbv = rgbv / 255.
yuv = np.dot(np.array([[ .299, .587, .114],
[-.14713, -.28886, .436],
[ .615, -.51499, -.10001]], dtype=np.double),
rgbv)
return list(yuv)
def yuv_to_rgb(yuv):
"""Convert Y'UV colors to RGB colors."""
yuvv = np.array(yuv).reshape(3, 1)
rgb = np.dot(np.array([[1., 0., 1.13983],
[1., -.39465, -.58060],
[1., 2.03211, 0.]], dtype=np.double),
yuvv)
return list(rgb)
def compute_yuv_dist(rgb1, rgb2):
"""Compute Euclidean Y'UV distance between RGB colors."""
yuv1 = rgb_to_yuv(rgb1)
yuv2 = rgb_to_yuv(rgb2)
return float(sum((np.array(yuv1) - np.array(yuv2))**2)**.5)
def lighten_rgb(rgb, p=0.):
"""Lighten RGB colors by percentage p of total."""
h, s, v = rgb_to_hsv(rgb)
hsv = (h, s, min(1, v + p))
return hsv_to_rgb(hsv)
| 24.863636 | 74 | 0.472121 |
d9ec2cc7a1a6ba6f4583fe5b1a6bc53ffc63f837 | 618 | py | Python | tests/test_process.py | confluentinc/utils-core | 6001b4c61f7d923d273a23dc5a1580e0fa277d2c | [
"MIT"
] | null | null | null | tests/test_process.py | confluentinc/utils-core | 6001b4c61f7d923d273a23dc5a1580e0fa277d2c | [
"MIT"
] | null | null | null | tests/test_process.py | confluentinc/utils-core | 6001b4c61f7d923d273a23dc5a1580e0fa277d2c | [
"MIT"
] | 1 | 2021-01-14T11:33:35.000Z | 2021-01-14T11:33:35.000Z | import pytest
from utils.process import run, silent_run, RunError
from utils.fs import in_temp_dir
| 24.72 | 81 | 0.600324 |
d9ec7fb034397cf9a445f613d02c81768a1461eb | 3,410 | py | Python | bokeh/client/util.py | areaweb/bokeh | 9d131e45d626a912e85aee5b2647139c194dc893 | [
"BSD-3-Clause"
] | 1 | 2021-01-31T22:13:13.000Z | 2021-01-31T22:13:13.000Z | bokeh/client/util.py | adsbxchange/bokeh | 47aa8f8420944c47e876c1c36be182d257c14b87 | [
"BSD-3-Clause"
] | 1 | 2017-01-12T00:37:38.000Z | 2017-01-12T00:37:38.000Z | bokeh/client/util.py | adsbxchange/bokeh | 47aa8f8420944c47e876c1c36be182d257c14b87 | [
"BSD-3-Clause"
] | null | null | null | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Internal utility functions used by ``bokeh.client``
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
from bokeh.util.api import public, internal ; public, internal
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Public API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Internal API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| 31.574074 | 82 | 0.389443 |
d9ed79fef6ca74a4e312f154a876ffa2123179f7 | 16,276 | py | Python | slim/nets/inception_resnet_v2.py | PPTMiao/mtl-ssl | b61449c3f902414304657de6ec217077e441a6b9 | [
"Apache-2.0"
] | 90 | 2019-06-12T06:11:39.000Z | 2022-03-21T22:28:38.000Z | slim/nets/inception_resnet_v2.py | PPTMiao/mtl-ssl | b61449c3f902414304657de6ec217077e441a6b9 | [
"Apache-2.0"
] | 3 | 2020-03-24T17:01:25.000Z | 2021-02-02T22:00:11.000Z | slim/nets/inception_resnet_v2.py | PPTMiao/mtl-ssl | b61449c3f902414304657de6ec217077e441a6b9 | [
"Apache-2.0"
] | 17 | 2019-06-15T08:49:46.000Z | 2022-01-24T06:46:23.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the definition of the Inception Resnet V2 architecture.
As described in http://arxiv.org/abs/1602.07261.
Inception-v4, Inception-ResNet and the Impact of Residual Connections
on Learning
Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
def block35(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
"""Builds the 35x35 resnet block."""
with tf.variable_scope(scope, 'Block35', [net], reuse=reuse):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 32, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 32, 3, scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
tower_conv2_0 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2_0, 48, 3, scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(tower_conv2_1, 64, 3, scope='Conv2d_0c_3x3')
mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_1, tower_conv2_2])
up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
activation_fn=None, scope='Conv2d_1x1')
net += scale * up
if activation_fn:
net = activation_fn(net)
return net
def block17(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
"""Builds the 17x17 resnet block."""
with tf.variable_scope(scope, 'Block17', [net], reuse=reuse):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 192, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 128, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 160, [1, 7],
scope='Conv2d_0b_1x7')
tower_conv1_2 = slim.conv2d(tower_conv1_1, 192, [7, 1],
scope='Conv2d_0c_7x1')
mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_2])
up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
activation_fn=None, scope='Conv2d_1x1')
net += scale * up
if activation_fn:
net = activation_fn(net)
return net
def block8(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
"""Builds the 8x8 resnet block."""
with tf.variable_scope(scope, 'Block8', [net], reuse=reuse):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 192, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 192, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 224, [1, 3],
scope='Conv2d_0b_1x3')
tower_conv1_2 = slim.conv2d(tower_conv1_1, 256, [3, 1],
scope='Conv2d_0c_3x1')
mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_2])
up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
activation_fn=None, scope='Conv2d_1x1')
net += scale * up
if activation_fn:
net = activation_fn(net)
return net
def inception_resnet_v2_base(inputs,
final_endpoint='Conv2d_7b_1x1',
output_stride=16,
align_feature_maps=False,
scope=None):
"""Inception model from http://arxiv.org/abs/1602.07261.
Constructs an Inception Resnet v2 network from inputs to the given final
endpoint. This method can construct the network up to the final inception
block Conv2d_7b_1x1.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
final_endpoint: specifies the endpoint to construct the network up to. It
can be one of ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3', 'MaxPool_5a_3x3',
'Mixed_5b', 'Mixed_6a', 'PreAuxLogits', 'Mixed_7a', 'Conv2d_7b_1x1']
output_stride: A scalar that specifies the requested ratio of input to
output spatial resolution. Only supports 8 and 16.
align_feature_maps: When true, changes all the VALID paddings in the network
to SAME padding so that the feature maps are aligned.
scope: Optional variable_scope.
Returns:
tensor_out: output tensor corresponding to the final_endpoint.
end_points: a set of activations for external use, for example summaries or
losses.
Raises:
ValueError: if final_endpoint is not set to one of the predefined values,
or if the output_stride is not 8 or 16, or if the output_stride is 8 and
we request an end point after 'PreAuxLogits'.
"""
if output_stride != 8 and output_stride != 16:
raise ValueError('output_stride must be 8 or 16.')
padding = 'SAME' if align_feature_maps else 'VALID'
end_points = {}
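  # Reconstructed helper (added): this nested function is missing from the excerpt,
  # but every block below calls it. It records each endpoint and signals when the
  # requested final_endpoint has been built.
  def add_and_check_final(name, net):
    end_points[name] = net
    return name == final_endpoint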
with tf.variable_scope(scope, 'InceptionResnetV2', [inputs]):
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
# 149 x 149 x 32
net = slim.conv2d(inputs, 32, 3, stride=2, padding=padding,
scope='Conv2d_1a_3x3')
if add_and_check_final('Conv2d_1a_3x3', net): return net, end_points
# 147 x 147 x 32
net = slim.conv2d(net, 32, 3, padding=padding,
scope='Conv2d_2a_3x3')
if add_and_check_final('Conv2d_2a_3x3', net): return net, end_points
# 147 x 147 x 64
net = slim.conv2d(net, 64, 3, scope='Conv2d_2b_3x3')
if add_and_check_final('Conv2d_2b_3x3', net): return net, end_points
# 73 x 73 x 64
net = slim.max_pool2d(net, 3, stride=2, padding=padding,
scope='MaxPool_3a_3x3')
if add_and_check_final('MaxPool_3a_3x3', net): return net, end_points
# 73 x 73 x 80
net = slim.conv2d(net, 80, 1, padding=padding,
scope='Conv2d_3b_1x1')
if add_and_check_final('Conv2d_3b_1x1', net): return net, end_points
# 71 x 71 x 192
net = slim.conv2d(net, 192, 3, padding=padding,
scope='Conv2d_4a_3x3')
if add_and_check_final('Conv2d_4a_3x3', net): return net, end_points
# 35 x 35 x 192
net = slim.max_pool2d(net, 3, stride=2, padding=padding,
scope='MaxPool_5a_3x3')
if add_and_check_final('MaxPool_5a_3x3', net): return net, end_points
# 35 x 35 x 320
with tf.variable_scope('Mixed_5b'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 96, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 48, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 64, 5,
scope='Conv2d_0b_5x5')
with tf.variable_scope('Branch_2'):
tower_conv2_0 = slim.conv2d(net, 64, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2_0, 96, 3,
scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(tower_conv2_1, 96, 3,
scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
tower_pool = slim.avg_pool2d(net, 3, stride=1, padding='SAME',
scope='AvgPool_0a_3x3')
tower_pool_1 = slim.conv2d(tower_pool, 64, 1,
scope='Conv2d_0b_1x1')
net = tf.concat(
[tower_conv, tower_conv1_1, tower_conv2_2, tower_pool_1], 3)
if add_and_check_final('Mixed_5b', net): return net, end_points
# TODO(alemi): Register intermediate endpoints
net = slim.repeat(net, 10, block35, scale=0.17)
# 17 x 17 x 1088 if output_stride == 8,
# 33 x 33 x 1088 if output_stride == 16
use_atrous = output_stride == 8
with tf.variable_scope('Mixed_6a'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 384, 3, stride=1 if use_atrous else 2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 256, 3,
scope='Conv2d_0b_3x3')
tower_conv1_2 = slim.conv2d(tower_conv1_1, 384, 3,
stride=1 if use_atrous else 2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
tower_pool = slim.max_pool2d(net, 3, stride=1 if use_atrous else 2,
padding=padding,
scope='MaxPool_1a_3x3')
net = tf.concat([tower_conv, tower_conv1_2, tower_pool], 3)
if add_and_check_final('Mixed_6a', net): return net, end_points
# TODO(alemi): register intermediate endpoints
with slim.arg_scope([slim.conv2d], rate=2 if use_atrous else 1):
net = slim.repeat(net, 20, block17, scale=0.10)
if add_and_check_final('PreAuxLogits', net): return net, end_points
if output_stride == 8:
# TODO(gpapan): Properly support output_stride for the rest of the net.
raise ValueError('output_stride==8 is only supported up to the '
'PreAuxlogits end_point for now.')
# 8 x 8 x 2080
with tf.variable_scope('Mixed_7a'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv_1 = slim.conv2d(tower_conv, 384, 3, stride=2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
tower_conv1 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1, 288, 3, stride=2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
tower_conv2 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2, 288, 3,
scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(tower_conv2_1, 320, 3, stride=2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_3'):
tower_pool = slim.max_pool2d(net, 3, stride=2,
padding=padding,
scope='MaxPool_1a_3x3')
net = tf.concat(
[tower_conv_1, tower_conv1_1, tower_conv2_2, tower_pool], 3)
if add_and_check_final('Mixed_7a', net): return net, end_points
# TODO(alemi): register intermediate endpoints
net = slim.repeat(net, 9, block8, scale=0.20)
net = block8(net, activation_fn=None)
# 8 x 8 x 1536
net = slim.conv2d(net, 1536, 1, scope='Conv2d_7b_1x1')
if add_and_check_final('Conv2d_7b_1x1', net): return net, end_points
  raise ValueError('final_endpoint (%s) not recognized' % final_endpoint)
def inception_resnet_v2(inputs, num_classes=1001, is_training=True,
dropout_keep_prob=0.8,
reuse=None,
scope='InceptionResnetV2',
create_aux_logits=True):
"""Creates the Inception Resnet V2 model.
Args:
inputs: a 4-D tensor of size [batch_size, height, width, 3].
num_classes: number of predicted classes.
is_training: whether is training or not.
dropout_keep_prob: float, the fraction to keep before final layer.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional variable_scope.
create_aux_logits: Whether to include the auxilliary logits.
Returns:
logits: the logits outputs of the model.
end_points: the set of end_points from the inception model.
"""
end_points = {}
with tf.variable_scope(scope, 'InceptionResnetV2', [inputs, num_classes],
reuse=reuse) as scope:
with slim.arg_scope([slim.batch_norm, slim.dropout],
is_training=is_training):
net, end_points = inception_resnet_v2_base(inputs, scope=scope)
if create_aux_logits:
with tf.variable_scope('AuxLogits'):
aux = end_points['PreAuxLogits']
aux = slim.avg_pool2d(aux, 5, stride=3, padding='VALID',
scope='Conv2d_1a_3x3')
aux = slim.conv2d(aux, 128, 1, scope='Conv2d_1b_1x1')
aux = slim.conv2d(aux, 768, aux.get_shape()[1:3],
padding='VALID', scope='Conv2d_2a_5x5')
aux = slim.flatten(aux)
aux = slim.fully_connected(aux, num_classes, activation_fn=None,
scope='Logits')
end_points['AuxLogits'] = aux
with tf.variable_scope('Logits'):
net = slim.avg_pool2d(net, net.get_shape()[1:3], padding='VALID',
scope='AvgPool_1a_8x8')
net = slim.flatten(net)
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='Dropout')
end_points['PreLogitsFlatten'] = net
logits = slim.fully_connected(net, num_classes, activation_fn=None,
scope='Logits')
end_points['Logits'] = logits
end_points['Predictions'] = tf.nn.softmax(logits, name='Predictions')
return logits, end_points
inception_resnet_v2.default_image_size = 299
def inception_resnet_v2_arg_scope(weight_decay=0.00004,
batch_norm_decay=0.9997,
batch_norm_epsilon=0.001,
trainable=True):
"""Returns the scope with the default parameters for inception_resnet_v2.
Args:
weight_decay: the weight decay for weights variables.
batch_norm_decay: decay for the moving average of batch_norm momentums.
batch_norm_epsilon: small float added to variance to avoid dividing by zero.
Returns:
a arg_scope with the parameters needed for inception_resnet_v2.
"""
# Set weight_decay for weights in conv2d and fully_connected layers.
with slim.arg_scope([slim.conv2d, slim.fully_connected],
weights_regularizer=slim.l2_regularizer(weight_decay),
biases_regularizer=slim.l2_regularizer(weight_decay),
trainable=trainable):
batch_norm_params = {
'decay': batch_norm_decay,
'epsilon': batch_norm_epsilon,
'trainable': trainable
}
# Set activation_fn and parameters for batch_norm.
with slim.arg_scope([slim.conv2d], activation_fn=tf.nn.relu,
normalizer_fn=slim.batch_norm,
normalizer_params=batch_norm_params) as scope:
return scope
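# Example usage (added sketch, not part of the original file): build the network
# under its arg_scope; inputs are expected as 299x299x3 images.
#
#   images = tf.placeholder(tf.float32, [None, 299, 299, 3])
#   with slim.arg_scope(inception_resnet_v2_arg_scope()):
#     logits, end_points = inception_resnet_v2(images, num_classes=1001,
#                                              is_training=False)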
| 45.085873 | 80 | 0.616552 |
d9ee27c57dbf76a3c2165139cae647ead0e58c46 | 6,479 | py | Python | tests/boilerplate_client/boilerplate_cmd.py | LedgerHQ/ledger-app-neo3 | 48e1e0dec3e4801fc3ab1b07c4fe4ed86735a642 | [
"MIT"
] | null | null | null | tests/boilerplate_client/boilerplate_cmd.py | LedgerHQ/ledger-app-neo3 | 48e1e0dec3e4801fc3ab1b07c4fe4ed86735a642 | [
"MIT"
] | 5 | 2021-09-13T16:41:52.000Z | 2022-01-12T16:00:21.000Z | tests/boilerplate_client/boilerplate_cmd.py | isabella232/app-neo3 | c48ec5032143fe606d694372c2cfc02082b2ce03 | [
"MIT"
] | 3 | 2021-09-01T11:40:09.000Z | 2022-03-06T06:45:13.000Z | import struct
from typing import Tuple
from ledgercomm import Transport
from boilerplate_client.boilerplate_cmd_builder import BoilerplateCommandBuilder, InsType
from boilerplate_client.button import Button
from boilerplate_client.exception import DeviceException
from boilerplate_client.transaction import Transaction
from neo3.network import payloads
| 34.462766 | 131 | 0.533416 |
d9efa4ffda8cacd286187e29ce110d292c7a1e64 | 946 | py | Python | clpy/sparse/util.py | fixstars/clpy | 693485f85397cc110fa45803c36c30c24c297df0 | [
"BSD-3-Clause"
] | 142 | 2018-06-07T07:43:10.000Z | 2021-10-30T21:06:32.000Z | clpy/sparse/util.py | fixstars/clpy | 693485f85397cc110fa45803c36c30c24c297df0 | [
"BSD-3-Clause"
] | 282 | 2018-06-07T08:35:03.000Z | 2021-03-31T03:14:32.000Z | clpy/sparse/util.py | fixstars/clpy | 693485f85397cc110fa45803c36c30c24c297df0 | [
"BSD-3-Clause"
] | 19 | 2018-06-19T11:07:53.000Z | 2021-05-13T20:57:04.000Z | import clpy
import clpy.sparse.base
_preamble_atomic_add = '''
#if __CUDA_ARCH__ < 600
__device__ double atomicAdd(double* address, double val) {
    unsigned long long* address_as_ull = (unsigned long long*)address;
unsigned long long old = *address_as_ull, assumed;
do {
assumed = old;
old = atomicCAS(address_as_ull, assumed,
__double_as_longlong(val +
__longlong_as_double(assumed)));
} while (assumed != old);
return __longlong_as_double(old);
}
#endif
'''
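# Note (added): the preamble above emulates double-precision atomicAdd with an
# atomicCAS loop, because devices of compute capability < 6.0 (pre-Pascal) provide
# no native atomicAdd for doubles.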
| 24.25641 | 76 | 0.60148 |
d9efc68d74f0ff6411265258b8ee1094b0fa820e | 1,316 | py | Python | test/test_cartesian.py | hwazni/discopy | 812a4c77de4c766591bad74306720b518cdc54fc | [
"BSD-3-Clause"
] | 205 | 2019-12-29T09:45:09.000Z | 2022-03-24T09:29:13.000Z | test/test_cartesian.py | hwazni/discopy | 812a4c77de4c766591bad74306720b518cdc54fc | [
"BSD-3-Clause"
] | 61 | 2019-12-11T10:46:38.000Z | 2022-03-28T17:10:52.000Z | test/test_cartesian.py | hwazni/discopy | 812a4c77de4c766591bad74306720b518cdc54fc | [
"BSD-3-Clause"
] | 46 | 2020-04-08T23:33:31.000Z | 2022-03-18T21:58:35.000Z | from pytest import raises
from discopy.cartesian import *
| 25.803922 | 78 | 0.595745 |
d9f04eac1f39d4c14950ae0caf3dff21f18defd4 | 84,990 | py | Python | source/browseMode.py | neal-hub/nvda-test | 4c3a67b2eafa9721c5de3f671d10e60ab2d43865 | [
"bzip2-1.0.6"
] | 1 | 2022-02-20T23:10:39.000Z | 2022-02-20T23:10:39.000Z | source/browseMode.py | neal-hub/nvda-test | 4c3a67b2eafa9721c5de3f671d10e60ab2d43865 | [
"bzip2-1.0.6"
] | null | null | null | source/browseMode.py | neal-hub/nvda-test | 4c3a67b2eafa9721c5de3f671d10e60ab2d43865 | [
"bzip2-1.0.6"
] | null | null | null | # A part of NonVisual Desktop Access (NVDA)
# Copyright (C) 2007-2021 NV Access Limited, Babbage B.V., James Teh, Leonard de Ruijter,
# Thomas Stivers, Accessolutions, Julien Cochuyt
# This file is covered by the GNU General Public License.
# See the file COPYING for more details.
from typing import Any, Callable, Union
import os
import itertools
import collections
import winsound
import time
import weakref
import wx
import core
from logHandler import log
import documentBase
import review
import scriptHandler
import eventHandler
import nvwave
import queueHandler
import gui
import ui
import cursorManager
from scriptHandler import script, isScriptWaiting, willSayAllResume
import aria
import controlTypes
from controlTypes import OutputReason
import config
import textInfos
import braille
import vision
import speech
from speech import sayAll
import treeInterceptorHandler
import inputCore
import api
import gui.guiHelper
from gui.dpiScalingHelper import DpiScalingHelperMixinWithoutInit
from NVDAObjects import NVDAObject
import gui.contextHelp
from abc import ABCMeta, abstractmethod
import globalVars
from typing import Optional
def reportPassThrough(treeInterceptor,onlyIfChanged=True):
"""Reports the pass through mode if it has changed.
@param treeInterceptor: The current Browse Mode treeInterceptor.
@type treeInterceptor: L{BrowseModeTreeInterceptor}
@param onlyIfChanged: if true reporting will not happen if the last reportPassThrough reported the same thing.
@type onlyIfChanged: bool
"""
if not onlyIfChanged or treeInterceptor.passThrough != reportPassThrough.last:
if config.conf["virtualBuffers"]["passThroughAudioIndication"]:
sound = "focusMode.wav" if treeInterceptor.passThrough else "browseMode.wav"
nvwave.playWaveFile(os.path.join(globalVars.appDir, "waves", sound))
else:
if treeInterceptor.passThrough:
# Translators: The mode to interact with controls in documents
ui.message(_("Focus mode"))
else:
# Translators: The mode that presents text in a flat representation
# that can be navigated with the cursor keys like in a text document
ui.message(_("Browse mode"))
reportPassThrough.last = treeInterceptor.passThrough
reportPassThrough.last = False
def rename(self,newName):
"""
Renames this item with the new name.
"""
raise NotImplementedError
def report(self,readUnit=None):
info=self.textInfo
# If we are dealing with a form field, ensure we don't read the whole content if it's an editable text.
if self.itemType == "formField":
if self.obj.role == controlTypes.Role.EDITABLETEXT:
readUnit = textInfos.UNIT_LINE
if readUnit:
fieldInfo = info.copy()
info.collapse()
info.move(readUnit, 1, endPoint="end")
if info.compareEndPoints(fieldInfo, "endToEnd") > 0:
# We've expanded past the end of the field, so limit to the end of the field.
info.setEndPoint(fieldInfo, "endToEnd")
speech.speakTextInfo(info, reason=OutputReason.QUICKNAV)
def activate(self):
self.textInfo.obj._activatePosition(info=self.textInfo)
def moveTo(self):
if self.document.passThrough and getattr(self, "obj", False):
if controlTypes.State.FOCUSABLE in self.obj.states:
self.obj.setFocus()
return
self.document.passThrough = False
reportPassThrough(self.document)
info = self.textInfo.copy()
info.collapse()
self.document._set_selection(info, reason=OutputReason.QUICKNAV)
def _getLabelForProperties(self, labelPropertyGetter: Callable[[str], Optional[Any]]):
"""
Fetches required properties for this L{TextInfoQuickNavItem} and constructs a label to be shown in an elements list.
This can be used by subclasses to implement the L{label} property.
@Param labelPropertyGetter: A callable taking 1 argument, specifying the property to fetch.
For example, if L{itemType} is landmark, the callable must return the landmark type when "landmark" is passed as the property argument.
Alternative property names might be name or value.
The callable must return None if the property doesn't exist.
An expected callable might be get method on a L{Dict},
or "lambda property: getattr(self.obj, property, None)" for an L{NVDAObject}.
"""
content = self.textInfo.text.strip()
if self.itemType == "heading":
# Output: displayed text of the heading.
return content
labelParts = None
name = labelPropertyGetter("name")
if self.itemType == "landmark":
landmark = aria.landmarkRoles.get(labelPropertyGetter("landmark"))
# Example output: main menu; navigation
labelParts = (name, landmark)
else:
role: Union[controlTypes.Role, int] = labelPropertyGetter("role")
role = controlTypes.Role(role)
roleText = role.displayString
			# Translators: Reported label in the elements list for an element which has no name and value
unlabeled = _("Unlabeled")
realStates = labelPropertyGetter("states")
labeledStates = " ".join(controlTypes.processAndLabelStates(role, realStates, OutputReason.FOCUS))
if self.itemType == "formField":
if role in (
controlTypes.Role.BUTTON,
controlTypes.Role.DROPDOWNBUTTON,
controlTypes.Role.TOGGLEBUTTON,
controlTypes.Role.SPLITBUTTON,
controlTypes.Role.MENUBUTTON,
controlTypes.Role.DROPDOWNBUTTONGRID,
controlTypes.Role.TREEVIEWBUTTON
):
# Example output: Mute; toggle button; pressed
labelParts = (content or name or unlabeled, roleText, labeledStates)
else:
# Example output: Find a repository...; edit; has auto complete; NVDA
labelParts = (name or unlabeled, roleText, labeledStates, content)
elif self.itemType in ("link", "button"):
# Example output: You have unread notifications; visited
labelParts = (content or name or unlabeled, labeledStates)
if labelParts:
label = "; ".join(lp for lp in labelParts if lp)
else:
label = content
return label
class BrowseModeTreeInterceptor(treeInterceptorHandler.TreeInterceptor):
scriptCategory = inputCore.SCRCAT_BROWSEMODE
_disableAutoPassThrough = False
APPLICATION_ROLES = (controlTypes.Role.APPLICATION, controlTypes.Role.DIALOG)
def event_treeInterceptor_gainFocus(self):
"""Triggered when this browse mode interceptor gains focus.
This event is only fired upon entering this treeInterceptor when it was not the current treeInterceptor before.
This is different to L{event_gainFocus}, which is fired when an object inside this treeInterceptor gains focus, even if that object is in the same treeInterceptor.
"""
reportPassThrough(self)
ALWAYS_SWITCH_TO_PASS_THROUGH_ROLES = frozenset({
controlTypes.Role.COMBOBOX,
controlTypes.Role.EDITABLETEXT,
controlTypes.Role.LIST,
controlTypes.Role.LISTITEM,
controlTypes.Role.SLIDER,
controlTypes.Role.TABCONTROL,
controlTypes.Role.MENUBAR,
controlTypes.Role.POPUPMENU,
controlTypes.Role.TREEVIEW,
controlTypes.Role.TREEVIEWITEM,
controlTypes.Role.SPINBUTTON,
controlTypes.Role.TABLEROW,
controlTypes.Role.TABLECELL,
controlTypes.Role.TABLEROWHEADER,
controlTypes.Role.TABLECOLUMNHEADER,
})
SWITCH_TO_PASS_THROUGH_ON_FOCUS_ROLES = frozenset({
controlTypes.Role.LISTITEM,
controlTypes.Role.RADIOBUTTON,
controlTypes.Role.TAB,
controlTypes.Role.MENUITEM,
controlTypes.Role.RADIOMENUITEM,
controlTypes.Role.CHECKMENUITEM,
})
IGNORE_DISABLE_PASS_THROUGH_WHEN_FOCUSED_ROLES = frozenset({
controlTypes.Role.MENUITEM,
controlTypes.Role.RADIOMENUITEM,
controlTypes.Role.CHECKMENUITEM,
controlTypes.Role.TABLECELL,
})
def shouldPassThrough(self, obj, reason: Optional[OutputReason] = None):
"""Determine whether pass through mode should be enabled (focus mode) or disabled (browse mode) for a given object.
@param obj: The object in question.
@type obj: L{NVDAObjects.NVDAObject}
@param reason: The reason for this query;
one of the output reasons, or C{None} for manual pass through mode activation by the user.
@return: C{True} if pass through mode (focus mode) should be enabled, C{False} if it should be disabled (browse mode).
"""
if reason and (
self.disableAutoPassThrough
or (reason == OutputReason.FOCUS and not config.conf["virtualBuffers"]["autoPassThroughOnFocusChange"])
or (reason == OutputReason.CARET and not config.conf["virtualBuffers"]["autoPassThroughOnCaretMove"])
):
# This check relates to auto pass through and auto pass through is disabled, so don't change the pass through state.
return self.passThrough
if reason == OutputReason.QUICKNAV:
return False
states = obj.states
role = obj.role
if controlTypes.State.EDITABLE in states and controlTypes.State.UNAVAILABLE not in states:
return True
# Menus sometimes get focus due to menuStart events even though they don't report as focused/focusable.
if not obj.isFocusable and controlTypes.State.FOCUSED not in states and role != controlTypes.Role.POPUPMENU:
return False
# many controls that are read-only should not switch to passThrough.
# However, there are exceptions.
if controlTypes.State.READONLY in states:
# #13221: For Slack message lists, and the MS Edge downloads window, switch to passthrough
# even though the list item and list are read-only, but focusable.
if (
role == controlTypes.Role.LISTITEM and controlTypes.State.FOCUSED in states
and obj.parent.role == controlTypes.Role.LIST and controlTypes.State.FOCUSABLE in obj.parent.states
):
return True
# Certain controls such as combo boxes and readonly edits are read-only but still interactive.
# #5118: read-only ARIA grids should also be allowed (focusable table cells, rows and headers).
if role not in (
controlTypes.Role.EDITABLETEXT, controlTypes.Role.COMBOBOX, controlTypes.Role.TABLEROW,
controlTypes.Role.TABLECELL, controlTypes.Role.TABLEROWHEADER, controlTypes.Role.TABLECOLUMNHEADER
):
return False
# Any roles or states for which we always switch to passThrough
if role in self.ALWAYS_SWITCH_TO_PASS_THROUGH_ROLES or controlTypes.State.EDITABLE in states:
return True
# focus is moving to this control. Perhaps after pressing tab or clicking a button that brings up a menu (via javascript)
if reason == OutputReason.FOCUS:
if role in self.SWITCH_TO_PASS_THROUGH_ON_FOCUS_ROLES:
return True
# If this is a focus change, pass through should be enabled for certain ancestor containers.
		# this is done last for performance considerations. Walking up through the parents could be costly
while obj and obj != self.rootNVDAObject:
if obj.role == controlTypes.Role.TOOLBAR:
return True
obj = obj.parent
return False
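	# Note (added): the checks above cascade from user overrides (disableAutoPassThrough
	# and the auto focus mode settings), through per-control heuristics (editability,
	# read-only handling, the ALWAYS/ON_FOCUS role sets), down to ancestor containers
	# such as toolbars.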
singleLetterNavEnabled=True #: Whether single letter navigation scripts should be active (true) or if these letters should fall to the application.
# Translators: the description for the toggleSingleLetterNavigation command in browse mode.
script_toggleSingleLetterNav.__doc__=_("Toggles single letter navigation on and off. When on, single letter keys in browse mode jump to various kinds of elements on the page. When off, these keys are passed to the application")
def _iterNodesByType(self,itemType,direction="next",pos=None):
"""
Yields L{QuickNavItem} objects representing the ordered positions in this document according to the type being searched for (e.g. link, heading, table etc).
@param itemType: the type being searched for (e.g. link, heading, table etc)
@type itemType: string
@param direction: the direction in which to search (next, previous, up)
@type direction: string
@param pos: the position in the document from where to start the search.
@type pos: Usually an L{textInfos.TextInfo}
@raise NotImplementedError: This type is not supported by this BrowseMode implementation
"""
raise NotImplementedError
# Translators: the description for the Elements List command in browse mode.
script_elementsList.__doc__ = _("Lists various types of elements in this document")
script_elementsList.ignoreTreeInterceptorPassThrough = True
def _activateNVDAObject(self, obj):
"""Activate an object in response to a user request.
This should generally perform the default action or click on the object.
@param obj: The object to activate.
@type obj: L{NVDAObjects.NVDAObject}
"""
try:
obj.doAction()
except NotImplementedError:
log.debugWarning("doAction not implemented")
# Translators: the description for the activatePosition script on browseMode documents.
script_activatePosition.__doc__ = _("Activates the current object in the document")
def _focusLastFocusableObject(self, activatePosition=False):
"""Used when auto focus focusable elements is disabled to sync the focus
to the browse mode cursor.
When auto focus focusable elements is disabled, NVDA doesn't focus elements
as the user moves the browse mode cursor. However, there are some cases
where the user always wants to interact with the focus; e.g. if they press
the applications key to open the context menu. In these cases, this method
is called first to sync the focus to the browse mode cursor.
"""
obj = self.currentFocusableNVDAObject
if obj!=self.rootNVDAObject and self._shouldSetFocusToObj(obj) and obj!= api.getFocusObject():
obj.setFocus()
# We might be about to activate or pass through a key which will cause
# this object to change (e.g. checking a check box). However, we won't
# actually get the focus event until after the change has occurred.
# Therefore, we must cache properties for speech before the change occurs.
speech.speakObject(obj, OutputReason.ONLYCACHE)
self._objPendingFocusBeforeActivate = obj
if activatePosition:
# Make sure we activate the object at the caret, which is not necessarily focusable.
self._activatePosition()
# Translators: the description for the passThrough script on browseMode documents.
script_passThrough.__doc__ = _("Passes gesture through to the application")
script_disablePassThrough.ignoreTreeInterceptorPassThrough = True
__gestures={
"kb:NVDA+f7": "elementsList",
"kb:enter": "activatePosition",
"kb:numpadEnter": "activatePosition",
"kb:space": "activatePosition",
"kb:NVDA+shift+space":"toggleSingleLetterNav",
"kb:escape": "disablePassThrough",
"kb:control+enter": "passThrough",
"kb:control+numpadEnter": "passThrough",
"kb:shift+enter": "passThrough",
"kb:shift+numpadEnter": "passThrough",
"kb:control+shift+enter": "passThrough",
"kb:control+shift+numpadEnter": "passThrough",
"kb:alt+enter": "passThrough",
"kb:alt+numpadEnter": "passThrough",
"kb:applications": "passThrough",
"kb:shift+applications": "passThrough",
"kb:shift+f10": "passThrough",
}
# Add quick navigation scripts.
qn = BrowseModeTreeInterceptor.addQuickNav
qn("heading", key="h",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading"))
qn("heading1", key="1",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 1"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 1"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 1"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 1"))
qn("heading2", key="2",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 2"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 2"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 2"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 2"))
qn("heading3", key="3",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 3"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 3"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 3"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 3"))
qn("heading4", key="4",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 4"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 4"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 4"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 4"))
qn("heading5", key="5",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 5"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 5"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 5"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 5"))
qn("heading6", key="6",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 6"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 6"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 6"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 6"))
qn("table", key="t",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next table"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next table"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous table"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous table"),
readUnit=textInfos.UNIT_LINE)
qn("link", key="k",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next link"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next link"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous link"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous link"))
qn("visitedLink", key="v",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next visited link"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next visited link"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous visited link"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous visited link"))
qn("unvisitedLink", key="u",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next unvisited link"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next unvisited link"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous unvisited link"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous unvisited link"))
qn("formField", key="f",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next form field"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next form field"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous form field"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous form field"))
qn("list", key="l",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next list"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next list"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous list"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous list"),
readUnit=textInfos.UNIT_LINE)
qn("listItem", key="i",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next list item"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next list item"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous list item"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous list item"))
qn("button", key="b",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next button"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next button"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous button"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous button"))
qn("edit", key="e",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next edit field"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next edit field"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous edit field"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous edit field"),
readUnit=textInfos.UNIT_LINE)
qn("frame", key="m",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next frame"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next frame"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous frame"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous frame"),
readUnit=textInfos.UNIT_LINE)
qn("separator", key="s",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next separator"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next separator"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous separator"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous separator"))
qn("radioButton", key="r",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next radio button"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next radio button"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous radio button"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous radio button"))
qn("comboBox", key="c",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next combo box"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next combo box"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous combo box"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous combo box"))
qn("checkBox", key="x",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next check box"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next check box"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous check box"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous check box"))
qn("graphic", key="g",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next graphic"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next graphic"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous graphic"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous graphic"))
qn("blockQuote", key="q",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next block quote"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next block quote"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous block quote"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous block quote"))
qn("notLinkBlock", key="n",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("skips forward past a block of links"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no more text after a block of links"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("skips backward past a block of links"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no more text before a block of links"),
readUnit=textInfos.UNIT_LINE)
qn("landmark", key="d",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next landmark"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next landmark"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous landmark"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous landmark"),
readUnit=textInfos.UNIT_LINE)
qn("embeddedObject", key="o",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next embedded object"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next embedded object"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous embedded object"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous embedded object"))
qn("annotation", key="a",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next annotation"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next annotation"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous annotation"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous annotation"))
qn("error", key="w",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next error"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next error"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous error"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous error"))
qn(
"article", key=None,
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next article"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next article"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous article"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous article")
)
qn(
"grouping", key=None,
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next grouping"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next grouping"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous grouping"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous grouping")
)
del qn
| 44.265625 | 229 | 0.740899 |
d9f0ba759404ab21f8b93c6f40fde8e030bbf8a1 | 12,904 | py | Python | qiskit_metal/qlibrary/qubits/Transmon_Interdigitated.py | PatrickSJacobs/qiskit-metal | 9628369c4b880d1e13199e559f898c5e0b96eecb | [
"Apache-2.0"
] | null | null | null | qiskit_metal/qlibrary/qubits/Transmon_Interdigitated.py | PatrickSJacobs/qiskit-metal | 9628369c4b880d1e13199e559f898c5e0b96eecb | [
"Apache-2.0"
] | null | null | null | qiskit_metal/qlibrary/qubits/Transmon_Interdigitated.py | PatrickSJacobs/qiskit-metal | 9628369c4b880d1e13199e559f898c5e0b96eecb | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
#from math import *
from math import sin, cos
from qiskit_metal import draw, Dict
from qiskit_metal.qlibrary.core.base import QComponent
import numpy as np
#from ... import config
#if not config.is_building_docs():
# from qiskit_metal import is_true
| 45.43662 | 93 | 0.577805 |
d9f1f15178cb9e26d9b4f91695b333a07eaa59d6 | 74,778 | py | Python | sqlova/model/nl2sql/wikisql_models.py | guotong1988/Rule-SQL | e826c0d659c8b35a72b64aa2b50d4d943fdd70f1 | [
"Apache-2.0"
] | 15 | 2019-07-25T12:13:31.000Z | 2020-10-17T13:42:58.000Z | sqlova/model/nl2sql/wikisql_models.py | guotong1988/Rule-SQL | e826c0d659c8b35a72b64aa2b50d4d943fdd70f1 | [
"Apache-2.0"
] | 1 | 2020-01-07T05:49:15.000Z | 2020-04-22T01:22:00.000Z | sqlova/model/nl2sql/wikisql_models.py | guotong1988/Rule-SQL | e826c0d659c8b35a72b64aa2b50d4d943fdd70f1 | [
"Apache-2.0"
] | 3 | 2019-10-01T09:14:35.000Z | 2020-07-18T08:39:48.000Z | # Copyright 2019-present NAVER Corp.
# Apache License v2.0
# Wonseok Hwang
import os, json
from copy import deepcopy
from matplotlib.pylab import *
import torch
import torch.nn as nn
import torch.nn.functional as F
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
from sqlova.utils.utils import topk_multi_dim
from sqlova.utils.utils_wikisql import *
# where column predict
# where op predict
def Loss_selectwhere_startend_v2(score_select_column, s_sa, s_wn, s_wc, s_wo,
s_wv, ground_truth_select_column, g_sa, g_wn, g_wc, g_wo, g_wvi):
"""
:param s_wv: score [ B, n_conds, T, score]
:param g_wn: [ B ]
:param g_wvi: [B, conds, pnt], e.g. [[[0, 6, 7, 8, 15], [0, 1, 2, 3, 4, 15]], [[0, 1, 2, 3, 16], [0, 7, 8, 9, 16]]]
:return:
"""
loss = 0
# loss += Loss_sc(score_select_column, ground_truth_select_column)
# loss += Loss_sa(s_sa, g_sa)
# loss += Loss_wn(s_wn, g_wn)
# loss += Loss_wc(s_wc, g_wc)
# loss += Loss_wo(s_wo, g_wn, g_wo)
# loss += Loss_wv_se(s_wv, g_wn, g_wvi)
return loss
def Loss_sw_se(score_select_column, s_sa, s_wn, s_wc, s_wo,
s_wv, ground_truth_select_column, g_sa, g_wn, g_wc, g_wo, g_wvi):
"""
:param s_wv: score [ B, n_conds, T, score]
:param g_wn: [ B ]
:param g_wvi: [B, conds, pnt], e.g. [[[0, 6, 7, 8, 15], [0, 1, 2, 3, 4, 15]], [[0, 1, 2, 3, 16], [0, 7, 8, 9, 16]]]
:return:
"""
loss = 0
loss += Loss_sc(score_select_column, ground_truth_select_column)
loss += Loss_sa(s_sa, g_sa)
loss += Loss_wn(s_wn, g_wn)
loss += Loss_wc(s_wc, g_wc)
loss += Loss_wo(s_wo, g_wn, g_wo)
loss += Loss_wv_se(s_wv, g_wn, g_wvi)
return loss
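# Note (added): the total loss above is simply the sum of the per-subtask losses
# defined below -- select-column, select-aggregation, where-number, where-column,
# where-operator and where-value start/end -- one cross-entropy (or BCE) term each.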
def Loss_sc(s_sc, g_sc):
loss = F.cross_entropy(s_sc, torch.tensor(g_sc).to(device))
return loss
def Loss_sa(s_sa, g_sa):
loss = F.cross_entropy(s_sa, torch.tensor(g_sa).to(device))
return loss
def Loss_wn(s_wn, g_wn):
loss = F.cross_entropy(s_wn, torch.tensor(g_wn).to(device))
return loss
def Loss_wc(s_wc, g_wc):
# Construct index matrix
bS, max_h_len = s_wc.shape
im = torch.zeros([bS, max_h_len]).to(device)
for b, g_wc1 in enumerate(g_wc):
for g_wc11 in g_wc1:
im[b, g_wc11] = 1.0
# Construct prob.
p = F.sigmoid(s_wc)
loss = F.binary_cross_entropy(p, im)
return loss
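# Example (added sketch, not from the original source): Loss_wc treats where-column
# selection as multi-label classification over the table header, hence the
# sigmoid + binary cross-entropy above. Shapes below are illustrative assumptions:
#   s_wc = torch.randn(2, 5)      # 2 questions, 5 header columns
#   g_wc = [[0, 2], [4]]          # gold where-columns per question
#   loss = Loss_wc(s_wc, g_wc)    # scalar tensor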
def Loss_wo(s_wo, g_wn, g_wo):
# Construct index matrix
loss = 0
for b, g_wn1 in enumerate(g_wn):
if g_wn1 == 0:
continue
g_wo1 = g_wo[b]
s_wo1 = s_wo[b]
loss += F.cross_entropy(s_wo1[:g_wn1], torch.tensor(g_wo1).to(device))
return loss
def Loss_wv_se(s_wv, g_wn, g_wvi):
"""
    s_wv: [bS, 4, mL, 2], 4 stands for maximum # of condition, 2 stands for start & end logits.
g_wvi: [ [1, 3, 2], [4,3] ] (when B=2, wn(b=1) = 3, wn(b=2) = 2).
"""
loss = 0
# g_wvi = torch.tensor(g_wvi).to(device)
for b, g_wvi1 in enumerate(g_wvi):
# for i_wn, g_wvi11 in enumerate(g_wvi1):
g_wn1 = len(g_wvi1) #
# g_wn1 = g_wn[b] #
if g_wn1 == 0:
continue
g_wvi1 = torch.tensor(g_wvi1)[:g_wn1].to(device) #
g_st1 = g_wvi1[:,0]
g_ed1 = g_wvi1[:,1]
# loss from the start position
loss += F.cross_entropy(s_wv[b,:g_wn1,:,0], g_st1)
# print("st_login: ", s_wv[b,:g_wn1,:,0], g_st1, loss)
# loss from the end position
loss += F.cross_entropy(s_wv[b,:g_wn1,:,1], g_ed1)
# print("ed_login: ", s_wv[b,:g_wn1,:,1], g_ed1, loss)
return loss
# ========= Decoder-Layer ===========
# ============= Shallow-Layer ===============
def Loss_s2s(score, g_pnt_idxs):
"""
score = [B, T, max_seq_length]
"""
# WHERE string part
loss = 0
for b, g_pnt_idxs1 in enumerate(g_pnt_idxs):
ed = len(g_pnt_idxs1) - 1
score_part = score[b, :ed]
loss += F.cross_entropy(score_part, torch.tensor(g_pnt_idxs1[1:]).to(device)) # +1 shift.
return loss
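# Note (added): Loss_s2s scores each teacher-forced decoding step against the next
# gold pointer index; the [1:] shift drops the initial start-of-sequence pointer,
# which is given rather than predicted.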
| 39.419083 | 161 | 0.555163 |
d9f2ed71da13f5b57b61c1c386731f8180c40992 | 667 | py | Python | www/app.py | leeeGreat/xlw_study_python | 03d8eb59f6826b4689d6598ede6393ecbb5058fb | [
"MIT"
] | 1 | 2018-03-12T12:29:21.000Z | 2018-03-12T12:29:21.000Z | www/app.py | leeeGreat/xlw_study_python | 03d8eb59f6826b4689d6598ede6393ecbb5058fb | [
"MIT"
] | null | null | null | www/app.py | leeeGreat/xlw_study_python | 03d8eb59f6826b4689d6598ede6393ecbb5058fb | [
"MIT"
] | 1 | 2018-04-13T13:26:50.000Z | 2018-04-13T13:26:50.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'Michael Liao'
'''
async web application.
'''
import logging; logging.basicConfig(level=logging.INFO)
import asyncio, os, json, time
from datetime import datetime
from aiohttp import web
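# Reconstructed sketch (added): the original `init` coroutine is missing from this
# excerpt, but run_until_complete(init(loop)) below requires one. The handler,
# route, host and port here are assumptions, written against the older aiohttp API
# this tutorial-era module uses.
async def index(request):
    return web.Response(body=b'<h1>Awesome</h1>', content_type='text/html')
async def init(loop):
    app = web.Application(loop=loop)
    app.router.add_route('GET', '/', index)
    srv = await loop.create_server(app.make_handler(), '127.0.0.1', 9000)
    logging.info('server started at http://127.0.0.1:9000...')
    return srv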
loop = asyncio.get_event_loop()
loop.run_until_complete(init(loop))
loop.run_forever()
| 22.233333 | 73 | 0.698651 |
d9f306cc03073671d285f885169a3fe6dd743eef | 684 | py | Python | examples/Testing/flopy3_plotdata.py | ritchie46/flopy | 8e7284dcb3aaf5c12293d442248c2c2d9959f835 | [
"CC0-1.0",
"BSD-3-Clause"
] | 1 | 2021-03-17T09:15:54.000Z | 2021-03-17T09:15:54.000Z | examples/Testing/flopy3_plotdata.py | ritchie46/flopy | 8e7284dcb3aaf5c12293d442248c2c2d9959f835 | [
"CC0-1.0",
"BSD-3-Clause"
] | null | null | null | examples/Testing/flopy3_plotdata.py | ritchie46/flopy | 8e7284dcb3aaf5c12293d442248c2c2d9959f835 | [
"CC0-1.0",
"BSD-3-Clause"
] | 1 | 2021-08-05T19:11:27.000Z | 2021-08-05T19:11:27.000Z | from __future__ import print_function
import os
import numpy as np
import matplotlib.pyplot as plt
import flopy
fb = flopy.modflow.Modflow.load('freyberg', version='mf2005', model_ws=os.path.join('..', 'data', 'freyberg'), verbose=True)
dis = fb.dis
top = fb.dis.top
fb.dis.top.plot(grid=True, colorbar=True)
fb.dis.botm.plot(grid=True, colorbar=True)
fb.dis.plot()
plt.show()
fb.dis.plot()
plt.show()
fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(1,2,1, aspect='equal')
fb.dis.top.plot(grid=True, axes=ax, colorbar=True)
ax = fig.add_subplot(1,2,2, aspect='equal')
fb.dis.botm.plot(grid=True, axes=ax, colorbar=True)
plt.show()
print('this is the end my friend') | 20.727273 | 124 | 0.71345 |
d9f32d2b9e677d6893c7269bf23bcedaa4e7f68a | 363 | py | Python | chia/components/sample_transformers/__init__.py | cabrust/chia | 3eaf815b261dc8a85d64fd698e0079515ec0dde9 | [
"BSD-3-Clause"
] | null | null | null | chia/components/sample_transformers/__init__.py | cabrust/chia | 3eaf815b261dc8a85d64fd698e0079515ec0dde9 | [
"BSD-3-Clause"
] | 2 | 2021-10-06T13:19:09.000Z | 2021-10-20T17:32:36.000Z | chia/components/sample_transformers/__init__.py | cabrust/chia | 3eaf815b261dc8a85d64fd698e0079515ec0dde9 | [
"BSD-3-Clause"
] | null | null | null | from chia import components
from chia.components.sample_transformers import identity
from chia.components.sample_transformers.sample_transformer import SampleTransformer
__all__ = ["SampleTransformer", "SampleTransformerFactory"]
| 33 | 84 | 0.85124 |
d9f3cb72d610ec30e4ecf05d60ba2025dc849112 | 416 | py | Python | 3/3.6/add_guest.py | singi2016cn/python-scaffold | 274e508d1919da67e599aa73be139800c043bce4 | [
"MIT"
] | null | null | null | 3/3.6/add_guest.py | singi2016cn/python-scaffold | 274e508d1919da67e599aa73be139800c043bce4 | [
"MIT"
] | null | null | null | 3/3.6/add_guest.py | singi2016cn/python-scaffold | 274e508d1919da67e599aa73be139800c043bce4 | [
"MIT"
] | null | null | null | #
names = []
names.append('singi')
names.append('lily')
names.append('sam')
print('I find a big dining-table, I can invite more friends.')
names.insert(0, 'xiaoling')
names.insert(2, 'fangsi')
names.append('zhangqing')
greets = ',would you like to have dinner with me ?'
print(names[0]+greets)
print(names[1]+greets)
print(names[2]+greets)
print(names[3]+greets)
print(names[4]+greets)
print(names[5]+greets) | 20.8 | 61 | 0.711538 |
d9f53b3bd4af7f2d655423b3e5a97d903f5c6dac | 2,025 | py | Python | apps/pypi/tests/test_slurper.py | cartwheelweb/packaginator | f6ce11da22154bce9cba42e896989bdb0fd5e865 | [
"MIT"
] | 1 | 2015-11-08T11:31:09.000Z | 2015-11-08T11:31:09.000Z | apps/pypi/tests/test_slurper.py | cartwheelweb/packaginator | f6ce11da22154bce9cba42e896989bdb0fd5e865 | [
"MIT"
] | null | null | null | apps/pypi/tests/test_slurper.py | cartwheelweb/packaginator | f6ce11da22154bce9cba42e896989bdb0fd5e865 | [
"MIT"
] | null | null | null | from django.template.defaultfilters import slugify
from django.test import TestCase
from package.models import Package, Version
from pypi.slurper import Slurper
TEST_PACKAGE_NAME = 'Django'
TEST_PACKAGE_VERSION = '1.3'
TEST_PACKAGE_REPO_NAME = 'django-uni-form' | 39.705882 | 89 | 0.718025 |
d9f57949a15383ed2a070813678af904fe2e2df0 | 1,145 | py | Python | azure-mgmt-logic/azure/mgmt/logic/models/recurrence_schedule_occurrence.py | azuresdkci1x/azure-sdk-for-python-1722 | e08fa6606543ce0f35b93133dbb78490f8e6bcc9 | [
"MIT"
] | 1 | 2017-10-29T15:14:35.000Z | 2017-10-29T15:14:35.000Z | azure-mgmt-logic/azure/mgmt/logic/models/recurrence_schedule_occurrence.py | azuresdkci1x/azure-sdk-for-python-1722 | e08fa6606543ce0f35b93133dbb78490f8e6bcc9 | [
"MIT"
] | null | null | null | azure-mgmt-logic/azure/mgmt/logic/models/recurrence_schedule_occurrence.py | azuresdkci1x/azure-sdk-for-python-1722 | e08fa6606543ce0f35b93133dbb78490f8e6bcc9 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
| 34.69697 | 76 | 0.590393 |
d9f6bdae288edaa527af57b654eafa00cfa5047b | 11,757 | py | Python | pandas/core/apply.py | AakankshaAshok/pandas | 6498bc1e8a12003640139db4794bd5cd2462c116 | [
"BSD-3-Clause"
] | null | null | null | pandas/core/apply.py | AakankshaAshok/pandas | 6498bc1e8a12003640139db4794bd5cd2462c116 | [
"BSD-3-Clause"
] | null | null | null | pandas/core/apply.py | AakankshaAshok/pandas | 6498bc1e8a12003640139db4794bd5cd2462c116 | [
"BSD-3-Clause"
] | null | null | null | import inspect
import numpy as np
from pandas._libs import reduction as libreduction
from pandas.util._decorators import cache_readonly
from pandas.core.dtypes.common import (
is_dict_like,
is_extension_array_dtype,
is_list_like,
is_sequence,
)
from pandas.core.dtypes.generic import ABCSeries
def frame_apply(
obj,
func,
axis=0,
raw=False,
result_type=None,
ignore_failures=False,
args=None,
kwds=None,
):
""" construct and return a row or column based frame apply object """
axis = obj._get_axis_number(axis)
if axis == 0:
klass = FrameRowApply
elif axis == 1:
klass = FrameColumnApply
return klass(
obj,
func,
raw=raw,
result_type=result_type,
ignore_failures=ignore_failures,
args=args,
kwds=kwds,
)
def apply_empty_result(self):
"""
we have an empty result; at least 1 axis is 0
we will try to apply the function to an empty
series in order to see if this is a reduction function
"""
# we are not asked to reduce or infer reduction
# so just return a copy of the existing object
if self.result_type not in ["reduce", None]:
return self.obj.copy()
# we may need to infer
should_reduce = self.result_type == "reduce"
from pandas import Series
if not should_reduce:
try:
r = self.f(Series([]))
except Exception:
pass
else:
should_reduce = not isinstance(r, Series)
if should_reduce:
if len(self.agg_axis):
r = self.f(Series([]))
else:
r = np.nan
return self.obj._constructor_sliced(r, index=self.agg_axis)
else:
return self.obj.copy()
def apply_raw(self):
""" apply to the values as a numpy array """
try:
result = libreduction.compute_reduction(self.values, self.f, axis=self.axis)
except ValueError as err:
if "Function does not reduce" not in str(err):
# catch only ValueError raised intentionally in libreduction
raise
result = np.apply_along_axis(self.f, self.axis, self.values)
# TODO: mixed type case
if result.ndim == 2:
return self.obj._constructor(result, index=self.index, columns=self.columns)
else:
return self.obj._constructor_sliced(result, index=self.agg_axis)
class FrameRowApply(FrameApply):
axis = 0
def wrap_results_for_axis(self):
""" return the results for the rows """
results = self.results
result = self.obj._constructor(data=results)
if not isinstance(results[0], ABCSeries):
if len(result.index) == len(self.res_columns):
result.index = self.res_columns
if len(result.columns) == len(self.res_index):
result.columns = self.res_index
return result
class FrameColumnApply(FrameApply):
axis = 1
def wrap_results_for_axis(self):
""" return the results for the columns """
results = self.results
# we have requested to expand
if self.result_type == "expand":
result = self.infer_to_same_shape()
# we have a non-series and don't want inference
elif not isinstance(results[0], ABCSeries):
from pandas import Series
result = Series(results)
result.index = self.res_index
# we may want to infer results
else:
result = self.infer_to_same_shape()
return result
def infer_to_same_shape(self):
""" infer the results to the same shape as the input object """
results = self.results
result = self.obj._constructor(data=results)
result = result.T
# set the index
result.index = self.res_index
# infer dtypes
result = result.infer_objects()
return result
| 28.745721 | 88 | 0.575572 |
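# For context, the public behaviour implemented by the classes above (standard
# pandas API, shown as a quick illustration only):
import pandas as pd

df = pd.DataFrame({'a': [1, 2], 'b': [3, 4]})
df.apply(lambda col: col.sum())             # axis=0 -> FrameRowApply, one value per column
df.apply(lambda row: [row['a'], row['b']],  # axis=1 -> FrameColumnApply
         axis=1, result_type='expand')      # 'expand' spreads the lists into columns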
d9f7ffc0611459c276e6f9ae99c70b7e8ba1a1c3 | 707 | py | Python | tests/test_model/test_recognizer/test_shufflenetv1.py | YinAoXiong/ZCls | 8aeea3640f8456937db35d043e37cf2c03ac9017 | [
"Apache-2.0"
] | null | null | null | tests/test_model/test_recognizer/test_shufflenetv1.py | YinAoXiong/ZCls | 8aeea3640f8456937db35d043e37cf2c03ac9017 | [
"Apache-2.0"
] | null | null | null | tests/test_model/test_recognizer/test_shufflenetv1.py | YinAoXiong/ZCls | 8aeea3640f8456937db35d043e37cf2c03ac9017 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
@date: 2021/5/16 10:22
@file: test_shufflenetv1.py
@author: zj
@description:
"""
import torch
from zcls.config import cfg
from zcls.config.key_word import KEY_OUTPUT
from zcls.model.recognizers.build import build_recognizer
if __name__ == '__main__':
test_shufflenet()
| 19.638889 | 98 | 0.711457 |
d9f8dcb19533a96faaad26bde1b0790a5c363c97 | 142,263 | py | Python | autotest/gcore/vsis3.py | jpapadakis/gdal | f07aa15fd65af36b04291303cc6834c87f662814 | [
"MIT"
] | 18 | 2021-01-27T00:07:35.000Z | 2022-03-25T22:20:13.000Z | autotest/gcore/vsis3.py | jpapadakis/gdal | f07aa15fd65af36b04291303cc6834c87f662814 | [
"MIT"
] | 1 | 2015-04-14T00:19:57.000Z | 2015-04-14T00:29:29.000Z | autotest/gcore/vsis3.py | jpapadakis/gdal | f07aa15fd65af36b04291303cc6834c87f662814 | [
"MIT"
] | 1 | 2021-11-21T02:33:51.000Z | 2021-11-21T02:33:51.000Z | #!/usr/bin/env pytest
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test /vsis3
# Author: Even Rouault <even dot rouault at spatialys dot com>
#
###############################################################################
# Copyright (c) 2015, Even Rouault <even dot rouault at spatialys dot com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import json
import os.path
import stat
import sys
from osgeo import gdal
import gdaltest
import webserver
import pytest
def open_for_read(uri):
"""
Opens a test file for reading.
"""
return gdal.VSIFOpenExL(uri, 'rb', 1)
###############################################################################
###############################################################################
# Test AWS_NO_SIGN_REQUEST=YES
###############################################################################
# Test Sync() and multithreaded download
###############################################################################
# Test Sync() and multithreaded download and CHUNK_SIZE
###############################################################################
# Error cases
###############################################################################
###############################################################################
# Test with a fake AWS server
###############################################################################
# Test re-opening after changing configuration option (#2294)
###############################################################################
# Test ReadDir() with a fake AWS server
###############################################################################
# Test OpenDir() with a fake AWS server
###############################################################################
# Test simple PUT support with a fake AWS server
###############################################################################
# Test simple PUT support with retry logic
###############################################################################
# Test simple DELETE support with a fake AWS server
###############################################################################
# Test DeleteObjects with a fake AWS server
###############################################################################
# Test RmdirRecursive() with a fake AWS server
###############################################################################
# Test multipart upload with a fake AWS server
###############################################################################
# Test multipart upload with retry logic
###############################################################################
# Test Mkdir() / Rmdir()
###############################################################################
# Test handling of file and directory with same name
###############################################################################
# Test vsisync() with SYNC_STRATEGY=ETAG
###############################################################################
# Test vsisync() with SYNC_STRATEGY=TIMESTAMP
###############################################################################
# Test vsisync() with SYNC_STRATEGY=OVERWRITE
###############################################################################
# Test vsisync() with source and target in /vsis3
###############################################################################
# Test rename
###############################################################################
# Test rename
###############################################################################
# Test rename onto existing dir is not allowed
###############################################################################
# Test Sync() and multithreaded download and CHUNK_SIZE
###############################################################################
# Test reading/writing metadata
###############################################################################
# Test that we take into account directory listing to avoid useless
# requests
###############################################################################
# Test w+ access
###############################################################################
# Test w+ access
###############################################################################
# Test w+ access
###############################################################################
# Test w+ access
###############################################################################
# Read credentials from simulated ~/.aws/credentials
###############################################################################
# Read credentials from simulated ~/.aws/config
###############################################################################
# Read credentials from simulated ~/.aws/credentials and ~/.aws/config
###############################################################################
# Read credentials from simulated ~/.aws/credentials and ~/.aws/config with
# a non default profile
###############################################################################
# Read credentials from simulated ~/.aws/credentials and ~/.aws/config
###############################################################################
# Read credentials from simulated EC2 instance
###############################################################################
# Read credentials from simulated EC2 instance that only supports IMDSv1
###############################################################################
# Read credentials from simulated EC2 instance with expiration of the
# cached credentials
###############################################################################
###############################################################################
# Nominal cases (require valid credentials)
###############################################################################
| 40.974366 | 335 | 0.610749 |
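# Minimal illustration of the open_for_read() helper above; the bucket and key
# are hypothetical, and AWS_NO_SIGN_REQUEST skips the need for credentials.
gdal.SetConfigOption('AWS_NO_SIGN_REQUEST', 'YES')
f = open_for_read('/vsis3/some-public-bucket/some/key.tif')
if f is not None:
    header = gdal.VSIFReadL(1, 16, f)  # read the first 16 bytes
    gdal.VSIFCloseL(f)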
d9f92ab910680bac296e7b003e06e2747df83ea4 | 882 | py | Python | day06/part1.py | bugra-yilmaz/adventofcode2021 | 136cb1d4fba42af4eea934a73714c93710c8741e | [
"MIT"
] | null | null | null | day06/part1.py | bugra-yilmaz/adventofcode2021 | 136cb1d4fba42af4eea934a73714c93710c8741e | [
"MIT"
] | null | null | null | day06/part1.py | bugra-yilmaz/adventofcode2021 | 136cb1d4fba42af4eea934a73714c93710c8741e | [
"MIT"
] | null | null | null | import os.path
from collections import Counter
import pytest
INPUT_TXT = os.path.join(os.path.dirname(__file__), 'input.txt')
INPUT_S = '''\
3,4,3,1,2
'''
EXPECTED = 5934
if __name__ == '__main__':
raise SystemExit(main())
| 18.765957 | 64 | 0.580499 |
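# One way to get from INPUT_S to EXPECTED above (the original solution is not
# shown): count lanternfish timers in a Counter and simulate 80 days.
from collections import Counter

def simulate_lanternfish(s, days=80):
    timers = Counter(int(x) for x in s.strip().split(','))
    for _ in range(days):
        spawning = timers.pop(0, 0)
        timers = Counter({t - 1: n for t, n in timers.items()})
        timers[6] += spawning  # parents reset to 6
        timers[8] += spawning  # each parent spawns a new fish at 8
    return sum(timers.values())

# simulate_lanternfish('3,4,3,1,2') == 5934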
d9f9595b5ef66170be57096ea8261b3da13883ac | 132 | py | Python | functional_tests.py | gustavomazevedo/tbackup-client | eb2fdf75eff7abf17c9bce12920de793ba760f61 | [
"MIT"
] | null | null | null | functional_tests.py | gustavomazevedo/tbackup-client | eb2fdf75eff7abf17c9bce12920de793ba760f61 | [
"MIT"
] | null | null | null | functional_tests.py | gustavomazevedo/tbackup-client | eb2fdf75eff7abf17c9bce12920de793ba760f61 | [
"MIT"
] | null | null | null | from selenium import webdriver
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title | 22 | 36 | 0.780303 |
d9f9cd4e7a0b73e79eb71d2bdbfa755d69a9cc9d | 597 | py | Python | examples/first_char_last_column.py | clarkfitzg/sta141c | 129704ba0952a4b80f9b093dcfa49f49f37b052d | [
"MIT"
] | 24 | 2019-01-08T20:10:11.000Z | 2021-11-26T12:18:58.000Z | examples/first_char_last_column.py | timilchene/sta141c-winter19 | 129704ba0952a4b80f9b093dcfa49f49f37b052d | [
"MIT"
] | 1 | 2017-06-25T05:35:24.000Z | 2017-06-25T05:35:24.000Z | examples/first_char_last_column.py | timilchene/sta141c-winter19 | 129704ba0952a4b80f9b093dcfa49f49f37b052d | [
"MIT"
] | 22 | 2019-01-08T20:02:15.000Z | 2021-12-16T23:27:56.000Z | #!/usr/bin/env python3
"""
For the last column, print only the first character.
Usage:
$ printf "100,200\n0,\n" | python3 first_char_last_column.py
Should print "100,2\n0,"
"""
import csv
from sys import stdin, stdout
if __name__ == "__main__":
main()
| 19.258065 | 64 | 0.606365 |
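# One possible main() satisfying the docstring above (the original body is not
# shown in this excerpt): rewrite only the last column of each CSV row.
import csv
from sys import stdin, stdout

def main() -> int:
    writer = csv.writer(stdout, lineterminator='\n')
    for row in csv.reader(stdin):
        if row:
            row[-1] = row[-1][:1]  # keep just the first character
        writer.writerow(row)
    return 0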
d9fb744315858b3e553e097f0866c6de49262adf | 1,996 | py | Python | env_ci.py | reloadware/stickybeak | 8ac52a80849a3098fb6b2f47115970a734a73c14 | [
"Apache-2.0"
] | null | null | null | env_ci.py | reloadware/stickybeak | 8ac52a80849a3098fb6b2f47115970a734a73c14 | [
"Apache-2.0"
] | null | null | null | env_ci.py | reloadware/stickybeak | 8ac52a80849a3098fb6b2f47115970a734a73c14 | [
"Apache-2.0"
] | 1 | 2022-01-01T15:14:42.000Z | 2022-01-01T15:14:42.000Z | from pathlib import Path
root = Path(__file__).parent.absolute()
import envo
envo.add_source_roots([root])
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
from envo import Env, Namespace, env_var, logger, run
from env_comm import StickybeakCommEnv as ParentEnv
p = Namespace("p")
ThisEnv = StickybeakCiEnv
| 22.942529 | 117 | 0.613727 |
d9fb745b63e853aa5e221b1f87db67c0723efc2d | 394 | py | Python | zmq_srv.py | iyedb/boost_asio_zeromq | 63110c18540c8303ac29d574f25cba234a00a22d | [
"MIT"
] | 4 | 2015-04-07T06:00:34.000Z | 2019-09-10T01:45:41.000Z | zmq_srv.py | iyedb/boost_asio_zeromq | 63110c18540c8303ac29d574f25cba234a00a22d | [
"MIT"
] | null | null | null | zmq_srv.py | iyedb/boost_asio_zeromq | 63110c18540c8303ac29d574f25cba234a00a22d | [
"MIT"
] | 3 | 2015-06-30T07:37:41.000Z | 2019-09-10T01:45:47.000Z | from __future__ import print_function
import zmq
import time
ADDR='tcp://127.0.0.1:11155'
ctx = zmq.Context()
srv = ctx.socket(zmq.REP)
srv.bind(ADDR)
#srv.setsockopt(zmq.RCVTIMEO, 3000);
while True:
try:
msg = srv.recv()
except Exception as e:
    print('zmq socket recv timed out:', e)
else:
print('client says: %s' % msg)
srv.send('hi from server')
time.sleep(2)
| 17.130435 | 41 | 0.659898 |
d9fdf7b2da8d5e9203d4272f61f62e3af6000e66 | 10,408 | py | Python | mypy/server/aststrip.py | mmaryada27/mypy | 39103273d705fe45a55c4879779a0d5567f01876 | [
"PSF-2.0"
] | null | null | null | mypy/server/aststrip.py | mmaryada27/mypy | 39103273d705fe45a55c4879779a0d5567f01876 | [
"PSF-2.0"
] | null | null | null | mypy/server/aststrip.py | mmaryada27/mypy | 39103273d705fe45a55c4879779a0d5567f01876 | [
"PSF-2.0"
] | null | null | null | """Strip/reset AST in-place to match state after semantic analysis pass 1.
Fine-grained incremental mode reruns semantic analysis (passes 2 and 3)
and type checking for *existing* AST nodes (targets) when changes are
propagated using fine-grained dependencies. AST nodes attributes are
often changed during semantic analysis passes 2 and 3, and running
semantic analysis again on those nodes would produce incorrect
results, since these passes aren't idempotent. This pass resets AST
nodes to reflect the state after semantic analysis pass 1, so that we
can rerun semantic analysis.
(The above is in contrast to behavior with modules that have source code
changes, for which we reparse the entire module and reconstruct a fresh
AST. No stripping is required in this case. Both modes of operation should
have the same outcome.)
Notes:
* This is currently pretty fragile, as we must carefully undo whatever
changes can be made in semantic analysis passes 2 and 3, including changes
to symbol tables.
* We reuse existing AST nodes because it makes it relatively straightforward
to reprocess only a single target within a module efficiently. If there
was a way to parse a single target within a file, in time proportional to
the size of the target, we'd rather create fresh AST nodes than strip them.
Alas, no such facility exists and building it is non-trivial.
* Currently we don't actually reset all changes, but only those known to affect
non-idempotent semantic analysis behavior.
TODO: It would be more principled and less fragile to reset everything
changed in semantic analysis pass 2 and later.
* Reprocessing may recreate AST nodes (such as Var nodes, and TypeInfo nodes
created with assignment statements) that will get different identities from
the original AST. Thus running an AST merge is necessary after stripping,
even though some identities are preserved.
"""
import contextlib
from typing import Union, Iterator, Optional
from mypy.nodes import (
Node, FuncDef, NameExpr, MemberExpr, RefExpr, MypyFile, FuncItem, ClassDef, AssignmentStmt,
ImportFrom, Import, TypeInfo, SymbolTable, Var, CallExpr, Decorator, OverloadedFuncDef,
SuperExpr, UNBOUND_IMPORTED, GDEF, MDEF, IndexExpr
)
from mypy.traverser import TraverserVisitor
def strip_target(node: Union[MypyFile, FuncItem, OverloadedFuncDef]) -> None:
"""Reset a fine-grained incremental target to state after semantic analysis pass 1.
NOTE: Currently we opportunistically only reset changes that are known to otherwise
cause trouble.
"""
visitor = NodeStripVisitor()
if isinstance(node, MypyFile):
visitor.strip_file_top_level(node)
else:
node.accept(visitor)
# TODO: handle more node types
def is_self_member_ref(memberexpr: MemberExpr) -> bool:
"""Does memberexpr refer to an attribute of self?"""
# TODO: Merge with is_self_member_ref in semanal.py.
if not isinstance(memberexpr.expr, NameExpr):
return False
node = memberexpr.expr.node
return isinstance(node, Var) and node.is_self
| 41.13834 | 95 | 0.662952 |
d9fe5aa1f8632d451d56260ea6fb9079bd975a31 | 475 | py | Python | bsp/nrf5x/tools/sdk_dist.py | BreederBai/rt-thread | 53ed0314982556dfa9c5db75d4f3e02485d16ab5 | [
"Apache-2.0"
] | 7,482 | 2015-01-01T09:23:08.000Z | 2022-03-31T19:34:05.000Z | bsp/nrf5x/tools/sdk_dist.py | ArdaFu/rt-thread | eebb2561ec166e0016187c7b7998ada4f8212b3a | [
"Apache-2.0"
] | 2,543 | 2015-01-09T02:01:34.000Z | 2022-03-31T23:10:14.000Z | bsp/nrf5x/tools/sdk_dist.py | ArdaFu/rt-thread | eebb2561ec166e0016187c7b7998ada4f8212b3a | [
"Apache-2.0"
] | 4,645 | 2015-01-06T07:05:31.000Z | 2022-03-31T18:21:50.000Z | import os
import sys
import shutil
cwd_path = os.getcwd()
sys.path.append(os.path.join(os.path.dirname(cwd_path), 'rt-thread', 'tools'))
# BSP dist function
| 26.388889 | 78 | 0.734737 |
d9fe6882b9e62ad1b9764fdded272caab1b5cf79 | 9,991 | py | Python | lib/spack/spack/multimethod.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2020-09-10T22:50:08.000Z | 2021-01-12T22:18:54.000Z | lib/spack/spack/multimethod.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 17 | 2019-03-21T15:54:00.000Z | 2022-03-29T19:34:28.000Z | lib/spack/spack/multimethod.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2018-04-06T09:04:11.000Z | 2020-01-24T12:52:12.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""This module contains utilities for using multi-methods in
spack. You can think of multi-methods like overloaded methods --
they're methods with the same name, and we need to select a version
of the method based on some criteria. e.g., for overloaded
methods, you would select a version of the method to call based on
the types of its arguments.
In spack, multi-methods are used to ease the life of package
authors. They allow methods like install() (or other methods
called by install()) to declare multiple versions to be called when
the package is instantiated with different specs. e.g., if the
package is built with OpenMPI on x86_64, you might want to call a
different install method than if it was built for mpich2 on
BlueGene/Q. Likewise, you might want to do a different type of
install for different versions of the package.
Multi-methods provide a simple decorator-based syntax for this that
avoids overly complicated rat nests of if statements. Obviously,
depending on the scenario, regular old conditionals might be clearer,
so package authors should use their judgement.
"""
import functools
import inspect
from llnl.util.lang import caller_locals
import spack.architecture
import spack.error
from spack.spec import Spec
| 38.875486 | 77 | 0.651887 |
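# Illustrative sketch of how package authors typically consume this machinery,
# via the `when` decorator (the class name and version ranges are made up, and
# the exact import of `when` comes from Spack's package API):
#
#     class Example(Package):
#
#         @when('@:1.9')
#         def install(self, spec, prefix):
#             ...  # build steps for the 1.x series
#
#         @when('@2.0:')
#         def install(self, spec, prefix):
#             ...  # build steps for 2.0 and newer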
d9fe73cee8f0ad5d98f81eb365b256cba7970cbe | 13,093 | gyp | Python | third_party/protobuf/protobuf.gyp | meego-tablet-ux/meego-app-browser | 0f4ef17bd4b399c9c990a2f6ca939099495c2b9c | [
"BSD-3-Clause"
] | 1 | 2015-10-12T09:14:22.000Z | 2015-10-12T09:14:22.000Z | third_party/protobuf/protobuf.gyp | meego-tablet-ux/meego-app-browser | 0f4ef17bd4b399c9c990a2f6ca939099495c2b9c | [
"BSD-3-Clause"
] | null | null | null | third_party/protobuf/protobuf.gyp | meego-tablet-ux/meego-app-browser | 0f4ef17bd4b399c9c990a2f6ca939099495c2b9c | [
"BSD-3-Clause"
] | 1 | 2020-11-04T07:22:28.000Z | 2020-11-04T07:22:28.000Z | # Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'conditions': [
['OS!="win"', {
'variables': {
'config_h_dir':
'.', # crafted for gcc/linux.
},
}, { # else, OS=="win"
'variables': {
'config_h_dir':
'vsprojects', # crafted for msvc.
},
'target_defaults': {
'msvs_disabled_warnings': [
4018, # signed/unsigned mismatch in comparison
4244, # implicit conversion, possible loss of data
4355, # 'this' used in base member initializer list
],
'defines!': [
'WIN32_LEAN_AND_MEAN', # Protobuf defines this itself.
],
},
}]
],
'targets': [
# The "lite" lib is about 1/7th the size of the heavy lib,
# but it doesn't support some of the more exotic features of
# protobufs, like reflection. To generate C++ code that can link
# against the lite version of the library, add the option line:
#
# option optimize_for = LITE_RUNTIME;
#
# to your .proto file.
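    # For illustration, a minimal proto2 file opting into the lite runtime
    # might look like this (message and field names are made up):
    #
    #   option optimize_for = LITE_RUNTIME;
    #
    #   message Ping {
    #     optional int32 id = 1;
    #   }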
{
'target_name': 'protobuf_lite',
'type': '<(library)',
'toolsets': ['host', 'target'],
'sources': [
'src/google/protobuf/stubs/common.h',
'src/google/protobuf/stubs/once.h',
'src/google/protobuf/extension_set.h',
'src/google/protobuf/generated_message_util.h',
'src/google/protobuf/message_lite.h',
'src/google/protobuf/repeated_field.h',
'src/google/protobuf/unknown_field_set.cc',
'src/google/protobuf/unknown_field_set.h',
'src/google/protobuf/wire_format_lite.h',
'src/google/protobuf/wire_format_lite_inl.h',
'src/google/protobuf/io/coded_stream.h',
'src/google/protobuf/io/zero_copy_stream.h',
'src/google/protobuf/io/zero_copy_stream_impl_lite.h',
'src/google/protobuf/stubs/common.cc',
'src/google/protobuf/stubs/once.cc',
'src/google/protobuf/stubs/hash.h',
'src/google/protobuf/stubs/map-util.h',
'src/google/protobuf/stubs/stl_util-inl.h',
'src/google/protobuf/extension_set.cc',
'src/google/protobuf/generated_message_util.cc',
'src/google/protobuf/message_lite.cc',
'src/google/protobuf/repeated_field.cc',
'src/google/protobuf/wire_format_lite.cc',
'src/google/protobuf/io/coded_stream.cc',
'src/google/protobuf/io/coded_stream_inl.h',
'src/google/protobuf/io/zero_copy_stream.cc',
'src/google/protobuf/io/zero_copy_stream_impl_lite.cc',
'<(config_h_dir)/config.h',
],
'include_dirs': [
'<(config_h_dir)',
'src',
],
# This macro must be defined to suppress the use of dynamic_cast<>,
# which requires RTTI.
'defines': [
'GOOGLE_PROTOBUF_NO_RTTI',
],
'direct_dependent_settings': {
'include_dirs': [
'<(config_h_dir)',
'src',
],
'defines': [
'GOOGLE_PROTOBUF_NO_RTTI',
],
},
},
# This is the full, heavy protobuf lib that's needed for c++ .proto's
# that don't specify the LITE_RUNTIME option. The protocol
# compiler itself (protoc) falls into that category.
#
# DO NOT LINK AGAINST THIS TARGET IN CHROME CODE --agl
{
'target_name': 'protobuf_full_do_not_use',
'type': '<(library)',
'toolsets': ['host','target'],
'sources': [
'src/google/protobuf/descriptor.h',
'src/google/protobuf/descriptor.pb.h',
'src/google/protobuf/descriptor_database.h',
'src/google/protobuf/dynamic_message.h',
'src/google/protobuf/generated_message_reflection.h',
'src/google/protobuf/message.h',
'src/google/protobuf/reflection_ops.h',
'src/google/protobuf/service.h',
'src/google/protobuf/text_format.h',
'src/google/protobuf/unknown_field_set.h',
'src/google/protobuf/wire_format.h',
'src/google/protobuf/io/gzip_stream.h',
'src/google/protobuf/io/printer.h',
'src/google/protobuf/io/tokenizer.h',
'src/google/protobuf/io/zero_copy_stream_impl.h',
'src/google/protobuf/compiler/code_generator.h',
'src/google/protobuf/compiler/command_line_interface.h',
'src/google/protobuf/compiler/importer.h',
'src/google/protobuf/compiler/parser.h',
'src/google/protobuf/stubs/strutil.cc',
'src/google/protobuf/stubs/strutil.h',
'src/google/protobuf/stubs/substitute.cc',
'src/google/protobuf/stubs/substitute.h',
'src/google/protobuf/stubs/structurally_valid.cc',
'src/google/protobuf/descriptor.cc',
'src/google/protobuf/descriptor.pb.cc',
'src/google/protobuf/descriptor_database.cc',
'src/google/protobuf/dynamic_message.cc',
'src/google/protobuf/extension_set_heavy.cc',
'src/google/protobuf/generated_message_reflection.cc',
'src/google/protobuf/message.cc',
'src/google/protobuf/reflection_ops.cc',
'src/google/protobuf/service.cc',
'src/google/protobuf/text_format.cc',
'src/google/protobuf/unknown_field_set.cc',
'src/google/protobuf/wire_format.cc',
# This file pulls in zlib, but it's not actually used by protoc, so
# instead of compiling zlib for the host, let's just exclude this.
# 'src/src/google/protobuf/io/gzip_stream.cc',
'src/google/protobuf/io/printer.cc',
'src/google/protobuf/io/tokenizer.cc',
'src/google/protobuf/io/zero_copy_stream_impl.cc',
'src/google/protobuf/compiler/importer.cc',
'src/google/protobuf/compiler/parser.cc',
],
'dependencies': [
'protobuf_lite',
],
'export_dependent_settings': [
'protobuf_lite',
],
},
{
'target_name': 'protoc',
'type': 'executable',
'toolsets': ['host'],
'sources': [
'src/google/protobuf/compiler/code_generator.cc',
'src/google/protobuf/compiler/command_line_interface.cc',
'src/google/protobuf/compiler/plugin.cc',
'src/google/protobuf/compiler/plugin.pb.cc',
'src/google/protobuf/compiler/subprocess.cc',
'src/google/protobuf/compiler/subprocess.h',
'src/google/protobuf/compiler/zip_writer.cc',
'src/google/protobuf/compiler/zip_writer.h',
'src/google/protobuf/compiler/cpp/cpp_enum.cc',
'src/google/protobuf/compiler/cpp/cpp_enum.h',
'src/google/protobuf/compiler/cpp/cpp_enum_field.cc',
'src/google/protobuf/compiler/cpp/cpp_enum_field.h',
'src/google/protobuf/compiler/cpp/cpp_extension.cc',
'src/google/protobuf/compiler/cpp/cpp_extension.h',
'src/google/protobuf/compiler/cpp/cpp_field.cc',
'src/google/protobuf/compiler/cpp/cpp_field.h',
'src/google/protobuf/compiler/cpp/cpp_file.cc',
'src/google/protobuf/compiler/cpp/cpp_file.h',
'src/google/protobuf/compiler/cpp/cpp_generator.cc',
'src/google/protobuf/compiler/cpp/cpp_helpers.cc',
'src/google/protobuf/compiler/cpp/cpp_helpers.h',
'src/google/protobuf/compiler/cpp/cpp_message.cc',
'src/google/protobuf/compiler/cpp/cpp_message.h',
'src/google/protobuf/compiler/cpp/cpp_message_field.cc',
'src/google/protobuf/compiler/cpp/cpp_message_field.h',
'src/google/protobuf/compiler/cpp/cpp_primitive_field.cc',
'src/google/protobuf/compiler/cpp/cpp_primitive_field.h',
'src/google/protobuf/compiler/cpp/cpp_service.cc',
'src/google/protobuf/compiler/cpp/cpp_service.h',
'src/google/protobuf/compiler/cpp/cpp_string_field.cc',
'src/google/protobuf/compiler/cpp/cpp_string_field.h',
'src/google/protobuf/compiler/java/java_enum.cc',
'src/google/protobuf/compiler/java/java_enum.h',
'src/google/protobuf/compiler/java/java_enum_field.cc',
'src/google/protobuf/compiler/java/java_enum_field.h',
'src/google/protobuf/compiler/java/java_extension.cc',
'src/google/protobuf/compiler/java/java_extension.h',
'src/google/protobuf/compiler/java/java_field.cc',
'src/google/protobuf/compiler/java/java_field.h',
'src/google/protobuf/compiler/java/java_file.cc',
'src/google/protobuf/compiler/java/java_file.h',
'src/google/protobuf/compiler/java/java_generator.cc',
'src/google/protobuf/compiler/java/java_helpers.cc',
'src/google/protobuf/compiler/java/java_helpers.h',
'src/google/protobuf/compiler/java/java_message.cc',
'src/google/protobuf/compiler/java/java_message.h',
'src/google/protobuf/compiler/java/java_message_field.cc',
'src/google/protobuf/compiler/java/java_message_field.h',
'src/google/protobuf/compiler/java/java_primitive_field.cc',
'src/google/protobuf/compiler/java/java_primitive_field.h',
'src/google/protobuf/compiler/java/java_service.cc',
'src/google/protobuf/compiler/java/java_service.h',
'src/google/protobuf/compiler/java/java_string_field.cc',
'src/google/protobuf/compiler/java/java_string_field.h',
'src/google/protobuf/compiler/python/python_generator.cc',
'src/google/protobuf/compiler/main.cc',
],
'dependencies': [
'protobuf_full_do_not_use',
],
'include_dirs': [
'<(config_h_dir)',
'src/src',
],
},
{
# Generate the python module needed by all protoc-generated Python code.
'target_name': 'py_proto',
'type': 'none',
'copies': [
{
'destination': '<(PRODUCT_DIR)/pyproto/google/',
'files': [
# google/ module gets an empty __init__.py.
'__init__.py',
],
},
{
'destination': '<(PRODUCT_DIR)/pyproto/google/protobuf',
'files': [
'python/google/protobuf/__init__.py',
'python/google/protobuf/descriptor.py',
'python/google/protobuf/message.py',
'python/google/protobuf/reflection.py',
'python/google/protobuf/service.py',
'python/google/protobuf/service_reflection.py',
'python/google/protobuf/text_format.py',
# TODO(ncarter): protoc's python generator treats descriptor.proto
# specially, but it's not possible to trigger the special treatment
# unless you run protoc from ./src/src (the treatment is based
# on the path to the .proto file matching a constant exactly).
# I'm not sure how to convince gyp to execute a rule from a
# different directory. Until this is resolved, use a copy of
# descriptor_pb2.py that I manually generated.
'descriptor_pb2.py',
],
},
{
'destination': '<(PRODUCT_DIR)/pyproto/google/protobuf/internal',
'files': [
'python/google/protobuf/internal/__init__.py',
'python/google/protobuf/internal/api_implementation.py',
'python/google/protobuf/internal/containers.py',
'python/google/protobuf/internal/cpp_message.py',
'python/google/protobuf/internal/decoder.py',
'python/google/protobuf/internal/encoder.py',
'python/google/protobuf/internal/generator_test.py',
'python/google/protobuf/internal/message_listener.py',
'python/google/protobuf/internal/python_message.py',
'python/google/protobuf/internal/type_checkers.py',
'python/google/protobuf/internal/wire_format.py',
],
},
],
# # We can't generate a proper descriptor_pb2.py -- see earlier comment.
# 'rules': [
# {
# 'rule_name': 'genproto',
# 'extension': 'proto',
# 'inputs': [
# '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
# ],
# 'variables': {
# # The protoc compiler requires a proto_path argument with the
# # directory containing the .proto file.
# 'rule_input_relpath': 'src/google/protobuf',
# },
# 'outputs': [
# '<(PRODUCT_DIR)/pyproto/google/protobuf/<(RULE_INPUT_ROOT)_pb2.py',
# ],
# 'action': [
# '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
# '-I./src',
# '-I.',
# '--python_out=<(PRODUCT_DIR)/pyproto/google/protobuf',
# 'google/protobuf/descriptor.proto',
# ],
# 'message': 'Generating Python code from <(RULE_INPUT_PATH)',
# },
# ],
# 'dependencies': [
# 'protoc#host',
# ],
# 'sources': [
# 'src/google/protobuf/descriptor.proto',
# ],
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
| 41.302839 | 81 | 0.621554 |
d9ff0e5cd63921d7a1a7f3f682d268671ab38688 | 834 | py | Python | main.py | Yash-s-Code-Camp/Python-Day-4 | 887c4e172905b2b0dea493a3c9c1f61e403556fc | [
"MIT"
] | null | null | null | main.py | Yash-s-Code-Camp/Python-Day-4 | 887c4e172905b2b0dea493a3c9c1f61e403556fc | [
"MIT"
] | null | null | null | main.py | Yash-s-Code-Camp/Python-Day-4 | 887c4e172905b2b0dea493a3c9c1f61e403556fc | [
"MIT"
] | null | null | null | # def mul(a):
# return lambda b:b*a
# singler = mul(1) # addition = lambda b:b*1
# doubler = mul(2) # addition = lambda b:b*2
# tripler = mul(3) # addition = lambda b:b*3
# print(doubler(7)) # 7*2 = 14
# print(tripler(7)) # 7*3 = 21
# print(singler(7)) # 7*1 = 7
stud = BatchA("Thakor")
print(stud.printName())
rgb(255, 255, 255) # White
rgb(255, 0, 0) # Red
rgb(0, 0, 0) # Black
rgb(0, 255, 255) # Cyan
rgb(255, 255, 0) # Yellow
#00ff00 //green
#1e90ff //dodgerblue
| 17.020408 | 47 | 0.603118 |
8a00049d0a23118a6b45ced9a50bf455984aaa3c | 8,974 | py | Python | paperstream/create_diary.py | MarcoRosso/paperstream | f8d5485ea337334b036393f9566b74394b5dd234 | [
"MIT"
] | null | null | null | paperstream/create_diary.py | MarcoRosso/paperstream | f8d5485ea337334b036393f9566b74394b5dd234 | [
"MIT"
] | null | null | null | paperstream/create_diary.py | MarcoRosso/paperstream | f8d5485ea337334b036393f9566b74394b5dd234 | [
"MIT"
] | null | null | null | """
Create diaries in A5 and A4 sizes based on PDF templates.
Julio Vega
"""
import datetime
import math
import sys
from io import BytesIO
from pathlib import Path
from PyPDF2 import PdfFileReader, PdfFileWriter
from reportlab.lib.pagesizes import A5, A4
from reportlab.lib.utils import ImageReader
from reportlab.pdfbase import pdfmetrics
from reportlab.pdfbase.ttfonts import TTFError, TTFont
from reportlab.pdfgen import canvas
def resource_path(relative_path):
""" Get absolute path to resource, works for dev and for PyInstaller """
base_path = getattr(sys, '_MEIPASS', Path(__file__).resolve().parent)
return base_path / Path(relative_path)
CORNER_DIR = resource_path("input/1_diaries_to_create/resources")
LOGO_PATH = resource_path(CORNER_DIR / Path("logo.png"))
DEFAULT_FONT = resource_path(CORNER_DIR / Path('FreeSansLocal.ttf'))
CREATED_DIARIES_DIR = resource_path("output/created_diaries/")
#############################################################
#############################################################
#############################################################
##### Algorithm to convert A4 pages into an A5 booklet ######
#############################################################
#############################################################
#############################################################
## Adapted from the work by Luke Plant, https://bitbucket.org/spookylukey/booklet-maker/src
def build_booklet(pages):
''' Build booklet '''
# Double sized page, with double-sided printing, fits 4 of the original.
sheet_count = int(math.ceil(len(pages) / 4.0))
booklet = [Sheet() for i in range(0, sheet_count)]
# Assign input pages to sheets
# This is the core algo. To understand it:
# * pick up 3 A4 sheets, landscape
# * number the sheets from 1 to 3, starting with bottom one
# * fold the stack in the middle to form an A5 booklet
# * work out what order you need to use the front left,
# front right, back left and back right sides.
for container, page in zip(containers(), pages):
container.page = page
return booklet
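# Standalone sketch, independent of the Sheet/containers() helpers (which are
# not shown here): classic saddle-stitch imposition pairs 1-based page numbers
# per sheet as follows.
def imposition_order(n_pages):
    """[(front_left, front_right, back_left, back_right), ...] per sheet."""
    return [(n_pages - 2 * s, 2 * s + 1, 2 * s + 2, n_pages - 2 * s - 1)
            for s in range(n_pages // 4)]

# imposition_order(8) -> [(8, 1, 2, 7), (6, 3, 4, 5)]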
def add_double_page(writer, page_size, print_page):
''' Adds a double page '''
width, height = page_size
page = writer.insertBlankPage(width=width, height=height, index=writer.getNumPages())
# Merge the left page
l_page = print_page.left.page
if l_page is not None:
page.mergePage(l_page)
# Merge the right page with translation
r_page = print_page.right.page
if r_page is not None:
page.mergeTranslatedPage(r_page, width / 2, 0)
def convert_to_a5_booklet(input_file, blanks=0):
'''Converts a PDF into a double sided A5 file to print as an A4 (two A5 pages per A4 page)'''
# Create internal dir to save the a5 files
a5_booklets_dir = CREATED_DIARIES_DIR
Path.mkdir(a5_booklets_dir, parents=True, exist_ok=True)
# Create the a5 booklet's name
a5_booklet_name = Path(input_file).stem + "_as_a5_booklet"
a5_booklet = a5_booklets_dir / Path("{}.pdf".format(a5_booklet_name))
reader = PdfFileReader(open(input_file, "rb"))
pages = [reader.getPage(p) for p in range(0, reader.getNumPages())]
for index in range(0, blanks):
pages.insert(0, None)
sheets = build_booklet(pages)
writer = PdfFileWriter()
firs_page = reader.getPage(0)
input_width = firs_page.mediaBox.getWidth()
output_width = input_width * 2
input_height = firs_page.mediaBox.getHeight()
output_height = input_height
page_size = (output_width, output_height)
# We want to group fronts and backs together.
for sheet in sheets:
add_double_page(writer, page_size, sheet.back)
add_double_page(writer, page_size, sheet.front)
with open(a5_booklet, "wb") as a5_booklet_stream:
writer.write(a5_booklet_stream)
return a5_booklet
#############################################################
#############################################################
#############################################################
########## Create A4 paper diary ############
#############################################################
#############################################################
#############################################################
def create_diary_cover(participant_id, email, font):
'''Create cover of the A5 diary'''
packet = BytesIO()
cover_canvas = canvas.Canvas(packet, pagesize=A4)
width, height = A4
# Centering the logo or participant ID
if Path.exists(LOGO_PATH):
logo = ImageReader(LOGO_PATH)
cover_canvas.drawImage(logo, x=(width * (1/6.0)),
y=(height/4),
width=width * (4/6.0),
preserveAspectRatio=True,
mask='auto')
else:
cover_canvas.setFont(font, 50)
cover_canvas.drawCentredString(width/2, height/2, participant_id)
# Lost legend
if not (email is None or email == ""):
cover_canvas.setFont(font, 15)
cover_canvas.drawCentredString(width/2, 50,
"If you find this document, please email " + email)
cover_canvas.save()
packet.seek(0)
return PdfFileReader(packet).getPage(0)
def create_a4_diary(pdf_template, pages, top_left_text, email=None, font='Arial'):
"""Creates an A4 document with [PAGES] from [STARTING_DATE]"""
starting_date = parse_date(top_left_text)
font = set_active_font(font)
# Create output folder/file
if not Path(pdf_template).exists():
raise ValueError("Template does not exist {}".format(pdf_template))
Path.mkdir(CREATED_DIARIES_DIR, parents=True, exist_ok=True)
a4_document_name = Path(pdf_template).stem
a4_document_path = CREATED_DIARIES_DIR / Path("{}_document.pdf".format(a4_document_name))
pdf_file = PdfFileWriter()
# Cover
pdf_file.addPage(create_diary_cover(a4_document_name, email, font))
pdf_file.addBlankPage()
# Pages
for page in range(1, pages+1):
if starting_date is not None:
top_left_text = starting_date.strftime('%A, %d %b %Y')
starting_date += datetime.timedelta(days=1)
new_page = create_diary_page(pdf_template, font, top_left_text,page, a4_document_name)
pdf_file.addPage(new_page)
# Backcover
pdf_file.addBlankPage()
# Save a4 document
with open(a4_document_path, "wb") as output_stream:
pdf_file.write(output_stream)
return a4_document_path
def set_active_font(font):
"""Register the font to use in header and footer of the diary"""
try:
pdfmetrics.registerFont(TTFont(font, font + '.ttf'))
except TTFError:
font = 'FreeSansLocal'
pdfmetrics.registerFont(TTFont(font, DEFAULT_FONT))
return font | 33.864151 | 97 | 0.613885 |
8a009f467895ff4a7817d2ca2bfbdacdd183cb58 | 2,459 | py | Python | wextractor/extractors/csv_extractor.py | codeforamerica/w-drive-extractor | 1c62bfff6fc21c4cce4a4409b76355ec4e07daae | [
"MIT"
] | 3 | 2015-01-14T06:27:16.000Z | 2015-02-26T23:39:39.000Z | wextractor/extractors/csv_extractor.py | codeforamerica/w-drive-extractor | 1c62bfff6fc21c4cce4a4409b76355ec4e07daae | [
"MIT"
] | 8 | 2015-01-15T17:50:30.000Z | 2015-05-12T17:09:04.000Z | wextractor/extractors/csv_extractor.py | codeforamerica/w-drive-extractor | 1c62bfff6fc21c4cce4a4409b76355ec4e07daae | [
"MIT"
] | 4 | 2015-01-14T15:20:49.000Z | 2021-04-16T10:45:22.000Z | #!/usr/bin/env python
import urllib2
import httplib
from urlparse import urlparse
import csv
from wextractor.extractors.extractor import Extractor
| 34.152778 | 118 | 0.604311 |
8a01b2b39f8bda22480b43b79a5034c95f31f8f0 | 9,010 | py | Python | pyscf/geomopt/berny_solver.py | r-peng/pyscf | 9a14f9bcc63bc75f5939cb4d00eb47861d8d8989 | [
"Apache-2.0"
] | 2 | 2021-06-30T22:33:35.000Z | 2021-11-22T18:02:36.000Z | pyscf/geomopt/berny_solver.py | r-peng/pyscf | 9a14f9bcc63bc75f5939cb4d00eb47861d8d8989 | [
"Apache-2.0"
] | null | null | null | pyscf/geomopt/berny_solver.py | r-peng/pyscf | 9a14f9bcc63bc75f5939cb4d00eb47861d8d8989 | [
"Apache-2.0"
] | 2 | 2021-09-16T23:37:42.000Z | 2021-10-14T23:00:39.000Z | #!/usr/bin/env python
# Copyright 2014-2019 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Interface to geometry optimizer pyberny https://github.com/jhrmnn/pyberny
'''
from __future__ import absolute_import
import pkg_resources
try:
dist = pkg_resources.get_distribution('pyberny')
except pkg_resources.DistributionNotFound:
dist = None
if dist is None or [int(x) for x in dist.version.split('.')] < [0, 6, 2]:
msg = ('Geometry optimizer Pyberny not found or outdated. Install or update '
'with:\n\n\tpip install -U pyberny')
raise ImportError(msg)
import time
import numpy
import logging
from pyscf import lib
from pyscf.geomopt.addons import (as_pyscf_method, dump_mol_geometry,
symmetrize)
from pyscf import __config__
from pyscf.grad.rhf import GradientsBasics
from berny import Berny, geomlib, coords
# Overwrite pyberny's atomic unit
coords.angstrom = 1./lib.param.BOHR
INCLUDE_GHOST = getattr(__config__, 'geomopt_berny_solver_optimize_include_ghost', True)
ASSERT_CONV = getattr(__config__, 'geomopt_berny_solver_optimize_assert_convergence', True)
def to_berny_log(pyscf_log):
'''Adapter to allow pyberny to use pyscf.logger
'''
log = logging.getLogger('{}.{}'.format(__name__, id(pyscf_log)))
log.addHandler(PyscfHandler())
log.setLevel('INFO')
return log
def kernel(method, assert_convergence=ASSERT_CONV,
include_ghost=INCLUDE_GHOST, callback=None, **kwargs):
'''Optimize geometry with pyberny for the given method.
To adjust the convergence threshold, parameters can be set in kwargs as
below:
.. code-block:: python
conv_params = { # They are default settings
'gradientmax': 0.45e-3, # Eh/[Bohr|rad]
'gradientrms': 0.15e-3, # Eh/[Bohr|rad]
'stepmax': 1.8e-3, # [Bohr|rad]
'steprms': 1.2e-3, # [Bohr|rad]
}
from pyscf.geomopt import berny_solver
opt = berny_solver.GeometryOptimizer(method)
opt.params = conv_params
opt.kernel()
'''
t0 = time.clock(), time.time()
mol = method.mol.copy()
if 'log' in kwargs:
log = lib.logger.new_logger(method, kwargs['log'])
elif 'verbose' in kwargs:
log = lib.logger.new_logger(method, kwargs['verbose'])
else:
log = lib.logger.new_logger(method)
if isinstance(method, lib.GradScanner):
g_scanner = method
elif isinstance(method, GradientsBasics):
g_scanner = method.as_scanner()
elif getattr(method, 'nuc_grad_method', None):
g_scanner = method.nuc_grad_method().as_scanner()
else:
raise NotImplementedError('Nuclear gradients of %s not available' % method)
if not include_ghost:
g_scanner.atmlst = numpy.where(method.mol.atom_charges() != 0)[0]
# When symmetry is enabled, the molecule may be shifted or rotated to make
# the z-axis be the main axis. The transformation can cause inconsistency
# between the optimization steps. The transformation is muted by setting
    # an explicit point group to the keyword mol.symmetry (see symmetry
# detection code in Mole.build function).
if mol.symmetry:
mol.symmetry = mol.topgroup
# temporary interface, taken from berny.py optimize function
berny_log = to_berny_log(log)
geom = to_berny_geom(mol, include_ghost)
optimizer = Berny(geom, logger=berny_log, **kwargs)
t1 = t0
e_last = 0
for cycle, geom in enumerate(optimizer):
if log.verbose >= lib.logger.NOTE:
log.note('\nGeometry optimization cycle %d', cycle+1)
dump_mol_geometry(mol, geom.coords, log)
if mol.symmetry:
geom.coords = symmetrize(mol, geom.coords)
mol.set_geom_(_geom_to_atom(mol, geom, include_ghost), unit='Bohr')
energy, gradients = g_scanner(mol)
log.note('cycle %d: E = %.12g dE = %g norm(grad) = %g', cycle+1,
energy, energy - e_last, numpy.linalg.norm(gradients))
e_last = energy
if callable(callback):
callback(locals())
if assert_convergence and not g_scanner.converged:
raise RuntimeError('Nuclear gradients of %s not converged' % method)
optimizer.send((energy, gradients))
        t1 = log.timer('geometry optimization cycle %d'%cycle, *t1)
    t0 = log.timer('geometry optimization', *t0)
return optimizer._converged, mol
def optimize(method, assert_convergence=ASSERT_CONV,
include_ghost=INCLUDE_GHOST, callback=None, **kwargs):
'''Optimize geometry with pyberny for the given method.
To adjust the convergence threshold, parameters can be set in kwargs as
below:
.. code-block:: python
conv_params = { # They are default settings
'gradientmax': 0.45e-3, # Eh/[Bohr|rad]
'gradientrms': 0.15e-3, # Eh/[Bohr|rad]
'stepmax': 1.8e-3, # [Bohr|rad]
'steprms': 1.2e-3, # [Bohr|rad]
}
from pyscf.geomopt import berny_solver
newmol = berny_solver.optimize(method, **conv_params)
'''
return kernel(method, assert_convergence, include_ghost, callback,
**kwargs)[1]
optimize = kernel
del(INCLUDE_GHOST, ASSERT_CONV)
if __name__ == '__main__':
from pyscf import gto
from pyscf import scf, dft, cc, mp
mol = gto.M(atom='''
C 1.1879 -0.3829 0.0000
C 0.0000 0.5526 0.0000
O -1.1867 -0.2472 0.0000
H -1.9237 0.3850 0.0000
H 2.0985 0.2306 0.0000
H 1.1184 -1.0093 0.8869
H 1.1184 -1.0093 -0.8869
H -0.0227 1.1812 0.8852
H -0.0227 1.1812 -0.8852
''',
basis='3-21g')
mf = scf.RHF(mol)
conv_params = {
'gradientmax': 6e-3, # Eh/Bohr
'gradientrms': 2e-3, # Eh/Bohr
'stepmax': 2e-2, # Bohr
'steprms': 1.5e-2, # Bohr
}
mol1 = optimize(mf, **conv_params)
print(mf.kernel() - -153.219208484874)
print(scf.RHF(mol1).kernel() - -153.222680852335)
mf = dft.RKS(mol)
mf.xc = 'pbe,'
mf.conv_tol = 1e-7
mol1 = optimize(mf)
mymp2 = mp.MP2(scf.RHF(mol))
mol1 = optimize(mymp2)
mycc = cc.CCSD(scf.RHF(mol))
mol1 = optimize(mycc)
| 34.787645 | 91 | 0.642619 |
8a01ccf4f5933cd1046863655e9835118928c6fc | 1,838 | py | Python | src/main/python/taf/foundation/api/ui/aut.py | WesleyPeng/uiXautomation | 2d2c4d5a774ffda934d5615036a80c449bac930d | [
"Apache-2.0"
] | 6 | 2017-09-19T15:05:47.000Z | 2021-07-16T16:07:46.000Z | src/main/python/taf/foundation/api/ui/aut.py | WesleyPeng/uiXautomation | 2d2c4d5a774ffda934d5615036a80c449bac930d | [
"Apache-2.0"
] | 1 | 2018-06-02T18:45:51.000Z | 2018-06-02T18:45:51.000Z | src/main/python/taf/foundation/api/ui/aut.py | WesleyPeng/uiXautomation | 2d2c4d5a774ffda934d5615036a80c449bac930d | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2017-2018 {Flair Inc.} WESLEY PENG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from taf.foundation.utils import ConnectionCache
| 25.887324 | 74 | 0.62568 |
8a02d8606a3a24d720ef5682953d80e75a8dcabc | 1,758 | py | Python | algo/vigenere.py | dkushche/Crypto | 75919d6df2084aee1de76c9999ac4e361c4efd48 | [
"MIT"
] | 3 | 2020-05-07T22:03:48.000Z | 2021-03-11T16:36:56.000Z | algo/vigenere.py | dkushche/Crypto | 75919d6df2084aee1de76c9999ac4e361c4efd48 | [
"MIT"
] | null | null | null | algo/vigenere.py | dkushche/Crypto | 75919d6df2084aee1de76c9999ac4e361c4efd48 | [
"MIT"
] | null | null | null | import crypto_tools
from itertools import cycle
vigenere.little_doc = vigenere_little_doc
vigenere.full_doc = vigenere_full_doc
| 30.310345 | 78 | 0.660978 |
8a03248b6fead646cb68e7a6a935435de664969c | 14,492 | py | Python | anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/base_context.py | space-scl/emacs.d | 6285c38714023b72a023fe24cbcb5e4fcdcdb949 | [
"Apache-2.0"
] | 4 | 2019-07-26T11:32:22.000Z | 2019-09-11T05:34:59.000Z | anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/base_context.py | space-scl/emacs.d | 6285c38714023b72a023fe24cbcb5e4fcdcdb949 | [
"Apache-2.0"
] | 10 | 2020-05-11T20:29:28.000Z | 2022-01-13T01:41:27.000Z | anaconda-mode/0.1.13/jedi-0.15.1-py3.7.egg/jedi/evaluate/base_context.py | space-scl/emacs.d | 6285c38714023b72a023fe24cbcb5e4fcdcdb949 | [
"Apache-2.0"
] | 2 | 2019-08-28T14:57:54.000Z | 2019-11-26T16:18:30.000Z | """
Contexts are the "values" that Python would return. However Contexts are at the
same time also the "contexts" that a user is currently sitting in.
A ContextSet is typically used to specify the return of a function or any other
static analysis operation. In jedi there are always multiple returns and not
just one.
"""
from functools import reduce
from operator import add
from parso.python.tree import ExprStmt, SyncCompFor
from jedi import debug
from jedi._compatibility import zip_longest, unicode
from jedi.parser_utils import clean_scope_docstring
from jedi.common import BaseContextSet, BaseContext
from jedi.evaluate.helpers import SimpleGetItemNotFound
from jedi.evaluate.utils import safe_property
from jedi.evaluate.cache import evaluator_as_method_param_cache
from jedi.cache import memoize_method
_sentinel = object()
def iterate_contexts(contexts, contextualized_node=None, is_async=False):
"""
Calls `iterate`, on all contexts but ignores the ordering and just returns
all contexts that the iterate functions yield.
"""
return ContextSet.from_sets(
lazy_context.infer()
for lazy_context in contexts.iterate(contextualized_node, is_async=is_async)
)
class LazyContextWrapper(_ContextWrapperBase):
class ContextWrapper(_ContextWrapperBase):
class TreeContext(Context):
class ContextualizedNode(object):
class ContextualizedName(ContextualizedNode):
# TODO merge with TreeNameDefinition?!
def assignment_indexes(self):
"""
Returns an array of tuple(int, node) of the indexes that are used in
tuple assignments.
For example if the name is ``y`` in the following code::
x, (y, z) = 2, ''
would result in ``[(1, xyz_node), (0, yz_node)]``.
When searching for b in the case ``a, *b, c = [...]`` it will return::
[(slice(1, -1), abc_node)]
"""
indexes = []
is_star_expr = False
node = self.node.parent
compare = self.node
while node is not None:
if node.type in ('testlist', 'testlist_comp', 'testlist_star_expr', 'exprlist'):
for i, child in enumerate(node.children):
if child == compare:
index = int(i / 2)
if is_star_expr:
from_end = int((len(node.children) - i) / 2)
index = slice(index, -from_end)
indexes.insert(0, (index, node))
break
else:
raise LookupError("Couldn't find the assignment.")
is_star_expr = False
elif node.type == 'star_expr':
is_star_expr = True
elif isinstance(node, (ExprStmt, SyncCompFor)):
break
compare = node
node = node.parent
return indexes
class ContextSet(BaseContextSet):
NO_CONTEXTS = ContextSet([])
| 33.162471 | 94 | 0.637524 |
8a036923cf292987a326de518f02ae1d70e60da4 | 974 | py | Python | kiwi_scp/commands/cmd_cmd.py | yavook/kiwi-scp | ca4263d913cfbdedc8b14334e3cad61c3b95f0a7 | [
"MIT"
] | null | null | null | kiwi_scp/commands/cmd_cmd.py | yavook/kiwi-scp | ca4263d913cfbdedc8b14334e3cad61c3b95f0a7 | [
"MIT"
] | null | null | null | kiwi_scp/commands/cmd_cmd.py | yavook/kiwi-scp | ca4263d913cfbdedc8b14334e3cad61c3b95f0a7 | [
"MIT"
] | null | null | null | from typing import Tuple
import click
from .cmd import KiwiCommandType, KiwiCommand
from .decorators import kiwi_command
from ..executable import COMPOSE_EXE
from ..instance import Instance
from ..project import Project
| 26.324324 | 91 | 0.708419 |
8a03ced3330b9102f19e53ae0f85a628054986d1 | 36 | py | Python | tools/__init__.py | BranKein/Flask-template | 3d8f43b3c44163e855c727de2a0dfe37d3b788f9 | [
"MIT"
] | null | null | null | tools/__init__.py | BranKein/Flask-template | 3d8f43b3c44163e855c727de2a0dfe37d3b788f9 | [
"MIT"
] | null | null | null | tools/__init__.py | BranKein/Flask-template | 3d8f43b3c44163e855c727de2a0dfe37d3b788f9 | [
"MIT"
] | null | null | null | from . import ip
__all__ = ['ip']
| 7.2 | 16 | 0.583333 |
8a040db174b4e066ad1fcf13a9fc64667e2a81e2 | 274 | py | Python | leetCode/algorithms/easy/count_and_say.py | ferhatelmas/algo | a7149c7a605708bc01a5cd30bf5455644cefd04d | [
"WTFPL"
] | 25 | 2015-01-21T16:39:18.000Z | 2021-05-24T07:01:24.000Z | leetCode/algorithms/easy/count_and_say.py | gauravsingh58/algo | 397859a53429e7a585e5f6964ad24146c6261326 | [
"WTFPL"
] | 2 | 2020-09-30T19:39:36.000Z | 2020-10-01T17:15:16.000Z | leetCode/algorithms/easy/count_and_say.py | ferhatelmas/algo | a7149c7a605708bc01a5cd30bf5455644cefd04d | [
"WTFPL"
] | 15 | 2015-01-21T16:39:27.000Z | 2020-10-01T17:00:22.000Z | from itertools import groupby
| 19.571429 | 72 | 0.463504 |
8a045d9a56c4a8715b77c0b2cd2d5ff977fa98ed | 609 | py | Python | conf/feature_config.py | pupuwudi/nlp_xiaojiang | 182ac4522b6012a52de6e1d0db7e6a47cb716e5b | [
"MIT"
] | null | null | null | conf/feature_config.py | pupuwudi/nlp_xiaojiang | 182ac4522b6012a52de6e1d0db7e6a47cb716e5b | [
"MIT"
] | null | null | null | conf/feature_config.py | pupuwudi/nlp_xiaojiang | 182ac4522b6012a52de6e1d0db7e6a47cb716e5b | [
"MIT"
] | 2 | 2021-01-18T10:07:20.000Z | 2022-01-12T10:09:47.000Z | # -*- coding: UTF-8 -*-
# !/usr/bin/python
# @time :2019/5/10 9:13
# @author :Mo
# @function :path of FeatureProject
import pathlib
import sys
import os
# base dir
projectdir = str(pathlib.Path(os.path.abspath(__file__)).parent.parent)
sys.path.append(projectdir)
# path of BERT model
model_dir = projectdir + '/Data/chinese_L-12_H-768_A-12'
config_name = model_dir + '/bert_config.json'
ckpt_name = model_dir + '/bert_model.ckpt'
vocab_file = model_dir + '/vocab.txt'
# gpu
gpu_memory_fraction = 0.32
#
layer_indexes = [-2]
#
max_seq_len = 32
| 22.555556 | 72 | 0.689655 |
8a047a8d5dd4c7ba8745cc48738110ca5fef1d2f | 813 | py | Python | tests/test_prep_read.py | taruma/hidrokit | a96c4ba2235d58d2bbc905be44d1b413ed19b3d2 | [
"MIT"
] | 5 | 2019-07-15T13:35:52.000Z | 2020-04-01T17:34:16.000Z | tests/test_prep_read.py | taruma/hidrokit | a96c4ba2235d58d2bbc905be44d1b413ed19b3d2 | [
"MIT"
] | 107 | 2019-01-03T02:12:26.000Z | 2020-02-18T00:48:27.000Z | tests/test_prep_read.py | hidrokit/hidrokit | c8b949aa6a81981684a24e5dd1e498ec82cbe0ca | [
"MIT"
] | 2 | 2020-06-17T00:08:32.000Z | 2020-08-24T18:55:38.000Z | """Test for .prep.read module
"""
from hidrokit.prep import read
import numpy as np
import pandas as pd
A = pd.DataFrame(
data=[
[1, 3, 4, np.nan, 2, np.nan],
[np.nan, 2, 3, np.nan, 1, 4],
[2, np.nan, 1, 3, 4, np.nan]
],
columns=['A', 'B', 'C', 'D', 'E', 'F']
)
A_date = A.set_index(pd.date_range("20190617", "20190619"))
res_A_number = {'A': [1], 'B': [2], 'C': [], 'D': [0, 1], 'E': [], 'F': [0, 2]}
res_A_date = {'A': ['0618'], 'B': ['0619'], 'C': [],
'D': ['0617', '0618'], 'E': [], 'F': ['0617', '0619']}
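# Fixture relationship (illustrative note, not part of the original test module):
# res_A_number maps each column of A to the row positions that hold NaN
# (e.g. column 'A' is NaN only at row 1), and res_A_date lists the same
# positions as the MMDD part of A_date's DatetimeIndex.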
| 25.40625 | 79 | 0.526445 |
8a047dbb3e81227c03ec206589ca325125601905 | 1,721 | py | Python | app/blueprints/department_blueprint.py | Maxcutex/personal_ecommerce | be09fb20eae1b225523acde06f8e75effcc3676f | [
"MIT"
] | null | null | null | app/blueprints/department_blueprint.py | Maxcutex/personal_ecommerce | be09fb20eae1b225523acde06f8e75effcc3676f | [
"MIT"
] | 2 | 2019-05-21T08:44:29.000Z | 2021-04-30T20:46:08.000Z | app/blueprints/department_blueprint.py | Maxcutex/personal_ecommerce | be09fb20eae1b225523acde06f8e75effcc3676f | [
"MIT"
] | null | null | null | from flasgger import swag_from
from app.blueprints.base_blueprint import Blueprint, BaseBlueprint, request, Security, Auth
from app.controllers.department_controller import DepartmentController
url_prefix = '{}/departments'.format(BaseBlueprint.base_url_prefix)
department_blueprint = Blueprint('department', __name__, url_prefix=url_prefix)
department_controller = DepartmentController(request)
| 41.97561 | 91 | 0.820453 |
8a04b26d17a373b84c1afb19abef67f291bb970a | 9,747 | py | Python | src/train_DFCAN.py | ikecoglu/DL-SR | 5e4c794f1434cd4a9b2b1aecf3738065b11bede1 | [
"MIT"
] | 46 | 2021-01-07T03:38:07.000Z | 2022-03-24T19:11:23.000Z | src/train_DFCAN.py | ikecoglu/DL-SR | 5e4c794f1434cd4a9b2b1aecf3738065b11bede1 | [
"MIT"
] | 7 | 2021-02-06T14:23:18.000Z | 2022-02-13T04:08:45.000Z | src/train_DFCAN.py | ikecoglu/DL-SR | 5e4c794f1434cd4a9b2b1aecf3738065b11bede1 | [
"MIT"
] | 16 | 2021-01-26T16:22:49.000Z | 2022-02-26T03:21:08.000Z | import argparse
from keras import optimizers
import matplotlib.pyplot as plt
import numpy as np
import datetime
from keras.callbacks import TensorBoard
import glob
import os
import tensorflow as tf
from models import *
from utils.lr_controller import ReduceLROnPlateau
from utils.data_loader import data_loader, data_loader_multi_channel
from utils.utils import img_comp
from utils.loss import loss_mse_ssim
parser = argparse.ArgumentParser()
parser.add_argument("--gpu_id", type=int, default=1)
parser.add_argument("--gpu_memory_fraction", type=float, default=0.3)
parser.add_argument("--mixed_precision_training", type=int, default=1)
parser.add_argument("--data_dir", type=str, default="../dataset/train/F-actin")
parser.add_argument("--save_weights_dir", type=str, default="../trained_models")
parser.add_argument("--model_name", type=str, default="DFCAN")
parser.add_argument("--patch_height", type=int, default=128)
parser.add_argument("--patch_width", type=int, default=128)
parser.add_argument("--input_channels", type=int, default=9)
parser.add_argument("--scale_factor", type=int, default=2)
parser.add_argument("--norm_flag", type=int, default=1)
parser.add_argument("--iterations", type=int, default=1000000)
parser.add_argument("--sample_interval", type=int, default=1000)
parser.add_argument("--validate_interval", type=int, default=2000)
parser.add_argument("--validate_num", type=int, default=500)
parser.add_argument("--batch_size", type=int, default=4)
parser.add_argument("--start_lr", type=float, default=1e-4)
parser.add_argument("--lr_decay_factor", type=float, default=0.5)
parser.add_argument("--load_weights", type=int, default=0)
parser.add_argument("--optimizer_name", type=str, default="adam")
args = parser.parse_args()
gpu_id = str(args.gpu_id)
gpu_memory_fraction = args.gpu_memory_fraction
mixed_precision_training = str(args.mixed_precision_training)
data_dir = args.data_dir
save_weights_dir = args.save_weights_dir
validate_interval = args.validate_interval
batch_size = args.batch_size
start_lr = args.start_lr
lr_decay_factor = args.lr_decay_factor
patch_height = args.patch_height
patch_width = args.patch_width
input_channels = args.input_channels
scale_factor = args.scale_factor
norm_flag = args.norm_flag
validate_num = args.validate_num
iterations = args.iterations
load_weights = args.load_weights
optimizer_name = args.optimizer_name
model_name = args.model_name
sample_interval = args.sample_interval
os.environ["TF_ENABLE_AUTO_MIXED_PRECISION"] = mixed_precision_training
os.environ["CUDA_VISIBLE_DEVICES"] = gpu_id
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_memory_fraction)
tf.Session(config=tf.ConfigProto(gpu_options=gpu_options))
data_name = data_dir.split('/')[-1]
if input_channels == 1:
save_weights_name = model_name + '-SISR_' + data_name
cur_data_loader = data_loader
train_images_path = data_dir + '/training_wf/'
validate_images_path = data_dir + '/validate_wf/'
else:
save_weights_name = model_name + '-SIM_' + data_name
cur_data_loader = data_loader_multi_channel
train_images_path = data_dir + '/training/'
validate_images_path = data_dir + '/validate/'
save_weights_path = save_weights_dir + '/' + save_weights_name + '/'
train_gt_path = data_dir + '/training_gt/'
validate_gt_path = data_dir + '/validate_gt/'
sample_path = save_weights_path + 'sampled_img/'
if not os.path.exists(save_weights_path):
os.mkdir(save_weights_path)
if not os.path.exists(sample_path):
os.mkdir(sample_path)
# --------------------------------------------------------------------------------
# select models and optimizer
# --------------------------------------------------------------------------------
modelFns = {'DFCAN': DFCAN16.DFCAN}
modelFN = modelFns[model_name]
optimizer_g = optimizers.adam(lr=start_lr, beta_1=0.9, beta_2=0.999)
# --------------------------------------------------------------------------------
# define combined model
# --------------------------------------------------------------------------------
g = modelFN((patch_height, patch_width, input_channels))
g.compile(loss=loss_mse_ssim, optimizer=optimizer_g)
lr_controller = ReduceLROnPlateau(model=g, factor=lr_decay_factor, patience=10, mode='min', min_delta=1e-4,
cooldown=0, min_lr=start_lr * 0.1, verbose=1)
# --------------------------------------------------------------------------------
# about Tensorboard
# --------------------------------------------------------------------------------
log_path = save_weights_path + 'graph'
if not os.path.exists(log_path):
os.mkdir(log_path)
callback = TensorBoard(log_path)
callback.set_model(g)
train_names = 'training_loss'
val_names = ['val_MSE', 'val_SSIM', 'val_PSNR', 'val_NRMSE']
# --------------------------------------------------------------------------------
# Sample and validate
# --------------------------------------------------------------------------------
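# NOTE: the original sampling/validation helpers (Validate and write_log, used in the
# training loop below) are not included in this excerpt. The sketch below follows the
# common Keras/TF1 TensorBoard pattern and is an assumption, not the authors' exact
# code; Validate would additionally compute validation metrics, save checkpoints and
# drive lr_controller.
def write_log(callback, name, value, batch_no):
    # write a single scalar value to the TensorBoard writer attached to `callback`
    summary = tf.Summary()
    summary_value = summary.value.add()
    summary_value.simple_value = value
    summary_value.tag = name
    callback.writer.add_summary(summary, batch_no)
    callback.writer.flush()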
# --------------------------------------------------------------------------------
# if exist, load weights
# --------------------------------------------------------------------------------
if load_weights:
if os.path.exists(save_weights_path + 'weights.best'):
        g.load_weights(save_weights_path + 'weights.best')
print('Loading weights successfully: ' + save_weights_path + 'weights.best')
elif os.path.exists(save_weights_path + 'weights.latest'):
        g.load_weights(save_weights_path + 'weights.latest')
print('Loading weights successfully: ' + save_weights_path + 'weights.latest')
# --------------------------------------------------------------------------------
# training
# --------------------------------------------------------------------------------
start_time = datetime.datetime.now()
loss_record = []
validate_nrmse = [np.Inf]
lr_controller.on_train_begin()
images_path = glob.glob(train_images_path + '/*')
for it in range(iterations):
# ------------------------------------
# train generator
# ------------------------------------
input_g, gt_g = cur_data_loader(images_path, train_images_path, train_gt_path, patch_height, patch_width,
batch_size, norm_flag=norm_flag, scale=scale_factor)
loss_generator = g.train_on_batch(input_g, gt_g)
loss_record.append(loss_generator)
elapsed_time = datetime.datetime.now() - start_time
print("%d epoch: time: %s, g_loss = %s" % (it + 1, elapsed_time, loss_generator))
if (it + 1) % sample_interval == 0:
images_path = glob.glob(train_images_path + '/*')
Validate(it + 1, sample=1)
if (it + 1) % validate_interval == 0:
Validate(it + 1, sample=0)
write_log(callback, train_names, np.mean(loss_record), it + 1)
loss_record = []
| 45.125 | 109 | 0.612804 |
8a04bef0858eef7458b1e38ddd409346a98cb2cc | 2,635 | py | Python | catalyst/exchange/live_graph_clock.py | erlendve/catalyst | 463575bc23c0abd1287f8ec81c4377baabf2b8b8 | [
"Apache-2.0"
] | null | null | null | catalyst/exchange/live_graph_clock.py | erlendve/catalyst | 463575bc23c0abd1287f8ec81c4377baabf2b8b8 | [
"Apache-2.0"
] | null | null | null | catalyst/exchange/live_graph_clock.py | erlendve/catalyst | 463575bc23c0abd1287f8ec81c4377baabf2b8b8 | [
"Apache-2.0"
] | null | null | null | import pandas as pd
from catalyst.constants import LOG_LEVEL
from catalyst.exchange.utils.stats_utils import prepare_stats
from catalyst.gens.sim_engine import (
BAR,
SESSION_START
)
from logbook import Logger
log = Logger('LiveGraphClock', level=LOG_LEVEL)
| 35.133333 | 79 | 0.666414 |
8a04ff873e3cd041bc9cad7f7fc7707f7c185cce | 6,652 | py | Python | invera/api/tests.py | LeoLeiva/todo-challenge | f6f24f53758eb4e425c91516bcab7af8cad66814 | [
"MIT"
] | null | null | null | invera/api/tests.py | LeoLeiva/todo-challenge | f6f24f53758eb4e425c91516bcab7af8cad66814 | [
"MIT"
] | null | null | null | invera/api/tests.py | LeoLeiva/todo-challenge | f6f24f53758eb4e425c91516bcab7af8cad66814 | [
"MIT"
] | 1 | 2021-01-10T20:19:42.000Z | 2021-01-10T20:19:42.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import inspect
from task.models import InveraTask
from api.utils import send_test_csv_report
from django.contrib.auth.models import User
from rest_framework.test import APIClient, APITestCase
from rest_framework.reverse import reverse
from rest_framework import status
TEST_RESULTS = []
RECIPIENTS = ['[email protected]']
| 37.370787 | 165 | 0.634245 |
8a06be2dde291c66efbc5f80746f557a0f2cecaa | 336 | py | Python | experiments/seidel-2d/tmp_files/6745.py | LoopTilingBenchmark/benchmark | 52a3d2e70216552a498fd91de02a2fa9cb62122c | [
"BSD-2-Clause"
] | null | null | null | experiments/seidel-2d/tmp_files/6745.py | LoopTilingBenchmark/benchmark | 52a3d2e70216552a498fd91de02a2fa9cb62122c | [
"BSD-2-Clause"
] | null | null | null | experiments/seidel-2d/tmp_files/6745.py | LoopTilingBenchmark/benchmark | 52a3d2e70216552a498fd91de02a2fa9cb62122c | [
"BSD-2-Clause"
] | null | null | null | from chill import *
source('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/polybench/polybench-code/stencils/seidel-2d/kernel.c')
destination('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/experiments/seidel-2d/tmp_files/6745.c')
procedure('kernel_seidel_2d')
loop(0)
known(' n > 2 ')
tile(0,2,16,2)
tile(0,4,16,4)
| 30.545455 | 118 | 0.764881 |
8a06d974512def3c400fb25769c0185d59195602 | 1,405 | py | Python | baymax/api.py | dmrz/baymax | 60cca5ae2e7cb42e093747f91b809e34e6782fcd | [
"MIT"
] | 34 | 2018-02-14T09:37:26.000Z | 2021-02-13T10:06:54.000Z | baymax/api.py | Avishekbhattacharjee/baymax | 487930c4f3021ff50504d371de09ff31e458c09f | [
"MIT"
] | 1 | 2018-03-03T02:55:38.000Z | 2018-03-17T21:57:15.000Z | baymax/api.py | Avishekbhattacharjee/baymax | 487930c4f3021ff50504d371de09ff31e458c09f | [
"MIT"
] | 7 | 2018-02-28T07:35:35.000Z | 2022-01-26T11:54:40.000Z | import json
import aiohttp
| 29.270833 | 72 | 0.641993 |
8a072b60d911bf4164d6e02341054f5f6f3f27f0 | 3,479 | py | Python | nautobot_device_onboarding/tests/test_netdev_keeper.py | pszulczewski/nautobot-plugin-device-onboarding | 9ddec52d7bcc751c4616bd7c1180ed2a1d31ff2c | [
"Apache-2.0"
] | 13 | 2021-03-05T10:47:50.000Z | 2022-03-18T19:07:09.000Z | nautobot_device_onboarding/tests/test_netdev_keeper.py | pszulczewski/nautobot-plugin-device-onboarding | 9ddec52d7bcc751c4616bd7c1180ed2a1d31ff2c | [
"Apache-2.0"
] | 18 | 2021-03-05T10:29:13.000Z | 2022-03-08T13:10:38.000Z | nautobot_device_onboarding/tests/test_netdev_keeper.py | pszulczewski/nautobot-plugin-device-onboarding | 9ddec52d7bcc751c4616bd7c1180ed2a1d31ff2c | [
"Apache-2.0"
] | 14 | 2021-03-06T19:33:46.000Z | 2022-03-28T16:31:38.000Z | """Unit tests for nautobot_device_onboarding.netdev_keeper module and its classes.
(c) 2020-2021 Network To Code
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from socket import gaierror
from unittest import mock
from django.test import TestCase
from nautobot.dcim.models import Site, DeviceRole, Platform
from nautobot_device_onboarding.exceptions import OnboardException
from nautobot_device_onboarding.helpers import onboarding_task_fqdn_to_ip
from nautobot_device_onboarding.models import OnboardingTask
| 44.602564 | 108 | 0.728658 |
8a076cdd50a9d69b52cffcb8dbe3df578f17d801 | 2,577 | py | Python | superneurons/tools/img_val/main.py | Phaeton-lang/baselines | 472c248047fbb55b5fa0e620758047b7f0a1d041 | [
"MIT"
] | null | null | null | superneurons/tools/img_val/main.py | Phaeton-lang/baselines | 472c248047fbb55b5fa0e620758047b7f0a1d041 | [
"MIT"
] | null | null | null | superneurons/tools/img_val/main.py | Phaeton-lang/baselines | 472c248047fbb55b5fa0e620758047b7f0a1d041 | [
"MIT"
] | null | null | null | # Created by ay27 at 17/4/9
import os
import matplotlib.pyplot as plt
import struct
import numpy as np
if __name__ == '__main__':
# read_image('../../data/ilsvrc2012/img.bin')
# read_label('../../data/ilsvrc2012/label.bin', '../../data/ilsvrc2012/val.txt')
# read_image('../../build/cifar100_train_image.bin')
# read_label('../../build/cifar100_train_label.bin')
read_image('../../build/val_data_8.bin')
for i in range(10):
read_label('../../build/val_label_%d.bin' % i)
# labels = []
# for i in range(10):
# labels.append(read_label('../../build/val_label_%d.bin' % i))
#
# ground = []
# with open('../../build/shuffled_list') as file:
# ground.append() | 28.01087 | 84 | 0.509895 |
8a0988ba1c9ee5db70eabfa7b9b35ad041f9c1f7 | 2,238 | py | Python | pymatgen/analysis/tests/test_piezo.py | exenGT/pymatgen | a8ffb820ab8fc3f60251099e38c8888f45eae618 | [
"MIT"
] | 1 | 2021-11-02T21:10:11.000Z | 2021-11-02T21:10:11.000Z | pymatgen/analysis/tests/test_piezo.py | exenGT/pymatgen | a8ffb820ab8fc3f60251099e38c8888f45eae618 | [
"MIT"
] | 5 | 2018-08-07T23:00:23.000Z | 2021-01-05T22:46:23.000Z | pymatgen/analysis/tests/test_piezo.py | exenGT/pymatgen | a8ffb820ab8fc3f60251099e38c8888f45eae618 | [
"MIT"
] | 6 | 2019-04-26T18:50:41.000Z | 2020-03-29T17:58:34.000Z | # Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Test for the piezo tensor class
"""
__author__ = "Shyam Dwaraknath"
__version__ = "0.1"
__maintainer__ = "Shyam Dwaraknath"
__email__ = "[email protected]"
__status__ = "Development"
__date__ = "4/1/16"
import os
import unittest
import numpy as np
from pymatgen.analysis.piezo import PiezoTensor
from pymatgen.util.testing import PymatgenTest
if __name__ == "__main__":
unittest.main()
| 31.521127 | 76 | 0.594281 |
8a0ab3edf5559c9bdaa844115d82ed95f2b065a1 | 63,787 | py | Python | nova/virt/driver.py | larsbutler/nova | fb190f30a911658d8b0c4deaf43cbb8c9e35b672 | [
"Apache-2.0"
] | null | null | null | nova/virt/driver.py | larsbutler/nova | fb190f30a911658d8b0c4deaf43cbb8c9e35b672 | [
"Apache-2.0"
] | null | null | null | nova/virt/driver.py | larsbutler/nova | fb190f30a911658d8b0c4deaf43cbb8c9e35b672 | [
"Apache-2.0"
] | null | null | null | # Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Driver base-classes:
(Beginning of) the contract that compute drivers must follow, and shared
types that support that contract
"""
import sys
from oslo_log import log as logging
from oslo_utils import importutils
import nova.conf
from nova.i18n import _, _LE, _LI
from nova import utils
from nova.virt import event as virtevent
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
def get_block_device_info(instance, block_device_mapping):
"""Converts block device mappings for an instance to driver format.
Virt drivers expect block device mapping to be presented in the format
of a dict containing the following keys:
- root_device_name: device name of the root disk
- ephemerals: a (potentially empty) list of DriverEphemeralBlockDevice
instances
- swap: An instance of DriverSwapBlockDevice or None
- block_device_mapping: a (potentially empty) list of
      DriverVolumeBlockDevice or any of its more
specialized subclasses.
"""
from nova.virt import block_device as virt_block_device
block_device_info = {
'root_device_name': instance.root_device_name,
'ephemerals': virt_block_device.convert_ephemerals(
block_device_mapping),
'block_device_mapping':
virt_block_device.convert_all_volumes(*block_device_mapping)
}
swap_list = virt_block_device.convert_swap(block_device_mapping)
block_device_info['swap'] = virt_block_device.get_swap(swap_list)
return block_device_info
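    # Illustrative shape of the dict returned above (device name and contents are
    # hypothetical and depend entirely on the instance's block device mappings):
    #
    #     {
    #         'root_device_name': '/dev/vda',
    #         'ephemerals': [...],            # DriverEphemeralBlockDevice items
    #         'swap': None,                   # or a DriverSwapBlockDevice
    #         'block_device_mapping': [...],  # DriverVolumeBlockDevice (sub)classes
    #     }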
def load_compute_driver(virtapi, compute_driver=None):
"""Load a compute driver module.
Load the compute driver module specified by the compute_driver
configuration option or, if supplied, the driver name supplied as an
argument.
    Compute driver constructors take a VirtAPI object as their first argument,
    and this must be supplied.
:param virtapi: a VirtAPI instance
:param compute_driver: a compute driver name to override the config opt
:returns: a ComputeDriver instance
"""
if not compute_driver:
compute_driver = CONF.compute_driver
if not compute_driver:
LOG.error(_LE("Compute driver option required, but not specified"))
sys.exit(1)
LOG.info(_LI("Loading compute driver '%s'"), compute_driver)
try:
driver = importutils.import_object(
'nova.virt.%s' % compute_driver,
virtapi)
return utils.check_isinstance(driver, ComputeDriver)
except ImportError:
LOG.exception(_LE("Unable to load the virtualization driver"))
sys.exit(1)
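# Example usage (illustrative): bypass the compute_driver config option and load the
# libvirt driver explicitly; the name is resolved relative to the nova.virt package.
#
#     driver = load_compute_driver(virtapi, compute_driver='libvirt.LibvirtDriver')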
| 39.205286 | 79 | 0.651246 |
8a0afacd436c5c382b382e786080775c8a2d6bf7 | 5,581 | py | Python | otp/chat/ChatInputNormal.py | P1ayerOne/src | 3a4343e29f844fe95da7d51aaee7fb680d02bf72 | [
"BSD-3-Clause"
] | null | null | null | otp/chat/ChatInputNormal.py | P1ayerOne/src | 3a4343e29f844fe95da7d51aaee7fb680d02bf72 | [
"BSD-3-Clause"
] | null | null | null | otp/chat/ChatInputNormal.py | P1ayerOne/src | 3a4343e29f844fe95da7d51aaee7fb680d02bf72 | [
"BSD-3-Clause"
] | null | null | null | from direct.showbase import DirectObject
from otp.otpbase import OTPGlobals
import sys
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from otp.otpbase import OTPLocalizer
| 34.450617 | 99 | 0.583766 |
8a0b53a65038120d7c635ea3a3f7ba3752ca109e | 14,068 | py | Python | train_text_summarizer.py | stevaras2/bert | 1efaa300eb91dea85c40de5e1586e8d2c94b89bb | [
"Apache-2.0"
] | 1 | 2019-11-28T10:03:09.000Z | 2019-11-28T10:03:09.000Z | train_text_summarizer.py | stevaras2/bert | 1efaa300eb91dea85c40de5e1586e8d2c94b89bb | [
"Apache-2.0"
] | null | null | null | train_text_summarizer.py | stevaras2/bert | 1efaa300eb91dea85c40de5e1586e8d2c94b89bb | [
"Apache-2.0"
] | null | null | null | import argparse
import json
import numpy as np
import pandas as pd
import os
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report,f1_score
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras import backend as K
from keras.utils.vis_utils import plot_model
from sklearn.externals import joblib
import time
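# NOTE: the custom Keras metric `f1` passed to model.compile(..., metrics=[f1]) further
# down is not defined in this excerpt. A common backend-based implementation (an
# assumption, not necessarily the author's exact code) is:
def f1(y_true, y_pred):
    # harmonic mean of precision and recall computed with Keras backend ops
    true_positives = K.sum(K.round(K.clip(y_true * y_pred, 0, 1)))
    possible_positives = K.sum(K.round(K.clip(y_true, 0, 1)))
    predicted_positives = K.sum(K.round(K.clip(y_pred, 0, 1)))
    precision = true_positives / (predicted_positives + K.epsilon())
    recall = true_positives / (possible_positives + K.epsilon())
    return 2 * ((precision * recall) / (precision + recall + K.epsilon()))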
def get_embeddings(sentences_list,layer_json):
'''
    :param sentences_list: the path of the sentences.txt file
    :param layer_json: the path of the json file that contains the embeddings of the sentences
    :return: dictionary mapping each sentence of sentences_list to its embedding
'''
    sentences = dict()  # maps the index of each line of sentences_list.txt to the sentence
    embeddings = dict()  # maps the index of each sentence to its embedding
    sentence_emb = dict()  # maps each sentence to its embedding
with open(sentences_list,'r') as file:
for index,line in enumerate(file):
sentences[index] = line.strip()
with open(layer_json, 'r',encoding='utf-8') as f:
for line in f:
embeddings[json.loads(line)['linex_index']] = np.asarray(json.loads(line)['features'])
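            # Each line of layer_json is expected to be a JSON object roughly like
            # (values illustrative): {"linex_index": 0, "features": [0.12, -0.03, ...]}
            # where "features" is the 768-dimensional sentence embedding.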
for key,value in sentences.items():
sentence_emb[value] = embeddings[key]
return sentence_emb
def train_classifier(sentences_list,layer_json,dataset_csv,filename):
'''
    :param sentences_list: the path of the sentences.txt file
    :param layer_json: the path of the json file that contains the embeddings of the sentences
    :param dataset_csv: the path of the dataset
    :param filename: the path of the pickle file in which the model will be stored
:return:
'''
dataset = pd.read_csv(dataset_csv)
bert_dict = get_embeddings(sentences_list,layer_json)
length = list()
sentence_emb = list()
previous_emb = list()
next_list = list()
section_list = list()
label = list()
errors = 0
for row in dataset.iterrows():
sentence = row[1][0].strip()
previous = row[1][1].strip()
nexts = row[1][2].strip()
section = row[1][3].strip()
if sentence in bert_dict:
sentence_emb.append(bert_dict[sentence])
else:
sentence_emb.append(np.zeros(768))
print(sentence)
errors += 1
if previous in bert_dict:
previous_emb.append(bert_dict[previous])
else:
previous_emb.append(np.zeros(768))
if nexts in bert_dict:
next_list.append(bert_dict[nexts])
else:
next_list.append(np.zeros(768))
if section in bert_dict:
section_list.append(bert_dict[section])
else:
section_list.append(np.zeros(768))
length.append(row[1][4])
label.append(row[1][5])
sentence_emb = np.asarray(sentence_emb)
print(sentence_emb.shape)
next_emb = np.asarray(next_list)
print(next_emb.shape)
previous_emb = np.asarray(previous_emb)
print(previous_emb.shape)
section_emb = np.asarray(section_list)
print(sentence_emb.shape)
length = np.asarray(length)
print(length.shape)
label = np.asarray(label)
print(errors)
features = np.concatenate([sentence_emb, previous_emb, next_emb,section_emb], axis=1)
features = np.column_stack([features, length]) # np.append(features,length,axis=1)
print(features.shape)
X_train, X_val, y_train, y_val = train_test_split(features, label, test_size=0.33, random_state=42)
log = LogisticRegression(random_state=0, solver='newton-cg', max_iter=1000, C=0.1)
log.fit(X_train, y_train)
#save the model
_ = joblib.dump(log, filename, compress=9)
predictions = log.predict(X_val)
print("###########################################")
print("Results using embeddings from the",layer_json,"file")
print(classification_report(y_val, predictions))
print("F1 score using Logistic Regression:",f1_score(y_val, predictions))
print("###########################################")
#train a DNN
f1_results = list()
for i in range(3):
model = Sequential()
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dense(128, activation='relu', trainable=True))
model.add(Dropout(0.30))
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dropout(0.25))
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dropout(0.35))
model.add(Dense(1, activation='sigmoid'))
# compile network
model.compile(loss='binary_crossentropy', optimizer='sgd', metrics=[f1])
# fit network
model.fit(X_train, y_train, epochs=100, batch_size=64)
loss, f_1 = model.evaluate(X_val, y_val, verbose=1)
print('\nTest F1: %f' % (f_1 * 100))
f1_results.append(f_1)
model = None
print("###########################################")
print("Results using embeddings from the", layer_json, "file")
# evaluate
print(np.mean(f1_results))
print("###########################################")
def parameter_tuning_LR(sentences_list,layer_json,dataset_csv):
'''
    :param sentences_list: the path of the sentences.txt file
:param layer_json: the path of the json file that contains the embeddings of the sentences
:param dataset_csv: the path of the dataset
:return:
'''
dataset = pd.read_csv(dataset_csv)
bert_dict = get_embeddings(sentences_list,layer_json)
length = list()
sentence_emb = list()
previous_emb = list()
next_list = list()
section_list = list()
label = list()
errors = 0
for row in dataset.iterrows():
sentence = row[1][0].strip()
previous = row[1][1].strip()
nexts = row[1][2].strip()
section = row[1][3].strip()
if sentence in bert_dict:
sentence_emb.append(bert_dict[sentence])
else:
sentence_emb.append(np.zeros(768))
print(sentence)
errors += 1
if previous in bert_dict:
previous_emb.append(bert_dict[previous])
else:
previous_emb.append(np.zeros(768))
if nexts in bert_dict:
next_list.append(bert_dict[nexts])
else:
next_list.append(np.zeros(768))
if section in bert_dict:
section_list.append(bert_dict[section])
else:
section_list.append(np.zeros(768))
length.append(row[1][4])
label.append(row[1][5])
sentence_emb = np.asarray(sentence_emb)
print(sentence_emb.shape)
next_emb = np.asarray(next_list)
print(next_emb.shape)
previous_emb = np.asarray(previous_emb)
print(previous_emb.shape)
section_emb = np.asarray(section_list)
print(sentence_emb.shape)
length = np.asarray(length)
print(length.shape)
label = np.asarray(label)
print(errors)
features = np.concatenate([sentence_emb, previous_emb, next_emb,section_emb], axis=1)
features = np.column_stack([features, length])
print(features.shape)
X_train, X_val, y_train, y_val = train_test_split(features, label, test_size=0.33, random_state=42)
C = [0.1,1,2,5,10]
solver = ['newton-cg','saga','sag']
best_params = dict()
best_score = 0.0
for c in C:
for s in solver:
start = time.time()
log = LogisticRegression(random_state=0, solver=s, max_iter=1000, C=c)
log.fit(X_train, y_train)
predictions = log.predict(X_val)
print("###########################################")
print("LR with C =",c,'and solver = ',s)
print("Results using embeddings from the", layer_json, "file")
print(classification_report(y_val, predictions))
f1 = f1_score(y_val, predictions)
if f1 > best_score:
best_score = f1
best_params['c'] = c
best_params['solver'] = s
print("F1 score using Logistic Regression:",f1)
print("###########################################")
end = time.time()
running_time = end - start
print("Running time:"+str(running_time))
def visualize_DNN(file_to_save):
'''
    Save the DNN architecture to a png file. It is better to use the Visulize_DNN.ipynd notebook for this.
    :param file_to_save: the png file to which the architecture of the DNN will be saved.
:return: None
'''
model = Sequential()
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dense(128, activation='relu', trainable=True))
model.add(Dropout(0.30))
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dropout(0.25))
model.add(Dense(64, activation='relu', trainable=True))
model.add(Dropout(0.35))
model.add(Dense(1, activation='sigmoid'))
plot_model(model, to_file=file_to_save, show_shapes=True)
if __name__ == '__main__':
#save_model('sentences_list.txt','Fudan_output_layer_-1.json','train_sentences1.csv','summarizer1.pkl')
ap = argparse.ArgumentParser()
ap.add_argument("-s", "--sentences", required=True, help="sentences list")
ap.add_argument("-o", "--output", required=True, help="output")
    ap.add_argument("-ts", "--train_set", required=True, help="path to train set")
    ap.add_argument("-sp", "--summarizer_path", required=True, help="path to save summarizer")
    args = vars(ap.parse_args())
    layer = train_classifier(args['sentences'], args['output'], args['train_set'], args['summarizer_path'])
#layer_1 = train_classifier('sentences_list.txt', 'new_output_layer_-1.json', 'train_sentences1.csv','fine_tune_BERT_sentence_classification1.pkl')
#layer_2 = train_classifier('sentences_list.txt','new_output_layer_-2.json','train_sentences1.csv','fine_tune_BERT_sentence_classification2.pkl')
#layer_3 = train_classifier('sentences_list.txt','new_output_layer_-3.json','train_sentences1.csv','fine_tune_BERT_sentence_classification3.pkl')
#layer_4 = train_classifier('sentences_list.txt','new_output_layer_-4.json','train_sentences1.csv','fine_tune_BERT_sentence_classification4.pkl')
#tuning = parameter_tuning_LR('sentences_list.txt','new_output_layer_-1.json','train_sentences1.csv')
#layer_1 = train_classifier('sentences_list.txt','output_layer_-1.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
#layer_2 = train_classifier('sentences_list.txt','output_layer_-2.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
#layer_3 = train_classifier('sentences_list.txt','output_layer_-3.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
#layer_4 = train_classifier('sentences_list.txt','output_layer_-4.json','train_sentences1.csv','fine_tune_BERT_sentence_classification.pkl')
| 35.705584 | 151 | 0.649062 |
8a0d48bd45e2a77d4024e66ae20d64213df72227 | 1,493 | py | Python | src/test/python/apache/aurora/executor/test_status_manager.py | zmanji/incubator-aurora | 9f594f1de6bbf46c74863dd3fc4d2708b7a974f2 | [
"Apache-2.0"
] | null | null | null | src/test/python/apache/aurora/executor/test_status_manager.py | zmanji/incubator-aurora | 9f594f1de6bbf46c74863dd3fc4d2708b7a974f2 | [
"Apache-2.0"
] | null | null | null | src/test/python/apache/aurora/executor/test_status_manager.py | zmanji/incubator-aurora | 9f594f1de6bbf46c74863dd3fc4d2708b7a974f2 | [
"Apache-2.0"
] | null | null | null | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time
from unittest import TestCase
import mock
from mesos.interface.mesos_pb2 import TaskState
from apache.aurora.executor.common.status_checker import StatusChecker
from apache.aurora.executor.status_manager import StatusManager
| 29.27451 | 74 | 0.750167 |
8a0d98e91f0c9a170743b5f41866a399dbce8684 | 3,494 | py | Python | Supplemental/A5. Collision estimation module/Con_est.py | wangqf1997/Human-injury-based-safety-decision-of-automated-vehicles | b104fdeb3d85e867f6b04c5ae7b5a197e705aeba | [
"CC-BY-4.0"
] | null | null | null | Supplemental/A5. Collision estimation module/Con_est.py | wangqf1997/Human-injury-based-safety-decision-of-automated-vehicles | b104fdeb3d85e867f6b04c5ae7b5a197e705aeba | [
"CC-BY-4.0"
] | null | null | null | Supplemental/A5. Collision estimation module/Con_est.py | wangqf1997/Human-injury-based-safety-decision-of-automated-vehicles | b104fdeb3d85e867f6b04c5ae7b5a197e705aeba | [
"CC-BY-4.0"
] | null | null | null | '''
-------------------------------------------------------------------------------------------------
This code accompanies the paper titled "Human injury-based safety decision of automated vehicles"
Author: Qingfan Wang, Qing Zhou, Miao Lin, Bingbing Nie
Corresponding author: Bingbing Nie ([email protected])
-------------------------------------------------------------------------------------------------
'''
import torch
import numpy as np
from torch import nn
from torch.nn.utils import weight_norm
__author__ = "Qingfan Wang"
def Collision_cond(veh_striking_list, V1_v, V2_v, delta_angle, veh_param):
''' Estimate the collision condition. '''
(veh_l, veh_w, veh_cgf, veh_cgs, veh_k, veh_m) = veh_param
delta_angle_2 = np.arccos(np.abs(np.cos(delta_angle)))
if -1e-6 < delta_angle_2 < 1e-6:
delta_angle_2 = 1e-6
delta_v1_list = []
delta_v2_list = []
    # Estimate the collision condition (delta-v) according to the principal impact direction.
for veh_striking in veh_striking_list:
if veh_striking[0] == 1:
veh_ca = np.arctan(veh_cgf[0] / veh_cgs[0])
veh_a2 = np.abs(veh_cgs[1] - veh_striking[3])
veh_RDS = np.abs(V1_v * np.cos(delta_angle) - V2_v)
veh_a1 = np.abs(np.sqrt(veh_cgf[0] ** 2 + veh_cgs[0] ** 2) * np.cos(veh_ca + delta_angle_2))
if (veh_striking[1]+1) in [16, 1, 2, 3, 17, 20, 21] and (veh_striking[2]+1) in [16, 1, 2, 3, 17, 20, 21]:
veh_e = 2 / veh_RDS
else:
veh_e = 0.5 / veh_RDS
elif veh_striking[0] == 2:
veh_ca = np.arctan(veh_cgf[0] / veh_cgs[0])
veh_a2 = np.abs(veh_cgf[1] - veh_striking[3])
veh_a1 = np.abs(np.sqrt(veh_cgf[0] ** 2 + veh_cgs[0] ** 2) * np.cos(delta_angle_2 - veh_ca + np.pi / 2))
veh_RDS = V1_v * np.sin(delta_angle_2)
veh_e = 1.5 / veh_RDS
elif veh_striking[0] == 3:
veh_ca = np.arctan(veh_cgf[1] / veh_cgs[1])
veh_a1 = np.abs(veh_cgs[0] - veh_striking[3])
veh_RDS = np.abs(V2_v * np.cos(delta_angle) - V1_v)
veh_a2 = np.abs(np.sqrt(veh_cgf[1] ** 2 + veh_cgs[1] ** 2) * np.cos(veh_ca + delta_angle_2))
if (veh_striking[1]+1) in [16, 1, 2, 3, 17, 20, 21] and (veh_striking[2]+1) in [16, 1, 2, 3, 17, 20, 21]:
veh_e = 2 / veh_RDS
else:
veh_e = 0.5 / veh_RDS
elif veh_striking[0] == 4:
veh_ca = np.arctan(veh_cgf[1] / veh_cgs[1])
veh_a1 = np.abs(veh_cgf[0] - veh_striking[3])
veh_a2 = np.abs(np.sqrt(veh_cgf[1] ** 2 + veh_cgs[1] ** 2) * np.cos(delta_angle_2 - veh_ca + np.pi / 2))
veh_RDS = V2_v * np.sin(delta_angle_2)
veh_e = 1.5 / veh_RDS
# Obtain delta-v based on the plane 2-DOF rigid-body collision model with momentum conservation.
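        # In symbols (matching the two delta_v lines below): with effective mass
        # ratios y_i = k_i^2 / (a_i^2 + k_i^2),
        #     delta_v1 = (1 + e) * m2 * y1 * y2 * v_RDS / (m1 * y1 + m2 * y2)
        #     delta_v2 = (1 + e) * m1 * y1 * y2 * v_RDS / (m1 * y1 + m2 * y2)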
veh_y1 = veh_k[0] ** 2 / (veh_a1 ** 2 + veh_k[0] ** 2)
veh_y2 = veh_k[1] ** 2 / (veh_a2 ** 2 + veh_k[1] ** 2)
delta_v1 = (1 + veh_e) * veh_m[1] * veh_y1 * veh_y2 * veh_RDS / (veh_m[0] * veh_y1 + veh_m[1] * veh_y2)
delta_v2 = (1 + veh_e) * veh_m[0] * veh_y1 * veh_y2 * veh_RDS / (veh_m[0] * veh_y1 + veh_m[1] * veh_y2)
delta_v1_list.append(delta_v1)
delta_v2_list.append(delta_v2)
delta_v1_ = max(delta_v1_list)
delta_v2_ = max(delta_v2_list)
index = delta_v1_list.index(max(delta_v1_list))
return delta_v1_, delta_v2_, index | 43.135802 | 117 | 0.556955 |
8a0ead4871ddc6b047237522f5f34d4d48742f52 | 11,790 | py | Python | train/train.py | TontonTremblay/pixel-nerf | 349b5f3f173cd76def05b6de8aa52c69a4f0c7fa | [
"BSD-2-Clause"
] | null | null | null | train/train.py | TontonTremblay/pixel-nerf | 349b5f3f173cd76def05b6de8aa52c69a4f0c7fa | [
"BSD-2-Clause"
] | null | null | null | train/train.py | TontonTremblay/pixel-nerf | 349b5f3f173cd76def05b6de8aa52c69a4f0c7fa | [
"BSD-2-Clause"
] | null | null | null | # Training to a set of multiple objects (e.g. ShapeNet or DTU)
# tensorboard logs available in logs/<expname>
import sys
import os
sys.path.insert(
0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "src"))
)
import warnings
import trainlib
from model import make_model, loss
from render import NeRFRenderer
from data import get_split_dataset
import util
import numpy as np
import torch.nn.functional as F
import torch
from dotmap import DotMap
args, conf = util.args.parse_args(extra_args, training=True, default_ray_batch_size=128)
device = util.get_cuda(args.gpu_id[0])
dset, val_dset, _ = get_split_dataset(args.dataset_format, args.datadir)
print(
"dset z_near {}, z_far {}, lindisp {}".format(dset.z_near, dset.z_far, dset.lindisp)
)
net = make_model(conf["model"]).to(device=device)
net.stop_encoder_grad = args.freeze_enc
if args.freeze_enc:
print("Encoder frozen")
net.encoder.eval()
renderer = NeRFRenderer.from_conf(conf["renderer"], lindisp=dset.lindisp,).to(
device=device
)
# Parallelize
render_par = renderer.bind_parallel(net, args.gpu_id).eval()
nviews = list(map(int, args.nviews.split()))
trainer = PixelNeRFTrainer()
trainer.start()
| 33.976945 | 112 | 0.566073 |
8a10a1ae5c36176cfdd1c3ad55656efe8325a99f | 20,351 | py | Python | napari/_qt/dialogs/qt_plugin_dialog.py | kne42/napari | d61d0be0ef8ea622dd3d6acd270c0529816c11ec | [
"BSD-3-Clause"
] | null | null | null | napari/_qt/dialogs/qt_plugin_dialog.py | kne42/napari | d61d0be0ef8ea622dd3d6acd270c0529816c11ec | [
"BSD-3-Clause"
] | null | null | null | napari/_qt/dialogs/qt_plugin_dialog.py | kne42/napari | d61d0be0ef8ea622dd3d6acd270c0529816c11ec | [
"BSD-3-Clause"
] | null | null | null | import os
import sys
from pathlib import Path
from typing import Sequence
from napari_plugin_engine.dist import standard_metadata
from napari_plugin_engine.exceptions import PluginError
from qtpy.QtCore import QEvent, QProcess, QProcessEnvironment, QSize, Qt, Slot
from qtpy.QtGui import QFont, QMovie
from qtpy.QtWidgets import (
QCheckBox,
QDialog,
QFrame,
QHBoxLayout,
QLabel,
QLineEdit,
QListWidget,
QListWidgetItem,
QPushButton,
QSizePolicy,
QSplitter,
QTextEdit,
QVBoxLayout,
QWidget,
)
import napari.resources
from ...plugins import plugin_manager
from ...plugins.pypi import (
ProjectInfo,
iter_napari_plugin_info,
normalized_name,
)
from ...utils._appdirs import user_plugin_dir, user_site_packages
from ...utils.misc import parse_version, running_as_bundled_app
from ...utils.translations import trans
from ..qthreading import create_worker
from ..widgets.qt_eliding_label import ElidingLabel
from ..widgets.qt_plugin_sorter import QtPluginSorter
from .qt_plugin_report import QtPluginErrReporter
# TODO: add error icon and handle pip install errors
# TODO: add queue to handle clicks when already processing
class QtPluginDialog(QDialog):
if __name__ == "__main__":
from qtpy.QtWidgets import QApplication
app = QApplication([])
w = QtPluginDialog()
w.show()
app.exec_()
| 37.617375 | 79 | 0.640214 |
8a112375ff4d16de8957c825f7c7971fdb15e0cc | 1,179 | py | Python | hata/discord/webhook/utils.py | WizzyBots/hata | f6991afc0bebf7dad932888a536f4d010f8663c7 | [
"0BSD"
] | 1 | 2022-03-02T03:59:57.000Z | 2022-03-02T03:59:57.000Z | hata/discord/webhook/utils.py | m0nk3ybraindead/hata | f87ed3d7009eeae31d6ea158772efd33775c7b1c | [
"0BSD"
] | 1 | 2022-02-08T16:54:39.000Z | 2022-02-08T16:54:39.000Z | hata/discord/webhook/utils.py | WizzyBots/hata | f6991afc0bebf7dad932888a536f4d010f8663c7 | [
"0BSD"
] | null | null | null | __all__ = ('create_partial_webhook_from_id', )
from scarletio import export
from ..core import USERS
from .preinstanced import WebhookType
from .webhook import Webhook
| 27.418605 | 115 | 0.653096 |
8a1292fe9e365e4f3b12243aeeeb62b3fcd34222 | 1,067 | py | Python | MIT/600.1x - Introduction to Computer Science and Programming Using Python/Unit 4/Problem Set 4/get_word_score.py | henriqueumeda/-Python-study | 28e93a377afa4732037a29eb74d4bc7c9e24b62f | [
"MIT"
] | null | null | null | MIT/600.1x - Introduction to Computer Science and Programming Using Python/Unit 4/Problem Set 4/get_word_score.py | henriqueumeda/-Python-study | 28e93a377afa4732037a29eb74d4bc7c9e24b62f | [
"MIT"
] | null | null | null | MIT/600.1x - Introduction to Computer Science and Programming Using Python/Unit 4/Problem Set 4/get_word_score.py | henriqueumeda/-Python-study | 28e93a377afa4732037a29eb74d4bc7c9e24b62f | [
"MIT"
] | null | null | null | SCRABBLE_LETTER_VALUES = {
'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1, 'f': 4, 'g': 2, 'h': 4, 'i': 1, 'j': 8, 'k': 5, 'l': 1, 'm': 3, 'n': 1,
'o': 1, 'p': 3, 'q': 10, 'r': 1, 's': 1, 't': 1, 'u': 1, 'v': 4, 'w': 4, 'x': 8, 'y': 4, 'z': 10
}
def getWordScore(word, n):
"""
Returns the score for a word. Assumes the word is a valid word.
The score for a word is the sum of the points for letters in the
word, multiplied by the length of the word, PLUS 50 points if all n
letters are used on the first turn.
Letters are scored as in Scrabble; A is worth 1, B is worth 3, C is
worth 3, D is worth 2, E is worth 1, and so on (see SCRABBLE_LETTER_VALUES)
word: string (lowercase letters)
n: integer (HAND_SIZE; i.e., hand size required for additional points)
returns: int >= 0
"""
total_points = 0
for letter in word:
total_points += SCRABBLE_LETTER_VALUES[letter]
total_points *= len(word)
if len(word) == n:
total_points += 50
return total_points
print(getWordScore('waybill', 7))
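# Worked example: 'waybill' scores 4+1+4+3+1+1+1 = 15 letter points, times
# len('waybill') = 7 gives 105, plus the 50-point bonus because all n = 7
# letters are used, so the line above prints 155.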
| 35.566667 | 115 | 0.585754 |
8a13575cd76b03c2660c0f973dca2598509c1205 | 34,179 | py | Python | sdk/lusid/models/lusid_instrument.py | rizwansaeed/lusid-sdk-python-preview | 52d092d6d4099b8526f0318f3fe1ddc0b943da6a | [
"MIT"
] | null | null | null | sdk/lusid/models/lusid_instrument.py | rizwansaeed/lusid-sdk-python-preview | 52d092d6d4099b8526f0318f3fe1ddc0b943da6a | [
"MIT"
] | null | null | null | sdk/lusid/models/lusid_instrument.py | rizwansaeed/lusid-sdk-python-preview | 52d092d6d4099b8526f0318f3fe1ddc0b943da6a | [
"MIT"
] | null | null | null | # coding: utf-8
"""
LUSID API
# Introduction This page documents the [LUSID APIs](https://www.lusid.com/api/swagger), which allows authorised clients to query and update their data within the LUSID platform. SDKs to interact with the LUSID APIs are available in the following languages : * [C#](https://github.com/finbourne/lusid-sdk-csharp) * [Java](https://github.com/finbourne/lusid-sdk-java) * [JavaScript](https://github.com/finbourne/lusid-sdk-js) * [Python](https://github.com/finbourne/lusid-sdk-python) # Data Model The LUSID API has a relatively lightweight but extremely powerful data model. One of the goals of LUSID was not to enforce on clients a single rigid data model but rather to provide a flexible foundation onto which clients can map their own data models. The core entities in LUSID provide a minimal structure and set of relationships, and the data model can be extended using Properties. The LUSID data model is exposed through the LUSID APIs. The APIs provide access to both business objects and the meta data used to configure the systems behaviours. The key business entities are: - * **Portfolios** A portfolio is a container for transactions and holdings (a **Transaction Portfolio**) or constituents (a **Reference Portfolio**). * **Derived Portfolios**. Derived Portfolios allow Portfolios to be created based on other Portfolios, by overriding or adding specific items. * **Holdings** A Holding is a quantity of an Instrument or a balance of cash within a Portfolio. Holdings can only be adjusted via Transactions. * **Transactions** A Transaction is an economic event that occurs in a Portfolio, causing its holdings to change. * **Corporate Actions** A corporate action is a market event which occurs to an Instrument and thus applies to all portfolios which holding the instrument. Examples are stock splits or mergers. * **Constituents** A constituent is a record in a Reference Portfolio containing an Instrument and an associated weight. * **Instruments** An instrument represents a currency, tradable instrument or OTC contract that is attached to a transaction and a holding. * **Properties** All major entities allow additional user defined properties to be associated with them. For example, a Portfolio manager may be associated with a portfolio. Meta data includes: - * **Transaction Types** Transactions are booked with a specific transaction type. The types are client defined and are used to map the Transaction to a series of movements which update the portfolio holdings. * **Properties Types** Types of user defined properties used within the system. ## Scope All data in LUSID is segregated at the client level. Entities in LUSID are identifiable by a unique code. Every entity lives within a logical data partition known as a Scope. Scope is an identity namespace allowing two entities with the same unique code to co-exist within individual address spaces. For example, prices for equities from different vendors may be uploaded into different scopes such as `client/vendor1` and `client/vendor2`. A portfolio may then be valued using either of the price sources by referencing the appropriate scope. LUSID Clients cannot access scopes of other clients. ## Instruments LUSID has its own built-in instrument master which you can use to master your own instrument universe. Every instrument must be created with one or more unique market identifiers, such as [FIGI](https://openfigi.com/). For any non-listed instruments (eg OTCs), you can upload an instrument against a custom ID of your choosing. 
In addition, LUSID will allocate each instrument a unique 'LUSID instrument identifier'. The LUSID instrument identifier is what is used when uploading transactions, holdings, prices, etc. The API exposes an `instrument/lookup` endpoint which can be used to lookup these LUSID identifiers using their market identifiers. Cash can be referenced using the ISO currency code prefixed with \"`CCY_`\" e.g. `CCY_GBP` ## Instrument Data Instrument data can be uploaded to the system using the [Instrument Properties](#tag/InstrumentProperties) endpoint. | Field|Type|Description | | ---|---|--- | | Key|propertykey|The key of the property. This takes the format {domain}/{scope}/{code} e.g. 'Instrument/system/Name' or 'Transaction/strategy/quantsignal'. | | Value|string|The value of the property. | | EffectiveFrom|datetimeoffset|The effective datetime from which the property is valid. | | EffectiveUntil|datetimeoffset|The effective datetime until which the property is valid. If not supplied this will be valid indefinitely, potentially overwriting values with EffectiveFrom's in the future. | ## Transaction Portfolios Portfolios are the top-level entity containers within LUSID, containing transactions, corporate actions and holdings. The transactions build up the portfolio holdings on which valuations, analytics profit & loss and risk can be calculated. Properties can be associated with Portfolios to add in additional data. Portfolio properties can be changed over time, for example to allow a Portfolio Manager to be linked with a Portfolio. Additionally, portfolios can be securitised and held by other portfolios, allowing LUSID to perform \"drill-through\" into underlying fund holdings ### Derived Portfolios LUSID also allows for a portfolio to be composed of another portfolio via derived portfolios. A derived portfolio can contain its own transactions and also inherits any transactions from its parent portfolio. Any changes made to the parent portfolio are automatically reflected in derived portfolio. Derived portfolios in conjunction with scopes are a powerful construct. For example, to do pre-trade what-if analysis, a derived portfolio could be created a new namespace linked to the underlying live (parent) portfolio. Analysis can then be undertaken on the derived portfolio without affecting the live portfolio. ### Transactions A transaction represents an economic activity against a Portfolio. Transactions are processed according to a configuration. This will tell the LUSID engine how to interpret the transaction and correctly update the holdings. LUSID comes with a set of transaction types you can use out of the box, or you can configure your own set(s) of transactions. For more details see the [LUSID Getting Started Guide for transaction configuration.](https://support.lusid.com/configuring-transaction-types) | Field|Type|Description | | ---|---|--- | | TransactionId|string|The unique identifier for the transaction. | | Type|string|The type of the transaction e.g. 'Buy', 'Sell'. The transaction type should have been pre-configured via the System Configuration API endpoint. If it hasn't been pre-configured the transaction will still be updated or inserted however you will be unable to generate the resultant holdings for the portfolio that contains this transaction as LUSID does not know how to process it. | | InstrumentIdentifiers|map|A set of instrument identifiers to use to resolve the transaction to a unique instrument. | | TransactionDate|dateorcutlabel|The date of the transaction. 
| | SettlementDate|dateorcutlabel|The settlement date of the transaction. | | Units|decimal|The number of units transacted in the associated instrument. | | TransactionPrice|transactionprice|The price for each unit of the transacted instrument in the transaction currency. | | TotalConsideration|currencyandamount|The total value of the transaction in the settlement currency. | | ExchangeRate|decimal|The exchange rate between the transaction and settlement currency. For example if the transaction currency is in USD and the settlement currency is in GBP this this the USD/GBP rate. | | TransactionCurrency|currency|The transaction currency. | | Properties|map|Set of unique transaction properties and associated values to store with the transaction. Each property must be from the 'Transaction' domain. | | CounterpartyId|string|The identifier for the counterparty of the transaction. | | Source|string|The source of the transaction. This is used to look up the appropriate transaction group set in the transaction type configuration. | From these fields, the following values can be calculated * **Transaction value in Transaction currency**: TotalConsideration / ExchangeRate * **Transaction value in Portfolio currency**: Transaction value in Transaction currency * TradeToPortfolioRate #### Example Transactions ##### A Common Purchase Example Three example transactions are shown in the table below. They represent a purchase of USD denominated IBM shares within a Sterling denominated portfolio. * The first two transactions are for separate buy and fx trades * Buying 500 IBM shares for $71,480.00 * A spot foreign exchange conversion to fund the IBM purchase. (Buy $71,480.00 for £54,846.60) * The third transaction is an alternate version of the above trades. Buying 500 IBM shares and settling directly in Sterling. | Column | Buy Trade | Fx Trade | Buy Trade with foreign Settlement | | ----- | ----- | ----- | ----- | | TransactionId | FBN00001 | FBN00002 | FBN00003 | | Type | Buy | FxBuy | Buy | | InstrumentIdentifiers | { \"figi\", \"BBG000BLNNH6\" } | { \"CCY\", \"CCY_USD\" } | { \"figi\", \"BBG000BLNNH6\" } | | TransactionDate | 2018-08-02 | 2018-08-02 | 2018-08-02 | | SettlementDate | 2018-08-06 | 2018-08-06 | 2018-08-06 | | Units | 500 | 71480 | 500 | | TransactionPrice | 142.96 | 1 | 142.96 | | TradeCurrency | USD | USD | USD | | ExchangeRate | 1 | 0.7673 | 0.7673 | | TotalConsideration.Amount | 71480.00 | 54846.60 | 54846.60 | | TotalConsideration.Currency | USD | GBP | GBP | | Trade/default/TradeToPortfolioRate* | 0.7673 | 0.7673 | 0.7673 | [* This is a property field] ##### A Forward FX Example LUSID has a flexible transaction modelling system, meaning there are a number of different ways of modelling forward fx trades. The default LUSID transaction types are FwdFxBuy and FwdFxSell. Using these transaction types, LUSID will generate two holdings for each Forward FX trade, one for each currency in the trade. 
An example Forward Fx trade to sell GBP for USD in a JPY-denominated portfolio is shown below: | Column | Forward 'Sell' Trade | Notes | | ----- | ----- | ---- | | TransactionId | FBN00004 | | | Type | FwdFxSell | | | InstrumentIdentifiers | { \"Instrument/default/Currency\", \"GBP\" } | | | TransactionDate | 2018-08-02 | | | SettlementDate | 2019-02-06 | Six month forward | | Units | 10000.00 | Units of GBP | | TransactionPrice | 1 | | | TradeCurrency | GBP | Currency being sold | | ExchangeRate | 1.3142 | Agreed rate between GBP and USD | | TotalConsideration.Amount | 13142.00 | Amount in the settlement currency, USD | | TotalConsideration.Currency | USD | Settlement currency | | Trade/default/TradeToPortfolioRate | 142.88 | Rate between trade currency, GBP and portfolio base currency, JPY | Please note that exactly the same economic behaviour could be modelled using the FwdFxBuy Transaction Type with the amounts and rates reversed. ### Holdings A holding represents a position in an instrument or cash on a given date. | Field|Type|Description | | ---|---|--- | | InstrumentUid|string|The unqiue Lusid Instrument Id (LUID) of the instrument that the holding is in. | | SubHoldingKeys|map|The sub-holding properties which identify the holding. Each property will be from the 'Transaction' domain. These are configured when a transaction portfolio is created. | | Properties|map|The properties which have been requested to be decorated onto the holding. These will be from the 'Instrument' or 'Holding' domain. | | HoldingType|string|The type of the holding e.g. Position, Balance, CashCommitment, Receivable, ForwardFX etc. | | Units|decimal|The total number of units of the holding. | | SettledUnits|decimal|The total number of settled units of the holding. | | Cost|currencyandamount|The total cost of the holding in the transaction currency. | | CostPortfolioCcy|currencyandamount|The total cost of the holding in the portfolio currency. | | Transaction|transaction|The transaction associated with an unsettled holding. | ## Corporate Actions Corporate actions are represented within LUSID in terms of a set of instrument-specific 'transitions'. These transitions are used to specify the participants of the corporate action, and the effect that the corporate action will have on holdings in those participants. ### Corporate Action | Field|Type|Description | | ---|---|--- | | CorporateActionCode|code|The unique identifier of this corporate action | | Description|string| | | AnnouncementDate|datetimeoffset|The announcement date of the corporate action | | ExDate|datetimeoffset|The ex date of the corporate action | | RecordDate|datetimeoffset|The record date of the corporate action | | PaymentDate|datetimeoffset|The payment date of the corporate action | | Transitions|corporateactiontransition[]|The transitions that result from this corporate action | ### Transition | Field|Type|Description | | ---|---|--- | | InputTransition|corporateactiontransitioncomponent|Indicating the basis of the corporate action - which security and how many units | | OutputTransitions|corporateactiontransitioncomponent[]|What will be generated relative to the input transition | ### Example Corporate Action Transitions #### A Dividend Action Transition In this example, for each share of IBM, 0.20 units (or 20 pence) of GBP are generated. 
| Column | Input Transition | Output Transition | | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"ccy\" : \"CCY_GBP\" } | | Units Factor | 1 | 0.20 | | Cost Factor | 1 | 0 | #### A Split Action Transition In this example, for each share of IBM, we end up with 2 units (2 shares) of IBM, with total value unchanged. | Column | Input Transition | Output Transition | | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | | Units Factor | 1 | 2 | | Cost Factor | 1 | 1 | #### A Spinoff Action Transition In this example, for each share of IBM, we end up with 1 unit (1 share) of IBM and 3 units (3 shares) of Celestica, with 85% of the value remaining on the IBM share, and 5% in each Celestica share (15% total). | Column | Input Transition | Output Transition 1 | Output Transition 2 | | ----- | ----- | ----- | ----- | | Instrument Identifiers | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000BLNNH6\" } | { \"figi\" : \"BBG000HBGRF3\" } | | Units Factor | 1 | 1 | 3 | | Cost Factor | 1 | 0.85 | 0.15 | ## Reference Portfolios Reference portfolios are portfolios that contain constituents with weights. They are designed to represent entities such as indices and benchmarks. ### Constituents | Field|Type|Description | | ---|---|--- | | InstrumentIdentifiers|map|Unique instrument identifiers | | InstrumentUid|string|LUSID's internal unique instrument identifier, resolved from the instrument identifiers | | Currency|decimal| | | Weight|decimal| | | FloatingWeight|decimal| | ## Portfolio Groups Portfolio groups allow the construction of a hierarchy from portfolios and groups. Portfolio operations on the group are executed on an aggregated set of portfolios in the hierarchy. For example: * Global Portfolios _(group)_ * APAC _(group)_ * Hong Kong _(portfolio)_ * Japan _(portfolio)_ * Europe _(group)_ * France _(portfolio)_ * Germany _(portfolio)_ * UK _(portfolio)_ In this example **Global Portfolios** is a group that consists of an aggregate of **Hong Kong**, **Japan**, **France**, **Germany** and **UK** portfolios. ## Properties Properties are key-value pairs that can be applied to any entity within a domain (where a domain is `trade`, `portfolio`, `security` etc). Properties must be defined before use with a `PropertyDefinition` and can then subsequently be added to entities. ## Schema A detailed description of the entities used by the API and parameters for endpoints which take a JSON document can be retrieved via the `schema` endpoint. ## Meta data The following headers are returned on all responses from LUSID | Name | Purpose | | --- | --- | | lusid-meta-duration | Duration of the request | | lusid-meta-success | Whether or not LUSID considered the request to be successful | | lusid-meta-requestId | The unique identifier for the request | | lusid-schema-url | Url of the schema for the data being returned | | lusid-property-schema-url | Url of the schema for any properties | # Error Codes | Code|Name|Description | | ---|---|--- | | <a name=\"-10\">-10</a>|Server Configuration Error| | | <a name=\"-1\">-1</a>|Unknown error|An unexpected error was encountered on our side. 
| | <a name=\"102\">102</a>|Version Not Found| | | <a name=\"103\">103</a>|Api Rate Limit Violation| | | <a name=\"104\">104</a>|Instrument Not Found| | | <a name=\"105\">105</a>|Property Not Found| | | <a name=\"106\">106</a>|Portfolio Recursion Depth| | | <a name=\"108\">108</a>|Group Not Found| | | <a name=\"109\">109</a>|Portfolio Not Found| | | <a name=\"110\">110</a>|Property Schema Not Found| | | <a name=\"111\">111</a>|Portfolio Ancestry Not Found| | | <a name=\"112\">112</a>|Portfolio With Id Already Exists| | | <a name=\"113\">113</a>|Orphaned Portfolio| | | <a name=\"119\">119</a>|Missing Base Claims| | | <a name=\"121\">121</a>|Property Not Defined| | | <a name=\"122\">122</a>|Cannot Delete System Property| | | <a name=\"123\">123</a>|Cannot Modify Immutable Property Field| | | <a name=\"124\">124</a>|Property Already Exists| | | <a name=\"125\">125</a>|Invalid Property Life Time| | | <a name=\"126\">126</a>|Property Constraint Style Excludes Properties| | | <a name=\"127\">127</a>|Cannot Modify Default Data Type| | | <a name=\"128\">128</a>|Group Already Exists| | | <a name=\"129\">129</a>|No Such Data Type| | | <a name=\"130\">130</a>|Undefined Value For Data Type| | | <a name=\"131\">131</a>|Unsupported Value Type Defined On Data Type| | | <a name=\"132\">132</a>|Validation Error| | | <a name=\"133\">133</a>|Loop Detected In Group Hierarchy| | | <a name=\"134\">134</a>|Undefined Acceptable Values| | | <a name=\"135\">135</a>|Sub Group Already Exists| | | <a name=\"138\">138</a>|Price Source Not Found| | | <a name=\"139\">139</a>|Analytic Store Not Found| | | <a name=\"141\">141</a>|Analytic Store Already Exists| | | <a name=\"143\">143</a>|Client Instrument Already Exists| | | <a name=\"144\">144</a>|Duplicate In Parameter Set| | | <a name=\"147\">147</a>|Results Not Found| | | <a name=\"148\">148</a>|Order Field Not In Result Set| | | <a name=\"149\">149</a>|Operation Failed| | | <a name=\"150\">150</a>|Elastic Search Error| | | <a name=\"151\">151</a>|Invalid Parameter Value| | | <a name=\"153\">153</a>|Command Processing Failure| | | <a name=\"154\">154</a>|Entity State Construction Failure| | | <a name=\"155\">155</a>|Entity Timeline Does Not Exist| | | <a name=\"156\">156</a>|Concurrency Conflict Failure| | | <a name=\"157\">157</a>|Invalid Request| | | <a name=\"158\">158</a>|Event Publish Unknown| | | <a name=\"159\">159</a>|Event Query Failure| | | <a name=\"160\">160</a>|Blob Did Not Exist| | | <a name=\"162\">162</a>|Sub System Request Failure| | | <a name=\"163\">163</a>|Sub System Configuration Failure| | | <a name=\"165\">165</a>|Failed To Delete| | | <a name=\"166\">166</a>|Upsert Client Instrument Failure| | | <a name=\"167\">167</a>|Illegal As At Interval| | | <a name=\"168\">168</a>|Illegal Bitemporal Query| | | <a name=\"169\">169</a>|Invalid Alternate Id| | | <a name=\"170\">170</a>|Cannot Add Source Portfolio Property Explicitly| | | <a name=\"171\">171</a>|Entity Already Exists In Group| | | <a name=\"173\">173</a>|Entity With Id Already Exists| | | <a name=\"174\">174</a>|Derived Portfolio Details Do Not Exist| | | <a name=\"176\">176</a>|Portfolio With Name Already Exists| | | <a name=\"177\">177</a>|Invalid Transactions| | | <a name=\"178\">178</a>|Reference Portfolio Not Found| | | <a name=\"179\">179</a>|Duplicate Id| | | <a name=\"180\">180</a>|Command Retrieval Failure| | | <a name=\"181\">181</a>|Data Filter Application Failure| | | <a name=\"182\">182</a>|Search Failed| | | <a name=\"183\">183</a>|Movements Engine Configuration Key Failure| | | 
<a name=\"184\">184</a>|Fx Rate Source Not Found| | | <a name=\"185\">185</a>|Accrual Source Not Found| | | <a name=\"186\">186</a>|Access Denied| | | <a name=\"187\">187</a>|Invalid Identity Token| | | <a name=\"188\">188</a>|Invalid Request Headers| | | <a name=\"189\">189</a>|Price Not Found| | | <a name=\"190\">190</a>|Invalid Sub Holding Keys Provided| | | <a name=\"191\">191</a>|Duplicate Sub Holding Keys Provided| | | <a name=\"192\">192</a>|Cut Definition Not Found| | | <a name=\"193\">193</a>|Cut Definition Invalid| | | <a name=\"194\">194</a>|Time Variant Property Deletion Date Unspecified| | | <a name=\"195\">195</a>|Perpetual Property Deletion Date Specified| | | <a name=\"196\">196</a>|Time Variant Property Upsert Date Unspecified| | | <a name=\"197\">197</a>|Perpetual Property Upsert Date Specified| | | <a name=\"200\">200</a>|Invalid Unit For Data Type| | | <a name=\"201\">201</a>|Invalid Type For Data Type| | | <a name=\"202\">202</a>|Invalid Value For Data Type| | | <a name=\"203\">203</a>|Unit Not Defined For Data Type| | | <a name=\"204\">204</a>|Units Not Supported On Data Type| | | <a name=\"205\">205</a>|Cannot Specify Units On Data Type| | | <a name=\"206\">206</a>|Unit Schema Inconsistent With Data Type| | | <a name=\"207\">207</a>|Unit Definition Not Specified| | | <a name=\"208\">208</a>|Duplicate Unit Definitions Specified| | | <a name=\"209\">209</a>|Invalid Units Definition| | | <a name=\"210\">210</a>|Invalid Instrument Identifier Unit| | | <a name=\"211\">211</a>|Holdings Adjustment Does Not Exist| | | <a name=\"212\">212</a>|Could Not Build Excel Url| | | <a name=\"213\">213</a>|Could Not Get Excel Version| | | <a name=\"214\">214</a>|Instrument By Code Not Found| | | <a name=\"215\">215</a>|Entity Schema Does Not Exist| | | <a name=\"216\">216</a>|Feature Not Supported On Portfolio Type| | | <a name=\"217\">217</a>|Quote Not Found| | | <a name=\"218\">218</a>|Invalid Quote Identifier| | | <a name=\"219\">219</a>|Invalid Metric For Data Type| | | <a name=\"220\">220</a>|Invalid Instrument Definition| | | <a name=\"221\">221</a>|Instrument Upsert Failure| | | <a name=\"222\">222</a>|Reference Portfolio Request Not Supported| | | <a name=\"223\">223</a>|Transaction Portfolio Request Not Supported| | | <a name=\"224\">224</a>|Invalid Property Value Assignment| | | <a name=\"230\">230</a>|Transaction Type Not Found| | | <a name=\"231\">231</a>|Transaction Type Duplication| | | <a name=\"232\">232</a>|Portfolio Does Not Exist At Given Date| | | <a name=\"233\">233</a>|Query Parser Failure| | | <a name=\"234\">234</a>|Duplicate Constituent| | | <a name=\"235\">235</a>|Unresolved Instrument Constituent| | | <a name=\"236\">236</a>|Unresolved Instrument In Transition| | | <a name=\"237\">237</a>|Missing Side Definitions| | | <a name=\"299\">299</a>|Invalid Recipe| | | <a name=\"300\">300</a>|Missing Recipe| | | <a name=\"301\">301</a>|Dependencies| | | <a name=\"304\">304</a>|Portfolio Preprocess Failure| | | <a name=\"310\">310</a>|Valuation Engine Failure| | | <a name=\"311\">311</a>|Task Factory Failure| | | <a name=\"312\">312</a>|Task Evaluation Failure| | | <a name=\"313\">313</a>|Task Generation Failure| | | <a name=\"314\">314</a>|Engine Configuration Failure| | | <a name=\"315\">315</a>|Model Specification Failure| | | <a name=\"320\">320</a>|Market Data Key Failure| | | <a name=\"321\">321</a>|Market Resolver Failure| | | <a name=\"322\">322</a>|Market Data Failure| | | <a name=\"330\">330</a>|Curve Failure| | | <a name=\"331\">331</a>|Volatility Surface 
Failure| | | <a name=\"332\">332</a>|Volatility Cube Failure| | | <a name=\"350\">350</a>|Instrument Failure| | | <a name=\"351\">351</a>|Cash Flows Failure| | | <a name=\"352\">352</a>|Reference Data Failure| | | <a name=\"360\">360</a>|Aggregation Failure| | | <a name=\"361\">361</a>|Aggregation Measure Failure| | | <a name=\"370\">370</a>|Result Retrieval Failure| | | <a name=\"371\">371</a>|Result Processing Failure| | | <a name=\"372\">372</a>|Vendor Result Processing Failure| | | <a name=\"373\">373</a>|Vendor Result Mapping Failure| | | <a name=\"374\">374</a>|Vendor Library Unauthorised| | | <a name=\"375\">375</a>|Vendor Connectivity Error| | | <a name=\"376\">376</a>|Vendor Interface Error| | | <a name=\"377\">377</a>|Vendor Pricing Failure| | | <a name=\"378\">378</a>|Vendor Translation Failure| | | <a name=\"379\">379</a>|Vendor Key Mapping Failure| | | <a name=\"380\">380</a>|Vendor Reflection Failure| | | <a name=\"390\">390</a>|Attempt To Upsert Duplicate Quotes| | | <a name=\"391\">391</a>|Corporate Action Source Does Not Exist| | | <a name=\"392\">392</a>|Corporate Action Source Already Exists| | | <a name=\"393\">393</a>|Instrument Identifier Already In Use| | | <a name=\"394\">394</a>|Properties Not Found| | | <a name=\"395\">395</a>|Batch Operation Aborted| | | <a name=\"400\">400</a>|Invalid Iso4217 Currency Code| | | <a name=\"401\">401</a>|Cannot Assign Instrument Identifier To Currency| | | <a name=\"402\">402</a>|Cannot Assign Currency Identifier To Non Currency| | | <a name=\"403\">403</a>|Currency Instrument Cannot Be Deleted| | | <a name=\"404\">404</a>|Currency Instrument Cannot Have Economic Definition| | | <a name=\"405\">405</a>|Currency Instrument Cannot Have Lookthrough Portfolio| | | <a name=\"406\">406</a>|Cannot Create Currency Instrument With Multiple Identifiers| | | <a name=\"407\">407</a>|Specified Currency Is Undefined| | | <a name=\"410\">410</a>|Index Does Not Exist| | | <a name=\"411\">411</a>|Sort Field Does Not Exist| | | <a name=\"413\">413</a>|Negative Pagination Parameters| | | <a name=\"414\">414</a>|Invalid Search Syntax| | | <a name=\"415\">415</a>|Filter Execution Timeout| | | <a name=\"420\">420</a>|Side Definition Inconsistent| | | <a name=\"450\">450</a>|Invalid Quote Access Metadata Rule| | | <a name=\"451\">451</a>|Access Metadata Not Found| | | <a name=\"452\">452</a>|Invalid Access Metadata Identifier| | | <a name=\"460\">460</a>|Standard Resource Not Found| | | <a name=\"461\">461</a>|Standard Resource Conflict| | | <a name=\"462\">462</a>|Calendar Not Found| | | <a name=\"463\">463</a>|Date In A Calendar Not Found| | | <a name=\"464\">464</a>|Invalid Date Source Data| | | <a name=\"465\">465</a>|Invalid Timezone| | | <a name=\"601\">601</a>|Person Identifier Already In Use| | | <a name=\"602\">602</a>|Person Not Found| | | <a name=\"603\">603</a>|Cannot Set Identifier| | | <a name=\"617\">617</a>|Invalid Recipe Specification In Request| | | <a name=\"618\">618</a>|Inline Recipe Deserialisation Failure| | | <a name=\"619\">619</a>|Identifier Types Not Set For Entity| | | <a name=\"620\">620</a>|Cannot Delete All Client Defined Identifiers| | | <a name=\"650\">650</a>|The Order requested was not found.| | | <a name=\"654\">654</a>|The Allocation requested was not found.| | | <a name=\"655\">655</a>|Cannot build the fx forward target with the given holdings.| | | <a name=\"656\">656</a>|Group does not contain expected entities.| | | <a name=\"667\">667</a>|Relation definition already exists| | | <a name=\"673\">673</a>|Missing 
entitlements for entities in Group| | | <a name=\"674\">674</a>|Next Best Action not found| | | <a name=\"676\">676</a>|Relation definition not defined| | | <a name=\"677\">677</a>|Invalid entity identifier for relation| | | <a name=\"681\">681</a>|Sorting by specified field not supported|One or more of the provided fields to order by were either invalid or not supported. | | <a name=\"682\">682</a>|Too many fields to sort by|The number of fields to sort the data by exceeds the number allowed by the endpoint | | <a name=\"684\">684</a>|Sequence Not Found| | | <a name=\"685\">685</a>|Sequence Already Exists| | | <a name=\"686\">686</a>|Non-cycling sequence has been exhausted| | | <a name=\"687\">687</a>|Legal Entity Identifier Already In Use| | | <a name=\"688\">688</a>|Legal Entity Not Found| | | <a name=\"689\">689</a>|The supplied pagination token is invalid| | | <a name=\"690\">690</a>|Property Type Is Not Supported| | | <a name=\"691\">691</a>|Multiple Tax-lots For Currency Type Is Not Supported| | # noqa: E501
The version of the OpenAPI document: 0.11.2275
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
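    # Example with hypothetical values: `to_dict()` recursively converts nested
    # models, lists and dicts of models into plain Python structures, and
    # `__repr__`/`to_str()` pretty-print that same dict:
    #
    #   instrument = LusidInstrument(...)   # built via the generated __init__
    #   payload = instrument.to_dict()      # plain dict, safe to json.dumps()
    #   print(instrument)                   # pprint.pformat(payload)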
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LusidInstrument):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 221.941558 | 28,647 | 0.692179 |
8a13a931088f76e07468fa49084284d44b5cf0eb | 936 | py | Python | autolatex-master/exemplos_codigo/certificados/certificados.py | luizgui05/autolatex. | 366eb3d88b7e60c119737f958e35cce99e8775e9 | [
"MIT"
] | null | null | null | autolatex-master/exemplos_codigo/certificados/certificados.py | luizgui05/autolatex. | 366eb3d88b7e60c119737f958e35cce99e8775e9 | [
"MIT"
] | null | null | null | autolatex-master/exemplos_codigo/certificados/certificados.py | luizgui05/autolatex. | 366eb3d88b7e60c119737f958e35cce99e8775e9 | [
"MIT"
] | null | null | null | import os
import sys
import sqlite3
con = None
filename = 'certificado'
# Open the database to read the names.
try:
con = sqlite3.connect('math.db')
cur = con.cursor()
cur.execute('select * from math')
data = cur.fetchall()
except sqlite3.Error, e:
print "Error %s:" % e.args[0]
sys.exit(1)
finally:
if con:
con.close()
# Generate one certificate per name.
for row in data:
f = open(filename+'.tex','r+')
old = f.readlines()
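    # The first line of certificado.tex may already hold a '\def\name {...}'
    # macro from a previous iteration; if so, skip it via offset and overwrite
    # it instead of prepending a second definition.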
if old[0][1:4] == 'def':
offset = 1
else:
offset = 0
f.seek(0)
f.write('\\def\\name {'+row[0]+'}\n')
f.writelines(old[offset:])
f.close()
    # Compile the LaTeX file and rename the output PDF for this name.
try:
os.system('pdflatex '+filename+'.tex')
os.system('mv '+filename+'.pdf '+filename+'_'+row[0].replace(' ','_')+'.pdf')
#os.system('xdg-open '+filename+'.pdf &')
except OSError:
print('LaTeX not installed.')
| 20.8 | 85 | 0.569444 |
8a13fa0bd9273ba0ef6fc5a2231a5c8269835d8e | 280 | py | Python | nanoepiseg/main_list_chunks.py | snajder-r/nanoepiseg | 2fe36a82e5b899330da5db6559eb45fe12cad37c | [
"MIT"
] | null | null | null | nanoepiseg/main_list_chunks.py | snajder-r/nanoepiseg | 2fe36a82e5b899330da5db6559eb45fe12cad37c | [
"MIT"
] | null | null | null | nanoepiseg/main_list_chunks.py | snajder-r/nanoepiseg | 2fe36a82e5b899330da5db6559eb45fe12cad37c | [
"MIT"
] | null | null | null | from pathlib import Path
from meth5.meth5 import MetH5File
| 31.111111 | 64 | 0.696429 |
8a14e512e0f7f79c5bcbfd4af00b8cc29f035958 | 6,376 | py | Python | qscatv2/make_seasonal_images.py | tmilliman/sir_to_netcdf | d4641cdc5a9e92a55c0edb2dc6cd8c0e2da6f1fa | [
"MIT"
] | null | null | null | qscatv2/make_seasonal_images.py | tmilliman/sir_to_netcdf | d4641cdc5a9e92a55c0edb2dc6cd8c0e2da6f1fa | [
"MIT"
] | null | null | null | qscatv2/make_seasonal_images.py | tmilliman/sir_to_netcdf | d4641cdc5a9e92a55c0edb2dc6cd8c0e2da6f1fa | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# script to make seasonal means and stddev images of 4-day sig0
# values.
import os
import sys
import glob
import numpy as np
import sirpy2 as sp2
import argparse
from osgeo import gdal
DATADIR = "./"
NODATA_VALUE = -9999.0
Q2M = {
"JAS": list(range(7, 10)),
"OND": list(range(10, 13)),
"JFM": list(range(1, 4)),
"AMJ": list(range(4, 7)),
}
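# Map each quarter label to its calendar month numbers (e.g. "JAS" = July-September).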
# this allows GDAL to throw Python Exceptions
gdal.UseExceptions()
if __name__ == "__main__":
# set up arguments
parser = argparse.ArgumentParser(
"script to make quarterly " + "means and stdevs of qscat dB values"
)
parser.add_argument(
"-v",
"--verbose",
help="increase output verbosity",
action="store_true",
default=False,
)
parser.add_argument(
"-q",
"--quarter",
nargs="?",
choices=("JAS", "OND", "JFM", "AMJ"),
default="JAS",
const="JAS",
help="Quarter for aggregation. Default=JAS",
)
parser.add_argument("region", help="BYU region string (e.g. SAm, NAm, Ama, etc.)")
parser.add_argument(
"year", type=int, help="Year e.g. 1999 (qscat data start in 1999)"
)
args = parser.parse_args()
verbose = args.verbose
year = args.year
quarter = args.quarter
# region list (LAEA regions only)
valid_region_list = [
"Grn",
"Ala",
"CAm",
"NAm",
"SAm",
"NAf",
"SAf",
"Sib",
"Eur",
"SAs",
"ChJ",
"Ind",
"Aus",
"Ber",
]
region = args.region
try:
region_index = valid_region_list.index(region)
except Exception:
sys.stderr.write("Region not valid.\n")
sys.stderr.write("Valid regions are:\n")
sys.stderr.write("{}\n".format(valid_region_list))
sys.exit(1)
if verbose:
print("region: {}".format(region))
print("year: {}".format(year))
print("quarter: {}".format(quarter))
# set data dir
indir = os.path.join(DATADIR, "geotiffs", region, str(year))
outdir = indir
if year == 1999:
year2 = 99
else:
year2 = "{:02d}".format(year - 2000)
monthlist = Q2M[quarter]
# make a list of files for this year
filepatt = "quev-a-{}{}-*.tif".format(region, year2)
globpatt = os.path.join(indir, filepatt)
if verbose:
print("glob pattern: {}".format(globpatt))
filelist = glob.glob(globpatt)
qlist = []
for filepath in filelist:
fn = os.path.basename(filepath)
if verbose:
print(fn)
fn_dt = sp2.fn2dt(fn, date_flag="center")
iyear = fn_dt.year
imonth = fn_dt.month
iday = fn_dt.day
if imonth in monthlist:
qlist.append(fn)
if verbose:
print("{}: {}-{}-{}".format(fn, iyear, imonth, iday))
print("{}-{}: {}".format(year, quarter, qlist))
if len(qlist) == 0:
warnmsg = "No images found for this quarter.\n"
sys.stdout.write(warnmsg)
sys.exit(0)
# loop over images for this quarter
db_quarter = []
for i, image in enumerate(qlist):
a_imgpath = os.path.join(indir, image)
try:
a_ds = gdal.Open(a_imgpath)
except Exception:
print("Unable to open {}".format(a_imgpath))
sys.exit(1)
try:
srcband = a_ds.GetRasterBand(1)
except Exception:
print("Band ({}) not found".format(1))
sys.exit(1)
a_data = srcband.ReadAsArray()
a_mask = a_data == NODATA_VALUE
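        # Flag nodata pixels so they are excluded from the masked mean/std below.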
# if this is the first image get projection and geotransform
if i == 0:
prj = a_ds.GetProjection()
gt = a_ds.GetGeoTransform()
ny, nx = a_data.shape
db_data = a_data
db_masked = np.ma.MaskedArray(db_data, a_mask)
# add image to db_quarter list
db_quarter.append(db_masked)
# close datasets
a_ds = None
# stack list into array and find mean and std
dbarray = np.ma.stack(db_quarter, axis=2)
dbmean = np.ma.mean(dbarray, axis=2)
dbstd = np.ma.std(dbarray, axis=2)
print(dbmean.shape)
# finally, save as a geotiff
output_format = "GTiff"
driver = gdal.GetDriverByName(output_format)
dst_filename = "{}-quev-mean-db-{}-{}.tif"
dst_filename = dst_filename.format(region, year, quarter)
dst_dir = os.path.join(DATADIR, "geotiffs", region, str(year))
dst_path = os.path.join(dst_dir, dst_filename)
if verbose:
print("Output file for sig0 means: {}".format(dst_path))
dst_ds = driver.Create(dst_path, nx, ny, 1, gdal.GDT_Float32)
dst_data = np.ma.filled(dbmean, fill_value=NODATA_VALUE)
dst_ds.GetRasterBand(1).WriteArray(dst_data)
dst_ds.GetRasterBand(1).SetNoDataValue(NODATA_VALUE)
print("gt: {}".format(gt))
dst_ds.SetGeoTransform(gt)
dst_ds.SetProjection(prj)
dst_ds = None
dbmean_min = dbmean.min()
dbmean_max = dbmean.max()
dbmean_median = np.ma.median(dbmean)
print("Quarterly ({}) Mean Stats".format(quarter))
print(" Min: {}".format(dbmean_min))
print(" Max: {}".format(dbmean_max))
print(" Median: {}".format(dbmean_median))
# repeat for standard deviation
output_format = "GTiff"
driver = gdal.GetDriverByName(output_format)
dst_filename = "{}-quev-std-db-{}-{}.tif".format(region, year, quarter)
dst_dir = os.path.join(DATADIR, "geotiffs", region, str(year))
dst_path = os.path.join(dst_dir, dst_filename)
if verbose:
print("Output file: {}".format(dst_path))
dst_ds = driver.Create(dst_path, nx, ny, 1, gdal.GDT_Float32)
dst_data = np.ma.filled(dbstd, fill_value=NODATA_VALUE)
dst_ds.GetRasterBand(1).WriteArray(dst_data)
dst_ds.GetRasterBand(1).SetNoDataValue(NODATA_VALUE)
print("gt: {}".format(gt))
dst_ds.SetGeoTransform(gt)
dst_ds.SetProjection(prj)
dst_ds = None
dbstd_min = dbstd.min()
dbstd_max = dbstd.max()
dbstd_median = np.ma.median(dbstd)
print("Quarterly ({}) Stdev Stats".format(quarter))
print(" Min: {}".format(dbstd_min))
print(" Max: {}".format(dbstd_max))
print(" Median: {}".format(dbstd_median))
| 27.601732 | 86 | 0.592848 |
8a15ab57e7398ab067062419a83d15fd9bf34d36 | 434 | py | Python | ex062.py | noahbarros/Python-Exercises | fafda898473bc984280e201ed11d8ad76cc8624a | [
"MIT"
] | 1 | 2021-07-13T21:41:00.000Z | 2021-07-13T21:41:00.000Z | ex062.py | noahbarros/Python-Exercises | fafda898473bc984280e201ed11d8ad76cc8624a | [
"MIT"
] | null | null | null | ex062.py | noahbarros/Python-Exercises | fafda898473bc984280e201ed11d8ad76cc8624a | [
"MIT"
] | null | null | null | primeiro = int(input('Digite o primeiro termo da PA: '))
razo = int(input('Digite a razão da PA: '))
termo = primeiro
cont = 1
total = 0
mais = 10
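# Print the terms of the progression in batches: start with 10 terms, then
# keep asking how many more to show until the user enters 0.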
while mais != 0:
total += mais
while cont <= total:
print(f'{termo} ', end='')
termo += razo
cont += 1
print('Pausa')
    mais = int(input('Quantos termos você quer usar a mais? '))
print(f'a progressão foi finalizada com {total} termos mostrados')
| 27.125 | 66 | 0.612903 |
8a16b528f332e28d501ffe602ae57113af02e27c | 3,720 | py | Python | arxml_data_extractor/handler/object_handler.py | Brokdar/ArxmlDataExtractor | 2853112cbd4d001418b11ccb99f1db268347dfab | [
"MIT"
] | 16 | 2020-08-16T09:13:35.000Z | 2022-03-17T13:39:26.000Z | arxml_data_extractor/handler/object_handler.py | Brokdar/ArxmlDataExtractor | 2853112cbd4d001418b11ccb99f1db268347dfab | [
"MIT"
] | null | null | null | arxml_data_extractor/handler/object_handler.py | Brokdar/ArxmlDataExtractor | 2853112cbd4d001418b11ccb99f1db268347dfab | [
"MIT"
] | 2 | 2020-10-14T10:54:37.000Z | 2021-07-06T01:30:44.000Z | from lxml.etree import Element, QName
from typing import Union, List, Any
from tqdm import tqdm
import logging
from arxml_data_extractor.handler import value_handler
from arxml_data_extractor.handler.path_handler import PathHandler
from arxml_data_extractor.asr.asr_parser import AsrParser
from arxml_data_extractor.query.data_query import DataQuery
from arxml_data_extractor.query.data_object import DataObject
from arxml_data_extractor.query.data_value import DataValue
| 42.272727 | 125 | 0.595968 |
8a19876a956cc7df8eee4ce39d6fc5531c4cfc7c | 3,401 | py | Python | src/api/datamanage/pro/lifecycle/data_trace/data_set_create.py | Chromico/bk-base | be822d9bbee544a958bed4831348185a75604791 | [
"MIT"
] | 84 | 2021-06-30T06:20:23.000Z | 2022-03-22T03:05:49.000Z | src/api/datamanage/pro/lifecycle/data_trace/data_set_create.py | Chromico/bk-base | be822d9bbee544a958bed4831348185a75604791 | [
"MIT"
] | 7 | 2021-06-30T06:21:16.000Z | 2022-03-29T07:36:13.000Z | src/api/datamanage/pro/lifecycle/data_trace/data_set_create.py | Chromico/bk-base | be822d9bbee544a958bed4831348185a75604791 | [
"MIT"
] | 40 | 2021-06-30T06:21:26.000Z | 2022-03-29T12:42:26.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making BK-BASE available.
Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
BK-BASE is licensed under the MIT License.
License for BK-BASE :
--------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial
portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from copy import deepcopy
from datamanage.pro import exceptions as dm_pro_errors
from datamanage.utils.api import MetaApi
from datamanage.pro.utils.time import utc_to_local, str_to_datetime
from datamanage.pro.lifecycle.models_dict import (
DATASET_CREATE_MAPPINGS,
DATASET_CREATE_EVENT_INFO_DICT,
DataTraceShowType,
ComplexSearchBackendType,
DataTraceFinishStatus,
)
def get_dataset_create_info(dataset_id, dataset_type):
"""
    :param dataset_id: id of the data set
    :param dataset_type: type of the data set, used to look up its primary key and alias
    :return: list holding the data set's creation event
    :rtype: list
"""
    # 1) fetch the data set's created_by / created_at from dgraph
data_set_create_info_statement = """
{
get_dataset_create_info(func: eq(%s, "%s")){created_by created_at}
}
""" % (
DATASET_CREATE_MAPPINGS[dataset_type]['data_set_pk'],
dataset_id,
)
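    # The first %s is the dgraph predicate used as the data set's primary key,
    # the second is the data set id itself.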
query_result = MetaApi.complex_search(
{"backend_type": ComplexSearchBackendType.DGRAPH.value, "statement": data_set_create_info_statement}, raw=True
)
create_info_ret = query_result['data']['data']['get_dataset_create_info']
if not (isinstance(create_info_ret, list) and create_info_ret):
raise dm_pro_errors.GetDataSetCreateInfoError(message_kv={'dataset_id': dataset_id})
    # 2) build the 'data set created' trace event
create_trace_dict = deepcopy(DATASET_CREATE_EVENT_INFO_DICT)
create_trace_dict.update(
{
"sub_type": dataset_type,
"sub_type_alias": DATASET_CREATE_MAPPINGS[dataset_type]['data_set_create_alias'],
"description": DATASET_CREATE_MAPPINGS[dataset_type]['data_set_create_alias'],
"created_at": utc_to_local(create_info_ret[0]['created_at']),
"created_by": create_info_ret[0]['created_by'],
"show_type": DataTraceShowType.DISPLAY.value,
"datetime": str_to_datetime(utc_to_local(create_info_ret[0]['created_at'])),
"status": DataTraceFinishStatus.STATUS,
"status_alias": DataTraceFinishStatus.STATUS_ALIAS,
}
)
return [create_trace_dict]
| 44.168831 | 118 | 0.728021 |
8a1ac6639a0c1cc52dd72036d5f1b225c44d7b2f | 4,363 | py | Python | vectors2.py | shivam13verma/judge-embeddings | 9b861319a1240529d25c15799952e32dde2e894e | [
"MIT"
] | null | null | null | vectors2.py | shivam13verma/judge-embeddings | 9b861319a1240529d25c15799952e32dde2e894e | [
"MIT"
] | null | null | null | vectors2.py | shivam13verma/judge-embeddings | 9b861319a1240529d25c15799952e32dde2e894e | [
"MIT"
] | null | null | null | import locale
import glob
import os
import os.path
import requests
import tarfile
import sys
import re
import gensim
from gensim.models.doc2vec import TaggedDocument
from collections import namedtuple
from gensim.models import Doc2Vec
import gensim.models.doc2vec
from collections import OrderedDict
import multiprocessing
from gensim.test.test_doc2vec import ConcatenatedDoc2Vec
import pickle
reload(sys)
sys.setdefaultencoding("utf-8")
#dirname = '/scratch/ap4608/judge_data'
#locale.setlocale(locale.LC_ALL, 'C')
#
#
## Convert text to lower-case and strip punctuation/symbols from words
#def normalize_text(text):
# norm_text = text.lower()
#
# # Replace breaks with spaces
# norm_text = norm_text.replace('<br />', ' ')
#
# # Pad punctuation with spaces on both sides
# for char in ['.', '"', ',', '(', ')', '!', '?', ';', ':']:
# norm_text = norm_text.replace(char, ' ' + char + ' ')
#
# return norm_text
#
#
## Concat and normalize test/train data
#folders = os.listdir(dirname)
#alldata = ''
#
#for fol in folders:
# temp = ''
# output = fol.replace('/', '-') + '.txt'
#
# # Is there a better pattern to use?
# txt_files = glob.glob('/'.join([dirname, fol, '*.txt']))
#
# for txt in txt_files:
# with open(txt, 'r') as t:
# control_chars = [chr(0x85)]
# t_clean = t.read()
#
# t_clean = t_clean.replace('\n', ' ')
# t_clean = re.sub(r'[^\x00-\x7F]+',' ', t_clean)
#
# for c in control_chars:
# t_clean = t_clean.replace(c, ' ')
#
# temp += t_clean
#
# temp += "\n"
#
# temp_norm = normalize_text(temp)
#
# if len(temp_norm) == 1:
# continue
#
# with open('/'.join([dirname, output]), 'w') as n:
# n.write(temp_norm)
#
# alldata += temp_norm
#
#with open('/'.join([dirname, 'alldata-id.txt']), 'w') as f:
# for idx, line in enumerate(alldata.splitlines()):
# num_line = "_*{0} {1}\n".format(idx, line)
# f.write(num_line)
#
#SentimentDocument = namedtuple('SentimentDocument', 'words tags split sentiment')
#
#alldocs = [] # will hold all docs in original order
#with open(os.path.join(dirname, 'alldata-id.txt')) as alldata:
# for line_no, line in enumerate(alldata):
# tokens = gensim.utils.to_unicode(line).split()
# words = tokens[1:]
# tags = [line_no] # `tags = [tokens[0]]` would also work at extra memory cost
# split = ['train','test','extra','extra'][line_no//25000] # 25k train, 25k test, 25k extra
# sentiment = [1.0, 0.0, 1.0, 0.0, None, None, None, None][line_no//12500] # [12.5K pos, 12.5K neg]*2 then unknown
# alldocs.append(SentimentDocument(words, tags, split, sentiment))
#
#train_docs = [doc for doc in alldocs if doc.split == 'train']
#test_docs = [doc for doc in alldocs if doc.split == 'test']
#doc_list = alldocs[:] # for reshuffling per pass
#
#cores = multiprocessing.cpu_count()
#assert gensim.models.doc2vec.FAST_VERSION > -1, "this will be painfully slow otherwise"
#
#simple_models = [
# # PV-DM w/concatenation - window=5 (both sides) approximates paper's 10-word total window size
# Doc2Vec(dm=1, dm_concat=1, size=100, window=5, negative=5, hs=0, min_count=2, workers=cores),
# # PV-DBOW
# Doc2Vec(dm=0, size=100, negative=5, hs=0, min_count=2, workers=cores),
# # PV-DM w/average
# Doc2Vec(dm=1, dm_mean=1, size=100, window=10, negative=5, hs=0, min_count=2, workers=cores),
#]
#
## speed setup by sharing results of 1st model's vocabulary scan
#simple_models[0].build_vocab(alldocs) # PV-DM/concat requires one special NULL word so it serves as template
#for model in simple_models[1:]:
# model.reset_from(simple_models[0])
#
#models_by_name = OrderedDict((str(model), model) for model in simple_models)
#
#models_by_name['dbow+dmm'] = ConcatenatedDoc2Vec([simple_models[1], simple_models[2]])
#models_by_name['dbow+dmc'] = ConcatenatedDoc2Vec([simple_models[1], simple_models[0]])
#
## Create a document vector list and save it
#doc_vec_list = [x.docvecs for x in simple_models]
docvecs = pickle.load(open('docvecs.p', 'rb'))
print len(docvecs)
print len(docvecs[0])
print docvecs[0]
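# Write one plain-text file per model: one document vector per line.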
for i,x in enumerate(docvecs):
with open('docvecs_'+str(i)+'.txt','w') as f:
for vec in x:
            f.write(" ".join(str(v) for v in vec))
f.write("\n")
# pickle.dump(models_by_name, open('model.p', 'wb'))
| 31.388489 | 121 | 0.651845 |
8a1c71c22813d34b18261a3c040c83b4a288d938 | 1,272 | py | Python | caravan_search_engine/test/test_task.py | crest-cassia/caravan | 0a8e606e31d2d36a9379bdc00fafe55cf9144da6 | [
"MIT"
] | 4 | 2017-12-27T06:04:46.000Z | 2018-04-27T04:07:49.000Z | caravan_search_engine/test/test_task.py | crest-cassia/caravan | 0a8e606e31d2d36a9379bdc00fafe55cf9144da6 | [
"MIT"
] | null | null | null | caravan_search_engine/test/test_task.py | crest-cassia/caravan | 0a8e606e31d2d36a9379bdc00fafe55cf9144da6 | [
"MIT"
] | null | null | null | import unittest
from caravan.task import Task
from caravan.tables import Tables
if __name__ == '__main__':
unittest.main()
| 30.285714 | 63 | 0.589623 |
8a1cd65b30b7bbba4f6241ea55e68759c3f56fc4 | 15,868 | py | Python | splash/render_options.py | tashidexiaoL/splashnew | 2bbb886bae8fa88c30a4460f41ca940c4b010287 | [
"BSD-3-Clause"
] | 3,612 | 2015-01-04T07:22:20.000Z | 2022-03-31T07:12:19.000Z | splash/render_options.py | tashidexiaoL/splashnew | 2bbb886bae8fa88c30a4460f41ca940c4b010287 | [
"BSD-3-Clause"
] | 983 | 2015-01-01T17:54:49.000Z | 2022-03-29T05:05:53.000Z | splash/render_options.py | tashidexiaoL/splashnew | 2bbb886bae8fa88c30a4460f41ca940c4b010287 | [
"BSD-3-Clause"
] | 570 | 2015-01-06T17:48:46.000Z | 2022-03-31T12:35:32.000Z | # -*- coding: utf-8 -*-
import os
import json
from splash import defaults
from splash.utils import to_bytes, path_join_secure
from splash.errors import BadOption
def save_args_to_cache(self, cache):
"""
Process save_args and put all values to cache.
Return a list of (name, key) pairs.
"""
save_args = self.get_save_args()
save_values = [self.data.get(name) for name in save_args]
keys = cache.add_many(save_values)
return list(zip(save_args, keys))
def validate_size_str(size_str):
"""
Validate size string in WxH format.
Can be used to validate both viewport and window size strings. Does not
special-case ``'full'`` viewport. Raises ``ValueError`` if anything goes
wrong.
:param size_str: string to validate
"""
max_width = defaults.VIEWPORT_MAX_WIDTH
max_heigth = defaults.VIEWPORT_MAX_HEIGTH
max_area = defaults.VIEWPORT_MAX_AREA
try:
w, h = map(int, size_str.split('x'))
except ValueError:
raise ValueError("Invalid viewport format: %s" % size_str)
else:
if not ((0 < w <= max_width) and (0 < h <= max_heigth) and
(w * h < max_area)):
raise ValueError("Viewport (%dx%d, area=%d) is out of range (%dx%d, area=%d)" %
(w, h, w * h, max_width, max_heigth, max_area))
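# e.g. with the default limits validate_size_str("1024x768") returns silently,
# while "0x100" or "100000x100000" raise ValueError.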
| 35.578475 | 91 | 0.568188 |
8a1d8b11d101fed7641300b2c4ef25ddc8a61c8b | 362 | py | Python | syntax/func.py | sangumee/Opentutorials-Webn-Python | 9f813f8f342ea99ffee6e31f363f175fa023c489 | [
"MIT"
] | null | null | null | syntax/func.py | sangumee/Opentutorials-Webn-Python | 9f813f8f342ea99ffee6e31f363f175fa023c489 | [
"MIT"
] | null | null | null | syntax/func.py | sangumee/Opentutorials-Webn-Python | 9f813f8f342ea99ffee6e31f363f175fa023c489 | [
"MIT"
] | null | null | null | # code....
a = 1
b = 2
c = 3
s = a+b+c
r = s/3
print(r)
# code....
'''
def average():
a=1
b=2
c=3
s=a+b+c
r=s/3
print(r)
average()
'''
'''
#input
#parameter
#argument
def average(a,b,c):
s=a+b+c
r=s/3
print(r)
average(10,20,30)
'''
def average(a, b, c):
    s = a+b+c
    r = s/3
    return r


print(average(10, 20, 30))
| 9.05 | 26 | 0.466851 |
8a1dc389d59f49c155580d9fe0bb5e5e94a7281e | 1,718 | py | Python | tools/evolution/codingSnps_filter.py | ramezrawas/galaxy-1 | c03748dd49c060a68d07bce56eae33e0ba154414 | [
"CC-BY-3.0"
] | 1 | 2019-11-03T11:45:43.000Z | 2019-11-03T11:45:43.000Z | tools/evolution/codingSnps_filter.py | ramezrawas/galaxy-1 | c03748dd49c060a68d07bce56eae33e0ba154414 | [
"CC-BY-3.0"
] | 7 | 2016-12-07T22:19:37.000Z | 2019-01-30T15:04:26.000Z | tools/evolution/codingSnps_filter.py | ramezrawas/galaxy-1 | c03748dd49c060a68d07bce56eae33e0ba154414 | [
"CC-BY-3.0"
] | null | null | null | #!/usr/bin/env python
# runs after the job (and after the default post-filter)
from galaxy.tools.parameters import DataToolParameter
# Older py compatibility
try:
set()
except:
from sets import Set as set
| 40.904762 | 117 | 0.586147 |
8a1e3dc4bc93e35762cbfc644a38e3db21861cda | 5,290 | py | Python | qa/rpc-tests/listtransactions.py | DeftNerd/bitcoinclassic | afff0155e0dd528145818c43f259743f54966d95 | [
"MIT"
] | 8 | 2016-03-31T18:47:31.000Z | 2021-09-30T05:42:32.000Z | qa/rpc-tests/listtransactions.py | DeftNerd/bitcoinclassic | afff0155e0dd528145818c43f259743f54966d95 | [
"MIT"
] | 1 | 2017-10-06T08:55:30.000Z | 2017-10-06T08:55:30.000Z | qa/rpc-tests/listtransactions.py | DeftNerd/bitcoinclassic | afff0155e0dd528145818c43f259743f54966d95 | [
"MIT"
] | 2 | 2020-02-03T03:38:10.000Z | 2021-09-30T05:42:36.000Z | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the listtransactions API
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
def check_array_result(object_array, to_match, expected):
"""
    Pass in an array of JSON objects, a dictionary with key/value pairs to
    match against, and another dictionary with expected key/value pairs.
    Every matching object must also contain the expected pairs, otherwise
    an AssertionError is raised; it is also raised if nothing matches.
"""
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0:
raise AssertionError("No objects matched %s"%(str(to_match)))
if __name__ == '__main__':
ListTransactionsTest().main()
| 48.53211 | 105 | 0.542722 |
8a1ef1c625b2d34cef5abbf769654ee6310e0334 | 25,352 | py | Python | salt/modules/mount.py | aletourneau/salt | d7013a2f64eb4b79592220d76274bc5dde609e08 | [
"Apache-2.0"
] | null | null | null | salt/modules/mount.py | aletourneau/salt | d7013a2f64eb4b79592220d76274bc5dde609e08 | [
"Apache-2.0"
] | null | null | null | salt/modules/mount.py | aletourneau/salt | d7013a2f64eb4b79592220d76274bc5dde609e08 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Salt module to manage unix mounts and the fstab file
'''
from __future__ import absolute_import
# Import python libs
import os
import re
import logging
# Import salt libs
import salt.utils
from salt._compat import string_types
from salt.utils import which as _which
from salt.exceptions import CommandNotFoundError, CommandExecutionError
# Set up logger
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'mount'
def __virtual__():
'''
Only load on POSIX-like systems
'''
# Disable on Windows, a specific file module exists:
if salt.utils.is_windows():
return False
return True
def _active_mounts(ret):
'''
List active mounts on Linux systems
'''
_list = _list_mounts()
filename = '/proc/self/mounts'
if not os.access(filename, os.R_OK):
msg = 'File not readable {0}'
raise CommandExecutionError(msg.format(filename))
with salt.utils.fopen(filename) as ifile:
for line in ifile:
comps = line.split()
ret[comps[1]] = {'device': comps[0],
'alt_device': _list.get(comps[1], None),
'fstype': comps[2],
'opts': comps[3].split(',')}
return ret
def _active_mounts_freebsd(ret):
'''
List active mounts on FreeBSD systems
'''
for line in __salt__['cmd.run_stdout']('mount -p').split('\n'):
comps = re.sub(r"\s+", " ", line).split()
ret[comps[1]] = {'device': comps[0],
'fstype': comps[2],
'opts': comps[3].split(',')}
return ret
def _active_mounts_solaris(ret):
'''
List active mounts on Solaris systems
'''
for line in __salt__['cmd.run_stdout']('mount -v').split('\n'):
comps = re.sub(r"\s+", " ", line).split()
ret[comps[2]] = {'device': comps[0],
'fstype': comps[4],
'opts': comps[5].split('/')}
return ret
def _active_mounts_openbsd(ret):
'''
List active mounts on OpenBSD systems
'''
for line in __salt__['cmd.run_stdout']('mount -v').split('\n'):
comps = re.sub(r"\s+", " ", line).split()
nod = __salt__['cmd.run_stdout']('ls -l {0}'.format(comps[0]))
nod = ' '.join(nod.split()).split(" ")
parens = re.findall(r'\((.*?)\)', line, re.DOTALL)
ret[comps[3]] = {'device': comps[0],
'fstype': comps[5],
'opts': parens[1].split(", "),
'major': str(nod[4].strip(",")),
'minor': str(nod[5]),
'device_uuid': parens[0]}
return ret
def _active_mounts_darwin(ret):
'''
List active mounts on Mac OS systems
'''
for line in __salt__['cmd.run_stdout']('mount').split('\n'):
comps = re.sub(r"\s+", " ", line).split()
parens = re.findall(r'\((.*?)\)', line, re.DOTALL)[0].split(", ")
ret[comps[2]] = {'device': comps[0],
'fstype': parens[0],
'opts': parens[1:]}
return ret
def active(extended=False):
'''
List the active mounts.
CLI Example:
.. code-block:: bash
salt '*' mount.active
'''
ret = {}
if __grains__['os'] == 'FreeBSD':
_active_mounts_freebsd(ret)
elif __grains__['os'] == 'Solaris':
_active_mounts_solaris(ret)
elif __grains__['os'] == 'OpenBSD':
_active_mounts_openbsd(ret)
elif __grains__['os'] in ['MacOS', 'Darwin']:
_active_mounts_darwin(ret)
else:
if extended:
try:
_active_mountinfo(ret)
except CommandExecutionError:
_active_mounts(ret)
else:
_active_mounts(ret)
return ret
def fstab(config='/etc/fstab'):
'''
List the contents of the fstab
CLI Example:
.. code-block:: bash
salt '*' mount.fstab
'''
ret = {}
if not os.path.isfile(config):
return ret
with salt.utils.fopen(config) as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
continue
if not line.strip():
# Blank line
continue
comps = line.split()
if len(comps) != 6:
# Invalid entry
continue
ret[comps[1]] = {'device': comps[0],
'fstype': comps[2],
'opts': comps[3].split(','),
'dump': comps[4],
'pass': comps[5]}
return ret
def rm_fstab(name, device, config='/etc/fstab'):
'''
Remove the mount point from the fstab
CLI Example:
.. code-block:: bash
salt '*' mount.rm_fstab /mnt/foo
'''
contents = fstab(config)
if name not in contents:
return True
# The entry is present, get rid of it
lines = []
try:
with salt.utils.fopen(config, 'r') as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
lines.append(line)
continue
if not line.strip():
# Blank line
lines.append(line)
continue
comps = line.split()
if len(comps) != 6:
# Invalid entry
lines.append(line)
continue
comps = line.split()
if device:
if comps[1] == name and comps[0] == device:
continue
else:
if comps[1] == name:
continue
lines.append(line)
except (IOError, OSError) as exc:
msg = "Couldn't read from {0}: {1}"
raise CommandExecutionError(msg.format(config, str(exc)))
try:
with salt.utils.fopen(config, 'w+') as ofile:
ofile.writelines(lines)
except (IOError, OSError) as exc:
msg = "Couldn't write to {0}: {1}"
raise CommandExecutionError(msg.format(config, str(exc)))
return True
def set_fstab(
name,
device,
fstype,
opts='defaults',
dump=0,
pass_num=0,
config='/etc/fstab',
test=False,
**kwargs):
'''
Verify that this mount is represented in the fstab, change the mount
to match the data passed, or add the mount if it is not present.
CLI Example:
.. code-block:: bash
salt '*' mount.set_fstab /mnt/foo /dev/sdz1 ext4
'''
# Fix the opts type if it is a list
if isinstance(opts, list):
opts = ','.join(opts)
lines = []
change = False
present = False
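    # Scan the existing fstab: return 'present' if an identical entry exists,
    # 'change' if a matching entry had to be updated, or 'new' if the entry
    # was appended.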
if not os.path.isfile(config):
raise CommandExecutionError('Bad config file "{0}"'.format(config))
try:
with salt.utils.fopen(config, 'r') as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
lines.append(line)
continue
if not line.strip():
# Blank line
lines.append(line)
continue
comps = line.split()
if len(comps) != 6:
# Invalid entry
lines.append(line)
continue
if comps[1] == name or comps[0] == device:
# check to see if there are changes
# and fix them if there are any
present = True
if comps[0] != device:
change = True
comps[0] = device
if comps[1] != name:
change = True
comps[1] = name
if comps[2] != fstype:
change = True
comps[2] = fstype
if comps[3] != opts:
change = True
comps[3] = opts
if comps[4] != str(dump):
change = True
comps[4] = str(dump)
if comps[5] != str(pass_num):
change = True
comps[5] = str(pass_num)
if change:
log.debug(
'fstab entry for mount point {0} needs to be '
'updated'.format(name)
)
newline = (
'{0}\t\t{1}\t{2}\t{3}\t{4} {5}\n'.format(
device, name, fstype, opts, dump, pass_num
)
)
lines.append(newline)
else:
lines.append(line)
except (IOError, OSError) as exc:
msg = 'Couldn\'t read from {0}: {1}'
raise CommandExecutionError(msg.format(config, str(exc)))
if change:
if not salt.utils.test_mode(test=test, **kwargs):
try:
with salt.utils.fopen(config, 'w+') as ofile:
# The line was changed, commit it!
ofile.writelines(lines)
except (IOError, OSError):
msg = 'File not writable {0}'
raise CommandExecutionError(msg.format(config))
return 'change'
if not change:
if present:
# The right entry is already here
return 'present'
else:
if not salt.utils.test_mode(test=test, **kwargs):
# The entry is new, add it to the end of the fstab
newline = '{0}\t\t{1}\t{2}\t{3}\t{4} {5}\n'.format(device,
name,
fstype,
opts,
dump,
pass_num)
lines.append(newline)
try:
with salt.utils.fopen(config, 'w+') as ofile:
# The line was changed, commit it!
ofile.writelines(lines)
except (IOError, OSError):
raise CommandExecutionError(
'File not writable {0}'.format(
config
)
)
return 'new'
def rm_automaster(name, device, config='/etc/auto_salt'):
'''
Remove the mount point from the auto_master
CLI Example:
.. code-block:: bash
salt '*' mount.rm_automaster /mnt/foo
'''
contents = automaster(config)
if name not in contents:
return True
# The entry is present, get rid of it
lines = []
try:
with salt.utils.fopen(config, 'r') as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
lines.append(line)
continue
if not line.strip():
# Blank line
lines.append(line)
continue
comps = line.split()
if len(comps) != 3:
# Invalid entry
lines.append(line)
continue
comps = line.split()
prefix = "/.."
name_chk = comps[0].replace(prefix, "")
device_fmt = comps[2].split(":")
if device:
if name_chk == name and device_fmt[1] == device:
continue
else:
if name_chk == name:
continue
lines.append(line)
except (IOError, OSError) as exc:
msg = "Couldn't read from {0}: {1}"
raise CommandExecutionError(msg.format(config, str(exc)))
try:
with salt.utils.fopen(config, 'w+') as ofile:
ofile.writelines(lines)
except (IOError, OSError) as exc:
msg = "Couldn't write to {0}: {1}"
raise CommandExecutionError(msg.format(config, str(exc)))
# Update automount
__salt__['cmd.run']('automount -cv')
return True
def set_automaster(
name,
device,
fstype,
opts='',
config='/etc/auto_salt',
test=False,
**kwargs):
'''
Verify that this mount is represented in the auto_salt, change the mount
to match the data passed, or add the mount if it is not present.
CLI Example:
.. code-block:: bash
salt '*' mount.set_automaster /mnt/foo /dev/sdz1 ext4
'''
# Fix the opts type if it is a list
if isinstance(opts, list):
opts = ','.join(opts)
lines = []
change = False
present = False
automaster_file = "/etc/auto_master"
if not os.path.isfile(config):
__salt__['file.touch'](config)
__salt__['file.append'](automaster_file, "/-\t\t\t{0}".format(config))
name = "/..{0}".format(name)
device_fmt = "{0}:{1}".format(fstype, device)
type_opts = "-fstype={0},{1}".format(fstype, opts)
if fstype == 'smbfs':
device_fmt = device_fmt.replace(fstype, "")
try:
with salt.utils.fopen(config, 'r') as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
lines.append(line)
continue
if not line.strip():
# Blank line
lines.append(line)
continue
comps = line.split()
if len(comps) != 3:
# Invalid entry
lines.append(line)
continue
if comps[0] == name or comps[2] == device_fmt:
# check to see if there are changes
# and fix them if there are any
present = True
if comps[0] != name:
change = True
comps[0] = name
if comps[1] != type_opts:
change = True
comps[1] = type_opts
if comps[2] != device_fmt:
change = True
comps[2] = device_fmt
if change:
log.debug(
'auto_master entry for mount point {0} needs to be '
'updated'.format(name)
)
newline = (
'{0}\t{1}\t{2}\n'.format(
name, type_opts, device_fmt)
)
lines.append(newline)
else:
lines.append(line)
except (IOError, OSError) as exc:
msg = 'Couldn\'t read from {0}: {1}'
raise CommandExecutionError(msg.format(config, str(exc)))
if change:
if not salt.utils.test_mode(test=test, **kwargs):
try:
with salt.utils.fopen(config, 'w+') as ofile:
# The line was changed, commit it!
ofile.writelines(lines)
except (IOError, OSError):
msg = 'File not writable {0}'
raise CommandExecutionError(msg.format(config))
return 'change'
if not change:
if present:
# The right entry is already here
return 'present'
else:
if not salt.utils.test_mode(test=test, **kwargs):
# The entry is new, add it to the end of the fstab
newline = (
'{0}\t{1}\t{2}\n'.format(
name, type_opts, device_fmt)
)
lines.append(newline)
try:
with salt.utils.fopen(config, 'w+') as ofile:
# The line was changed, commit it!
ofile.writelines(lines)
except (IOError, OSError):
raise CommandExecutionError(
'File not writable {0}'.format(
config
)
)
return 'new'
def automaster(config='/etc/auto_salt'):
'''
    List the contents of the auto master
CLI Example:
.. code-block:: bash
        salt '*' mount.automaster
'''
ret = {}
if not os.path.isfile(config):
return ret
with salt.utils.fopen(config) as ifile:
for line in ifile:
if line.startswith('#'):
# Commented
continue
if not line.strip():
# Blank line
continue
comps = line.split()
if len(comps) != 3:
# Invalid entry
continue
prefix = "/.."
name = comps[0].replace(prefix, "")
device_fmt = comps[2].split(":")
opts = comps[1].split(',')
ret[name] = {'device': device_fmt[1],
'fstype': opts[0],
'opts': opts[1:]}
return ret
def mount(name, device, mkmnt=False, fstype='', opts='defaults', user=None):
'''
Mount a device
CLI Example:
.. code-block:: bash
salt '*' mount.mount /mnt/foo /dev/sdz1 True
'''
# Darwin doesn't expect defaults when mounting without other options
if 'defaults' in opts and __grains__['os'] in ['MacOS', 'Darwin']:
opts = None
if isinstance(opts, string_types):
opts = opts.split(',')
if not os.path.exists(name) and mkmnt:
__salt__['file.mkdir'](name=name, user=user)
args = ''
if opts is not None:
lopts = ','.join(opts)
args = '-o {0}'.format(lopts)
if fstype:
args += ' -t {0}'.format(fstype)
cmd = 'mount {0} {1} {2} '.format(args, device, name)
out = __salt__['cmd.run_all'](cmd, runas=user)
if out['retcode']:
return out['stderr']
return True
def remount(name, device, mkmnt=False, fstype='', opts='defaults', user=None):
'''
Attempt to remount a device, if the device is not already mounted, mount
is called
CLI Example:
.. code-block:: bash
salt '*' mount.remount /mnt/foo /dev/sdz1 True
'''
force_mount = False
if __grains__['os'] in ['MacOS', 'Darwin']:
if opts == 'defaults':
opts = 'noowners'
if fstype == 'smbfs':
force_mount = True
if isinstance(opts, string_types):
opts = opts.split(',')
mnts = active()
if name in mnts:
# The mount point is mounted, attempt to remount it with the given data
if 'remount' not in opts and __grains__['os'] not in ['OpenBSD', 'MacOS', 'Darwin']:
opts.append('remount')
if force_mount:
# We need to force the mount but first we should unmount
umount(name, device, user=user)
lopts = ','.join(opts)
args = '-o {0}'.format(lopts)
if fstype:
args += ' -t {0}'.format(fstype)
if __grains__['os'] not in ['OpenBSD', 'MacOS', 'Darwin'] or force_mount:
cmd = 'mount {0} {1} {2} '.format(args, device, name)
else:
cmd = 'mount -u {0} {1} {2} '.format(args, device, name)
out = __salt__['cmd.run_all'](cmd, runas=user)
if out['retcode']:
return out['stderr']
return True
# Mount a filesystem that isn't already
return mount(name, device, mkmnt, fstype, opts, user=user)
def umount(name, device=None, user=None):
'''
Attempt to unmount a device by specifying the directory it is mounted on
CLI Example:
.. code-block:: bash
salt '*' mount.umount /mnt/foo
.. versionadded:: Lithium
salt '*' mount.umount /mnt/foo /dev/xvdc1
'''
mnts = active()
if name not in mnts:
return "{0} does not have anything mounted".format(name)
if not device:
cmd = 'umount {0}'.format(name)
else:
cmd = 'umount {0}'.format(device)
out = __salt__['cmd.run_all'](cmd, runas=user)
if out['retcode']:
return out['stderr']
return True
def is_fuse_exec(cmd):
'''
Returns true if the command passed is a fuse mountable application.
CLI Example:
.. code-block:: bash
salt '*' mount.is_fuse_exec sshfs
'''
cmd_path = _which(cmd)
# No point in running ldd on a command that doesn't exist
if not cmd_path:
return False
elif not _which('ldd'):
raise CommandNotFoundError('ldd')
out = __salt__['cmd.run']('ldd {0}'.format(cmd_path))
return 'libfuse' in out
def swaps():
'''
Return a dict containing information on active swap
CLI Example:
.. code-block:: bash
salt '*' mount.swaps
'''
ret = {}
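    # Everything except OpenBSD is read from /proc/swaps; OpenBSD is parsed
    # from `swapctl -kl` instead.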
if __grains__['os'] != 'OpenBSD':
with salt.utils.fopen('/proc/swaps') as fp_:
for line in fp_:
if line.startswith('Filename'):
continue
comps = line.split()
ret[comps[0]] = {'type': comps[1],
'size': comps[2],
'used': comps[3],
'priority': comps[4]}
else:
for line in __salt__['cmd.run_stdout']('swapctl -kl').splitlines():
if line.startswith(('Device', 'Total')):
continue
swap_type = "file"
comps = line.split()
if comps[0].startswith('/dev/'):
swap_type = "partition"
ret[comps[0]] = {'type': swap_type,
'size': comps[1],
'used': comps[2],
'priority': comps[5]}
return ret
def swapon(name, priority=None):
'''
Activate a swap disk
CLI Example:
.. code-block:: bash
salt '*' mount.swapon /root/swapfile
'''
ret = {}
on_ = swaps()
if name in on_:
ret['stats'] = on_[name]
ret['new'] = False
return ret
cmd = 'swapon {0}'.format(name)
if priority:
cmd += ' -p {0}'.format(priority)
__salt__['cmd.run'](cmd)
on_ = swaps()
if name in on_:
ret['stats'] = on_[name]
ret['new'] = True
return ret
return ret
def swapoff(name):
'''
Deactivate a named swap mount
CLI Example:
.. code-block:: bash
salt '*' mount.swapoff /root/swapfile
'''
on_ = swaps()
if name in on_:
if __grains__['os'] != 'OpenBSD':
__salt__['cmd.run']('swapoff {0}'.format(name))
else:
__salt__['cmd.run']('swapctl -d {0}'.format(name))
on_ = swaps()
if name in on_:
return False
return True
return None
def is_mounted(name):
'''
.. versionadded:: 2014.7.0
Provide information if the path is mounted
CLI Example:
.. code-block:: bash
salt '*' mount.is_mounted /mnt/share
'''
active_ = active()
if name in active_:
return True
else:
return False
| 30.109264 | 92 | 0.473651 |
8a1f6ceee24cfa74cb693e71048a38117f2ad54b | 907 | py | Python | base/admin.py | ExpertOfNone/expert_of_none | 9ff4e4279a570712766546122c014c754f753485 | [
"MIT"
] | null | null | null | base/admin.py | ExpertOfNone/expert_of_none | 9ff4e4279a570712766546122c014c754f753485 | [
"MIT"
] | null | null | null | base/admin.py | ExpertOfNone/expert_of_none | 9ff4e4279a570712766546122c014c754f753485 | [
"MIT"
] | null | null | null | from django.contrib import admin
from base.models import Topic, Photo
admin.site.register(Topic, TopicAdmin)
admin.site.register(Photo, PhotoAdmin)
| 21.093023 | 96 | 0.675854 |
8a1ff54283494d2484e0c67f1c0d0ff7dcc46387 | 1,165 | py | Python | met/metadataparser/models/entity_type.py | z1digitalstudio/met | 7840e7520bb4c3cb0328d5988468eefe6639f950 | [
"BSD-2-Clause"
] | 11 | 2016-06-30T13:20:39.000Z | 2021-01-14T20:53:15.000Z | met/metadataparser/models/entity_type.py | z1digitalstudio/met | 7840e7520bb4c3cb0328d5988468eefe6639f950 | [
"BSD-2-Clause"
] | 50 | 2016-02-03T14:49:06.000Z | 2022-02-24T01:35:50.000Z | met/metadataparser/models/entity_type.py | z1digitalstudio/met | 7840e7520bb4c3cb0328d5988468eefe6639f950 | [
"BSD-2-Clause"
] | 9 | 2016-04-22T19:24:36.000Z | 2022-01-11T10:30:54.000Z | #################################################################
# MET v2 Metadate Explorer Tool
#
# This Software is Open Source. See License: https://github.com/TERENA/met/blob/master/LICENSE.md
# Copyright (c) 2012, TERENA All rights reserved.
#
# This Software is based on MET v1 developed for TERENA by Yaco Sistemas, http://www.yaco.es/
# MET v2 was developed for TERENA by Tamim Ziai, DAASI International GmbH, http://www.daasi.de
# Current version of MET has been revised for performance improvements by Andrea Biancini,
# Consortium GARR, http://www.garr.it
##########################################################################
from django.db import models
from django.utils.translation import ugettext_lazy as _
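

# The model class itself is not included in this excerpt. A minimal sketch of
# an entity-type model that would justify the imports above; the field name
# and length are assumptions, not the upstream schema.
class EntityType(models.Model):
    name = models.CharField(_('Name'), max_length=100, unique=True)

    def __str__(self):
        return self.name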
| 38.833333 | 97 | 0.614592 |
8a2004bf04417c6b520430e6ac9ec351a3c37f83 | 9,312 | py | Python | wxpy/bot.py | daimajia/wxpy | 2b56fb67b9ccb072538fd778a27a8fef8d9c93e6 | [
"MIT"
] | 34 | 2017-03-01T06:32:04.000Z | 2021-11-16T12:48:46.000Z | wxpy/bot.py | daimajia/wxpy | 2b56fb67b9ccb072538fd778a27a8fef8d9c93e6 | [
"MIT"
] | null | null | null | wxpy/bot.py | daimajia/wxpy | 2b56fb67b9ccb072538fd778a27a8fef8d9c93e6 | [
"MIT"
] | 17 | 2017-03-01T08:41:22.000Z | 2021-09-16T06:25:43.000Z | import traceback
from pprint import pformat
from threading import Thread
import itchat
import logging
from wxpy.chat import Chat
from wxpy.chats import Chats
from wxpy.friend import Friend
from wxpy.group import Group
from wxpy.message import MessageConfigs, Messages, Message, MessageConfig
from wxpy.mp import MP
from wxpy.response import ResponseError
from wxpy.user import User
from wxpy.utils.constants import SYSTEM
from wxpy.utils.tools import handle_response, get_user_name, wrap_user_name, ensure_list
logger = logging.getLogger('wxpy')
# chats
def except_self(self, chats_or_dicts):
"""
:param chats_or_dicts:
:return:
"""
return list(filter(lambda x: get_user_name(x) != self.self.user_name, chats_or_dicts))
def chats(self, update=False):
"""
:param update:
:return:
"""
return Chats(self.friends(update) + self.groups(update) + self.mps(update), self)
def friends(self, update=False):
"""
:param update:
:return:
"""
ret = do()
ret.source = self
return ret
def search(self, name=None, **attributes):
"""
:param name: ()
:param attributes: sex(), province(), city() province=''
:return:
"""
return self.chats().search(name, **attributes)
# add / create
def create_group(self, users, topic=None):
"""
:param users:
:param topic:
:return:
"""
ret = request()
user_name = ret.get('ChatRoomName')
if user_name:
return Group(self.core.update_chatroom(userName=user_name))
else:
raise ResponseError('Failed to create group:\n{}'.format(pformat(ret)))
# messages
def _process_message(self, msg):
"""
"""
if not self.alive:
return
func, run_async = self.message_configs.get_func(msg)
if not func:
return
if run_async:
Thread(target=process).start()
else:
process()
def register(
self, chats=None, msg_types=None,
except_self=True, run_async=True, enabled=True
):
"""
:param chats:
:param msg_types: (SYSTEM )
:param except_self:
:param run_async:
:param enabled:
"""
return register
def start(self, block=True):
"""
:param block: False
"""
if block:
listen()
else:
t = Thread(target=listen, daemon=True)
t.start()
| 27.22807 | 94 | 0.560997 |
8a2014bc56418a4e4967160efe3f9656c573b77f | 1,432 | py | Python | glue/__init__.py | HPLegion/glue | 1843787ccb4de852dfe103ff58473da13faccf5f | [
"BSD-3-Clause"
] | 550 | 2015-01-08T13:51:06.000Z | 2022-03-31T11:54:47.000Z | glue/__init__.py | HPLegion/glue | 1843787ccb4de852dfe103ff58473da13faccf5f | [
"BSD-3-Clause"
] | 1,362 | 2015-01-03T19:15:52.000Z | 2022-03-30T13:23:11.000Z | glue/__init__.py | HPLegion/glue | 1843787ccb4de852dfe103ff58473da13faccf5f | [
"BSD-3-Clause"
] | 142 | 2015-01-08T13:08:00.000Z | 2022-03-18T13:25:57.000Z | # Set up configuration variables
__all__ = ['custom_viewer', 'qglue', 'test']
import os
import sys
from pkg_resources import get_distribution, DistributionNotFound
try:
    __version__ = get_distribution('glue-core').version
except DistributionNotFound:
    __version__ = 'undefined'
from ._mpl_backend import MatplotlibBackendSetter
sys.meta_path.append(MatplotlibBackendSetter())
from glue.viewers.custom.helper import custom_viewer
# Load user's configuration file
from .config import load_configuration
env = load_configuration()
from .qglue import qglue
from .main import load_plugins # noqa
from glue._settings_helpers import load_settings
load_settings()
# In PyQt 5.5+, PyQt overrides the default exception catching and fatally
# crashes the Qt application without printing out any details about the error.
# Below we revert the exception hook to the original Python one. Note that we
# can't just do sys.excepthook = sys.__excepthook__ otherwise PyQt will detect
# the default excepthook is in place and override it.
def handle_exception(exc_type, exc_value, exc_traceback):
    # Minimal stand-in for the original handler, which is not included in
    # this excerpt: delegate to Python's built-in hook without literally
    # being sys.__excepthook__, so PyQt leaves it in place.
    sys.__excepthook__(exc_type, exc_value, exc_traceback)


sys.excepthook = handle_exception
| 26.036364 | 78 | 0.775838 |
8a2036147565ecfe3e374843c7669120715a456c | 93 | py | Python | run.py | pran01/AlgoVision | 40e85f3c55266f43ee103dfa0852a63af306a8d4 | [
"MIT"
] | 33 | 2020-10-05T01:04:55.000Z | 2021-06-24T01:52:31.000Z | run.py | learning-zones/AlgoVision | 9261e00ecb2540d8bb950d47d670bb6b2c69db0f | [
"MIT"
] | 14 | 2020-10-07T03:15:12.000Z | 2021-01-15T11:53:29.000Z | run.py | learning-zones/AlgoVision | 9261e00ecb2540d8bb950d47d670bb6b2c69db0f | [
"MIT"
] | 9 | 2020-10-05T07:16:45.000Z | 2021-03-01T15:44:31.000Z | from algovision import app
if __name__ == "__main__":
    app.run(debug=True, host='0.0.0.0')
| 18.6 | 38 | 0.688172 |
8a206c0ba5cec93f4c2890bee22ea35305190260 | 1,477 | py | Python | readthedocs/settings/proxito/base.py | rffontenelle/readthedocs.org | a7a9072215551156b9ddc22280cc085944eaa4b0 | [
"MIT"
] | null | null | null | readthedocs/settings/proxito/base.py | rffontenelle/readthedocs.org | a7a9072215551156b9ddc22280cc085944eaa4b0 | [
"MIT"
] | null | null | null | readthedocs/settings/proxito/base.py | rffontenelle/readthedocs.org | a7a9072215551156b9ddc22280cc085944eaa4b0 | [
"MIT"
] | null | null | null | """
Base settings for Proxito
Some of these settings will eventually be backported into the main settings file,
but currently we have them to be able to run the site with the old middleware for
a staged rollout of the proxito code.
"""
| 31.425532 | 81 | 0.65606 |
8a20872ac762ad5db9d06e05df401ef72a6b24c6 | 69,998 | py | Python | model_selection/tests/test_search.py | jessica-tu/jupyter | 917e02bc29e0fa06bd8adb25fe5388ac381ec829 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | model_selection/tests/test_search.py | jessica-tu/jupyter | 917e02bc29e0fa06bd8adb25fe5388ac381ec829 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | model_selection/tests/test_search.py | jessica-tu/jupyter | 917e02bc29e0fa06bd8adb25fe5388ac381ec829 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | """Test the search module"""
from collections.abc import Iterable, Sized
from io import StringIO
from itertools import chain, product
from functools import partial
import pickle
import sys
from types import GeneratorType
import re
import numpy as np
import scipy.sparse as sp
import pytest
from sklearn.utils.fixes import sp_version
from sklearn.utils._testing import assert_raises
from sklearn.utils._testing import assert_warns
from sklearn.utils._testing import assert_warns_message
from sklearn.utils._testing import assert_raise_message
from sklearn.utils._testing import assert_array_equal
from sklearn.utils._testing import assert_array_almost_equal
from sklearn.utils._testing import assert_allclose
from sklearn.utils._testing import assert_almost_equal
from sklearn.utils._testing import ignore_warnings
from sklearn.utils._mocking import CheckingClassifier, MockDataFrame
from scipy.stats import bernoulli, expon, uniform
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.base import clone
from sklearn.exceptions import NotFittedError
from sklearn.datasets import make_classification
from sklearn.datasets import make_blobs
from sklearn.datasets import make_multilabel_classification
from sklearn.model_selection import fit_grid_point
from sklearn.model_selection import train_test_split
from sklearn.model_selection import KFold
from sklearn.model_selection import StratifiedKFold
from sklearn.model_selection import StratifiedShuffleSplit
from sklearn.model_selection import LeaveOneGroupOut
from sklearn.model_selection import LeavePGroupsOut
from sklearn.model_selection import GroupKFold
from sklearn.model_selection import GroupShuffleSplit
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import RandomizedSearchCV
from sklearn.model_selection import ParameterGrid
from sklearn.model_selection import ParameterSampler
from sklearn.model_selection._search import BaseSearchCV
from sklearn.model_selection._validation import FitFailedWarning
from sklearn.svm import LinearSVC, SVC
from sklearn.tree import DecisionTreeRegressor
from sklearn.tree import DecisionTreeClassifier
from sklearn.cluster import KMeans
from sklearn.neighbors import KernelDensity
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import f1_score
from sklearn.metrics import recall_score
from sklearn.metrics import accuracy_score
from sklearn.metrics import make_scorer
from sklearn.metrics import roc_auc_score
from sklearn.metrics.pairwise import euclidean_distances
from sklearn.impute import SimpleImputer
from sklearn.pipeline import Pipeline
from sklearn.linear_model import Ridge, SGDClassifier, LinearRegression
from sklearn.experimental import enable_hist_gradient_boosting # noqa
from sklearn.ensemble import HistGradientBoostingClassifier
from sklearn.model_selection.tests.common import OneTimeSplitter
# Neither of the following two estimators inherit from BaseEstimator,
# to test hyperparameter search on user-defined classifiers.
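
# Minimal stand-ins for those two estimators (their full definitions are not
# included in this excerpt); only the behaviour the tests below rely on --
# ``foo_param`` handling and the absence of a usable ``score`` attribute --
# is implemented, so details may differ from the upstream originals.
class MockClassifier:
    """Dummy classifier to test the parameter search algorithms"""

    def __init__(self, foo_param=0):
        self.foo_param = foo_param

    def fit(self, X, Y):
        assert len(X) == len(Y)
        self.classes_ = np.unique(Y)
        return self

    def predict(self, T):
        return T.shape[0]

    predict_proba = predict
    predict_log_proba = predict
    decision_function = predict

    def score(self, X=None, Y=None):
        if self.foo_param > 1:
            score = 1.
        else:
            score = 0.
        return score

    def get_params(self, deep=False):
        return {'foo_param': self.foo_param}

    def set_params(self, **params):
        self.foo_param = params['foo_param']
        return self


class LinearSVCNoScore(LinearSVC):
    """A LinearSVC classifier that has no score method."""

    @property
    def score(self):
        raise AttributeError
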
X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
y = np.array([1, 1, 2, 2])
def test_grid_search_score_method():
X, y = make_classification(n_samples=100, n_classes=2, flip_y=.2,
random_state=0)
clf = LinearSVC(random_state=0)
grid = {'C': [.1]}
search_no_scoring = GridSearchCV(clf, grid, scoring=None).fit(X, y)
search_accuracy = GridSearchCV(clf, grid, scoring='accuracy').fit(X, y)
search_no_score_method_auc = GridSearchCV(LinearSVCNoScore(), grid,
scoring='roc_auc'
).fit(X, y)
search_auc = GridSearchCV(clf, grid, scoring='roc_auc').fit(X, y)
# Check warning only occurs in situation where behavior changed:
# estimator requires score method to compete with scoring parameter
score_no_scoring = search_no_scoring.score(X, y)
score_accuracy = search_accuracy.score(X, y)
score_no_score_auc = search_no_score_method_auc.score(X, y)
score_auc = search_auc.score(X, y)
# ensure the test is sane
assert score_auc < 1.0
assert score_accuracy < 1.0
assert score_auc != score_accuracy
assert_almost_equal(score_accuracy, score_no_scoring)
assert_almost_equal(score_auc, score_no_score_auc)
def test_grid_search_groups():
# Check if ValueError (when groups is None) propagates to GridSearchCV
# And also check if groups is correctly passed to the cv object
rng = np.random.RandomState(0)
X, y = make_classification(n_samples=15, n_classes=2, random_state=0)
groups = rng.randint(0, 3, 15)
clf = LinearSVC(random_state=0)
grid = {'C': [1]}
group_cvs = [LeaveOneGroupOut(), LeavePGroupsOut(2),
GroupKFold(n_splits=3), GroupShuffleSplit()]
for cv in group_cvs:
gs = GridSearchCV(clf, grid, cv=cv)
assert_raise_message(ValueError,
"The 'groups' parameter should not be None.",
gs.fit, X, y)
gs.fit(X, y, groups=groups)
non_group_cvs = [StratifiedKFold(), StratifiedShuffleSplit()]
for cv in non_group_cvs:
gs = GridSearchCV(clf, grid, cv=cv)
# Should not raise an error
gs.fit(X, y)
def test_refit_callable():
"""
Test refit=callable, which adds flexibility in identifying the
"best" estimator.
"""
def refit_callable(cv_results):
"""
A dummy function tests `refit=callable` interface.
Return the index of a model that has the least
`mean_test_score`.
"""
# Fit a dummy clf with `refit=True` to get a list of keys in
# clf.cv_results_.
X, y = make_classification(n_samples=100, n_features=4,
random_state=42)
clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.01, 0.1, 1]},
scoring='precision', refit=True)
clf.fit(X, y)
# Ensure that `best_index_ != 0` for this dummy clf
assert clf.best_index_ != 0
# Assert every key matches those in `cv_results`
for key in clf.cv_results_.keys():
assert key in cv_results
return cv_results['mean_test_score'].argmin()
X, y = make_classification(n_samples=100, n_features=4,
random_state=42)
clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.01, 0.1, 1]},
scoring='precision', refit=refit_callable)
clf.fit(X, y)
assert clf.best_index_ == 0
# Ensure `best_score_` is disabled when using `refit=callable`
assert not hasattr(clf, 'best_score_')
def test_refit_callable_invalid_type():
"""
Test implementation catches the errors when 'best_index_' returns an
invalid result.
"""
def refit_callable_invalid_type(cv_results):
"""
A dummy function tests when returned 'best_index_' is not integer.
"""
return None
X, y = make_classification(n_samples=100, n_features=4,
random_state=42)
clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.1, 1]},
scoring='precision', refit=refit_callable_invalid_type)
with pytest.raises(TypeError,
match='best_index_ returned is not an integer'):
clf.fit(X, y)
def test_refit_callable_multi_metric():
"""
Test refit=callable in multiple metric evaluation setting
"""
def refit_callable(cv_results):
"""
A dummy function tests `refit=callable` interface.
Return the index of a model that has the least
`mean_test_prec`.
"""
assert 'mean_test_prec' in cv_results
return cv_results['mean_test_prec'].argmin()
X, y = make_classification(n_samples=100, n_features=4,
random_state=42)
scoring = {'Accuracy': make_scorer(accuracy_score), 'prec': 'precision'}
clf = GridSearchCV(LinearSVC(random_state=42), {'C': [0.01, 0.1, 1]},
scoring=scoring, refit=refit_callable)
clf.fit(X, y)
assert clf.best_index_ == 0
# Ensure `best_score_` is disabled when using `refit=callable`
assert not hasattr(clf, 'best_score_')
def compare_cv_results_multimetric_with_single(
search_multi, search_acc, search_rec):
"""Compare multi-metric cv_results with the ensemble of multiple
single metric cv_results from single metric grid/random search"""
assert search_multi.multimetric_
assert_array_equal(sorted(search_multi.scorer_),
('accuracy', 'recall'))
cv_results_multi = search_multi.cv_results_
cv_results_acc_rec = {re.sub('_score$', '_accuracy', k): v
for k, v in search_acc.cv_results_.items()}
cv_results_acc_rec.update({re.sub('_score$', '_recall', k): v
for k, v in search_rec.cv_results_.items()})
# Check if score and timing are reasonable, also checks if the keys
# are present
assert all((np.all(cv_results_multi[k] <= 1) for k in (
'mean_score_time', 'std_score_time', 'mean_fit_time',
'std_fit_time')))
# Compare the keys, other than time keys, among multi-metric and
# single metric grid search results. np.testing.assert_equal performs a
# deep nested comparison of the two cv_results dicts
np.testing.assert_equal({k: v for k, v in cv_results_multi.items()
if not k.endswith('_time')},
{k: v for k, v in cv_results_acc_rec.items()
if not k.endswith('_time')})
def compare_refit_methods_when_refit_with_acc(search_multi, search_acc, refit):
"""Compare refit multi-metric search methods with single metric methods"""
assert search_acc.refit == refit
if refit:
assert search_multi.refit == 'accuracy'
else:
assert not search_multi.refit
return # search cannot predict/score without refit
X, y = make_blobs(n_samples=100, n_features=4, random_state=42)
for method in ('predict', 'predict_proba', 'predict_log_proba'):
assert_almost_equal(getattr(search_multi, method)(X),
getattr(search_acc, method)(X))
assert_almost_equal(search_multi.score(X, y), search_acc.score(X, y))
for key in ('best_index_', 'best_score_', 'best_params_'):
assert getattr(search_multi, key) == getattr(search_acc, key)
# FIXME remove test_fit_grid_point as the function will be removed on 0.25
# FIXME remove test_fit_grid_point_deprecated as
# fit_grid_point will be removed on 0.25
def test_fit_grid_point_deprecated():
X, y = make_classification(random_state=0)
svc = LinearSVC(random_state=0)
scorer = make_scorer(accuracy_score)
msg = ("fit_grid_point is deprecated in version 0.23 "
"and will be removed in version 0.25")
params = {'C': 0.1}
train, test = next(StratifiedKFold().split(X, y))
with pytest.warns(FutureWarning, match=msg):
fit_grid_point(X, y, svc, params, train, test, scorer, verbose=False)
def test_pickle():
# Test that a fit search can be pickled
clf = MockClassifier()
grid_search = GridSearchCV(clf, {'foo_param': [1, 2, 3]}, refit=True, cv=3)
grid_search.fit(X, y)
grid_search_pickled = pickle.loads(pickle.dumps(grid_search))
assert_array_almost_equal(grid_search.predict(X),
grid_search_pickled.predict(X))
random_search = RandomizedSearchCV(clf, {'foo_param': [1, 2, 3]},
refit=True, n_iter=3, cv=3)
random_search.fit(X, y)
random_search_pickled = pickle.loads(pickle.dumps(random_search))
assert_array_almost_equal(random_search.predict(X),
random_search_pickled.predict(X))
def test_grid_search_with_multioutput_data():
# Test search with multi-output estimator
X, y = make_multilabel_classification(return_indicator=True,
random_state=0)
est_parameters = {"max_depth": [1, 2, 3, 4]}
cv = KFold()
estimators = [DecisionTreeRegressor(random_state=0),
DecisionTreeClassifier(random_state=0)]
# Test with grid search cv
for est in estimators:
grid_search = GridSearchCV(est, est_parameters, cv=cv)
grid_search.fit(X, y)
res_params = grid_search.cv_results_['params']
for cand_i in range(len(res_params)):
est.set_params(**res_params[cand_i])
for i, (train, test) in enumerate(cv.split(X, y)):
est.fit(X[train], y[train])
correct_score = est.score(X[test], y[test])
assert_almost_equal(
correct_score,
grid_search.cv_results_['split%d_test_score' % i][cand_i])
# Test with a randomized search
for est in estimators:
random_search = RandomizedSearchCV(est, est_parameters,
cv=cv, n_iter=3)
random_search.fit(X, y)
res_params = random_search.cv_results_['params']
for cand_i in range(len(res_params)):
est.set_params(**res_params[cand_i])
for i, (train, test) in enumerate(cv.split(X, y)):
est.fit(X[train], y[train])
correct_score = est.score(X[test], y[test])
assert_almost_equal(
correct_score,
random_search.cv_results_['split%d_test_score'
% i][cand_i])
def test_search_cv__pairwise_property_delegated_to_base_estimator():
"""
Test implementation of BaseSearchCV has the _pairwise property
which matches the _pairwise property of its estimator.
This test make sure _pairwise is delegated to the base estimator.
Non-regression test for issue #13920.
"""
est = BaseEstimator()
attr_message = "BaseSearchCV _pairwise property must match estimator"
for _pairwise_setting in [True, False]:
setattr(est, '_pairwise', _pairwise_setting)
cv = GridSearchCV(est, {'n_neighbors': [10]})
assert _pairwise_setting == cv._pairwise, attr_message
def test_search_cv__pairwise_property_equivalence_of_precomputed():
"""
Test implementation of BaseSearchCV has the _pairwise property
which matches the _pairwise property of its estimator.
This test ensures the equivalence of 'precomputed'.
Non-regression test for issue #13920.
"""
n_samples = 50
n_splits = 2
X, y = make_classification(n_samples=n_samples, random_state=0)
grid_params = {'n_neighbors': [10]}
# defaults to euclidean metric (minkowski p = 2)
clf = KNeighborsClassifier()
cv = GridSearchCV(clf, grid_params, cv=n_splits)
cv.fit(X, y)
preds_original = cv.predict(X)
# precompute euclidean metric to validate _pairwise is working
X_precomputed = euclidean_distances(X)
clf = KNeighborsClassifier(metric='precomputed')
cv = GridSearchCV(clf, grid_params, cv=n_splits)
cv.fit(X_precomputed, y)
preds_precomputed = cv.predict(X_precomputed)
attr_message = "GridSearchCV not identical with precomputed metric"
assert (preds_original == preds_precomputed).all(), attr_message
| 38.18767 | 79 | 0.629904 |
8a20b188a03474ef2ca2b4263906e89597ed6c86 | 6,029 | py | Python | src/economy/migrations/0027_zettlebalance_zettlereceipt.py | bornhack/bornhack-website | 40ed0875f5129a4c8ae1887e33e7dedb4981dadc | [
"BSD-3-Clause"
] | 7 | 2017-04-14T15:28:29.000Z | 2021-09-10T09:45:38.000Z | src/economy/migrations/0027_zettlebalance_zettlereceipt.py | bornhack/bornhack-website | 40ed0875f5129a4c8ae1887e33e7dedb4981dadc | [
"BSD-3-Clause"
] | 799 | 2016-04-28T09:31:50.000Z | 2022-03-29T09:05:02.000Z | src/economy/migrations/0027_zettlebalance_zettlereceipt.py | bornhack/bornhack-website | 40ed0875f5129a4c8ae1887e33e7dedb4981dadc | [
"BSD-3-Clause"
] | 35 | 2016-04-28T09:23:53.000Z | 2021-05-02T12:36:01.000Z | # Generated by Django 3.2.7 on 2021-09-13 03:52
import uuid
from django.db import migrations, models
| 35.674556 | 137 | 0.379499 |
8a20b1d12635ada6c636b100e165021b86485320 | 2,854 | py | Python | main.py | vkumarma/Complete-Interpreter | 5ec15ea84b0e7e735328511cc504efa43638f720 | [
"MIT"
] | null | null | null | main.py | vkumarma/Complete-Interpreter | 5ec15ea84b0e7e735328511cc504efa43638f720 | [
"MIT"
] | null | null | null | main.py | vkumarma/Complete-Interpreter | 5ec15ea84b0e7e735328511cc504efa43638f720 | [
"MIT"
] | null | null | null | import re
import sys
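
# The Lexer class (and the token-handling helpers used in the driver loop
# below) are defined elsewhere in the original program and are not included
# in this excerpt. A minimal sketch of the character-feeding interface the
# loop relies on, assuming get_char() returns the next character or None at
# the end of input:
class Lexer:
    def __init__(self, text):
        self.text = text
        self.pos = 0

    def get_char(self):
        # Return the next character, or None once the input is exhausted.
        if self.pos >= len(self.text):
            return None
        ch = self.text[self.pos]
        self.pos += 1
        return ch
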
input_file = open(str(sys.argv[1]), 'r') # Open file for reading
line = input_file.read()
# "if z then while x * 4 - 2 do skip endwhile else x := 7 endif; y := 1"
input_string = line.strip("\n")
lexer = Lexer(input_string)
hashtable = {}
tokens_list = []
new = [] # keeping track of current char.
curr_char = lexer.get_char()
while (curr_char != None):
    while (curr_char == ' ' or curr_char == ''):
        curr_char = lexer.get_char()

    if (curr_char.isdigit()):
        token_check(digit(curr_char, lexer))
        curr_char = new.pop()
    elif (curr_char.isalpha()):
        token_check(longest_sub_string(curr_char, lexer))
        curr_char = new.pop()
    elif curr_char in "+-/*();":
        token_check(symbol(curr_char, lexer))
        curr_char = new.pop()
    elif curr_char == ":":
        token_check(assignment(curr_char, lexer))
        curr_char = new.pop()
        if curr_char == "=":
            curr_char = lexer.get_char()
    else:
        token_check(curr_char)
        curr_char = lexer.get_char()

# print(tokens_list)
# print(tokens())
| 23.983193 | 72 | 0.590049 |
8a20dee928bb3a353769ebc5d7c40156ab5eb131 | 306 | py | Python | deduplicate.py | Ghostofapacket/NewsGrabber-Deduplicate | 0b8152af2e1c6c87cf8540970f42084b96a99d9c | [
"Unlicense"
] | null | null | null | deduplicate.py | Ghostofapacket/NewsGrabber-Deduplicate | 0b8152af2e1c6c87cf8540970f42084b96a99d9c | [
"Unlicense"
] | null | null | null | deduplicate.py | Ghostofapacket/NewsGrabber-Deduplicate | 0b8152af2e1c6c87cf8540970f42084b96a99d9c | [
"Unlicense"
] | null | null | null | import sys
sys.path.append('/usr/local/lib/python3.4/site-packages/')
from warc_dedup import deduplicate
if __name__ == '__main__':
    main()
| 20.4 | 68 | 0.679739 |
8a20fc9b93bd3fc7e19c79190d5875b049bc7526 | 4,136 | py | Python | build/lib/FinMesh/usgov/__init__.py | johnjdailey/FinMesh | 64048b02bfec1a24de840877b38e82f4fa813d22 | [
"MIT"
] | 1 | 2020-08-14T16:09:54.000Z | 2020-08-14T16:09:54.000Z | build/lib/FinMesh/usgov/__init__.py | johnjdailey/FinMesh | 64048b02bfec1a24de840877b38e82f4fa813d22 | [
"MIT"
] | null | null | null | build/lib/FinMesh/usgov/__init__.py | johnjdailey/FinMesh | 64048b02bfec1a24de840877b38e82f4fa813d22 | [
"MIT"
] | null | null | null | import os
import requests
import xmltodict
import csv
import json
# # # # # # # # # #
# FRED DATA BELOW #
# # # # # # # # # #
FRED_BASE_URL = 'https://api.stlouisfed.org/fred/'
GEOFRED_BASE_URL = 'https://api.stlouisfed.org/geofred/'
FRED_SERIES_OBS_URL = FRED_BASE_URL + 'series/observations?'
GEOFRED_SERIES_META_URL = GEOFRED_BASE_URL + 'series/group?'
GEOFRED_REGIONAL_SERIES_URL = GEOFRED_BASE_URL + 'series/data?'
# # # # # # # # # # # # # # # #
# GOVERNMENT YIELD CURVE DATA #
# # # # # # # # # # # # # # # #
GOV_YIELD_URL = 'https://data.treasury.gov/feed.svc/DailyTreasuryYieldCurveRateData?$filter=month(NEW_DATE)%20eq%204%20and%20year(NEW_DATE)%20eq%202019'
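

# Illustrative only: a minimal sketch of how a request against the FRED
# observations endpoint above might be composed. The environment-variable
# name and the JSON file_type parameter are assumptions made for this
# example, not necessarily how the rest of this package builds its requests.
def _example_fred_series(series_id, api_key=None):
    key = api_key or os.getenv('FRED_API_KEY')
    url = (FRED_SERIES_OBS_URL
           + 'series_id=' + series_id
           + '&api_key=' + key
           + '&file_type=json')
    return requests.get(url).json()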
| 44 | 259 | 0.676499 |
8a22e67655b4062b0aecbc7e8062db32e1383d10 | 1,268 | py | Python | settings.py | Cradac/mattermost-octane-integration | 6a3cb4d2e0854cbf190f66467b604e6e4344a907 | [
"MIT"
] | null | null | null | settings.py | Cradac/mattermost-octane-integration | 6a3cb4d2e0854cbf190f66467b604e6e4344a907 | [
"MIT"
] | null | null | null | settings.py | Cradac/mattermost-octane-integration | 6a3cb4d2e0854cbf190f66467b604e6e4344a907 | [
"MIT"
] | null | null | null | '''
This is the Settings File for the Mattermost-Octane Bridge.
You can change various variables here to customize and set up the client.
'''
'''----------------------Mattermost Webhook Configuration----------------------'''
#URL of the webhook from mattermost. To create one go to `Main Menu -> Integrations -> Incoming Webhooks` and press `Add Incoming Webhook`
mm_webhook_url = 'http://localhost:8065/hooks/yuro8xrfeffj787cj1bwc4ziue'
#Override the channel to send the notifications to, use the channel name as a String
mm_channel = None
#Set a custom Username to display in Mattermost
mm_username = 'Defect Notification'
#Set a custom Profile Image for the Client
mm_profileimage = 'https://i.imgur.com/7Wg3Tgs.png' #Telekom T Image
#The latter two need to be enabled in the settings.json of the Mattermost server
'''----------------------------Flask Configuration----------------------------'''
#set external IP for the Flask Server to create a Webhook for ALM Octane
#local: 127.0.0.1 / False
#default external: 0.0.0.0 (will default to only available external address)
external_ip = False
#default: 5000
port = 5000
#external webhook verify token can be set here, if set as `None` it will be autogenerated & changed on each startup.
wh_token = None
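

# Illustrative only (not part of the original settings module): a minimal
# sketch of how these values could be used to post a notification to the
# incoming webhook configured above.
def _example_post(text):
    import requests  # assumed to be available in the bridge's environment
    payload = {'text': text, 'username': mm_username, 'icon_url': mm_profileimage}
    if mm_channel:
        payload['channel'] = mm_channel
    return requests.post(mm_webhook_url, json=payload)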
| 34.27027 | 138 | 0.706625 |
8a23dbe8e2d12d3a5feea4e2c5b3fc204f837e5d | 10 | py | Python | python/testData/console/indent7.after.py | jnthn/intellij-community | 8fa7c8a3ace62400c838e0d5926a7be106aa8557 | [
"Apache-2.0"
] | 2 | 2019-04-28T07:48:50.000Z | 2020-12-11T14:18:08.000Z | python/testData/console/indent7.after.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 173 | 2018-07-05T13:59:39.000Z | 2018-08-09T01:12:03.000Z | python/testData/console/indent7.after.py | Cyril-lamirand/intellij-community | 60ab6c61b82fc761dd68363eca7d9d69663cfa39 | [
"Apache-2.0"
] | 2 | 2020-03-15T08:57:37.000Z | 2020-04-07T04:48:14.000Z | print(1) | 10 | 10 | 0.6 |
8a23e2e394242f4eca447da628bb8ac3e7fe2807 | 6,323 | py | Python | tools/aerial_detection.py | gfjiangly/AerialDetection | ee8a945c67c8e9ddef725900ac300d2d5a785e08 | [
"Apache-2.0"
] | null | null | null | tools/aerial_detection.py | gfjiangly/AerialDetection | ee8a945c67c8e9ddef725900ac300d2d5a785e08 | [
"Apache-2.0"
] | 1 | 2021-08-28T15:48:14.000Z | 2021-08-28T15:48:14.000Z | tools/aerial_detection.py | gfjiangly/AerialDetection | ee8a945c67c8e9ddef725900ac300d2d5a785e08 | [
"Apache-2.0"
] | null | null | null | # -*- encoding:utf-8 -*-
# @Time : 2021/1/3 15:15
# @Author : gfjiang
import os.path as osp
import mmcv
import numpy as np
import cvtools
import matplotlib.pyplot as plt
import cv2.cv2 as cv
from functools import partial
import torch
import math
from cvtools.utils.path import add_prefix_filename_suffix
from mmdet.ops import nms
from mmdet.apis import init_detector, inference_detector


def get_image_name_for_hook(module, work_dir='./'):
    """
    Generate image filename for hook function

    Parameters:
    -----------
    module: module of neural network
    """
    # os.makedirs(work_dir, exist_ok=True)
    module_name = str(module)
    base_name = module_name.split('(')[0]
    index = 0
    image_name = '.'  # '.' is surely exist, to make first loop condition True
    while osp.exists(image_name):
        index += 1
        image_name = osp.join(
            work_dir, 'feats', '%s_%d.png' % (base_name, index))
    return image_name
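

# Illustrative only: a sketch of how the filename helper above might be used
# with a PyTorch forward hook to dump intermediate feature maps, e.g. via
# ``module.register_forward_hook(partial(save_feature_hook, work_dir=...))``.
# The channel-averaging before saving is an assumption made for visualisation,
# not something taken from the original module.
def save_feature_hook(module, inputs, outputs, work_dir='./'):
    image_name = get_image_name_for_hook(module, work_dir=work_dir)
    feats = outputs[0] if isinstance(outputs, (list, tuple)) else outputs
    # Average over the channel dimension to get one 2D map per module call.
    feat_map = feats.detach().cpu().mean(dim=1)[0].numpy()
    mmcv.mkdir_or_exist(osp.dirname(image_name))
    plt.imsave(image_name, feat_map)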


if __name__ == '__main__':
    config_file = 'configs/DOTA/faster_rcnn_RoITrans_r50_fpn_1x_dota_1gpus_mdanet2.py'
    pth_file = 'work_dirs/faster_rcnn_RoITrans_r50_fpn_1x_dota_1gpus_mdanet2/epoch_12.pth'
    detector = AerialDetectionOBB(config_file, pth_file)
    detector('/media/data/DOTA/crop/P2701_2926_1597_3949_2620.png', vis=True,
             save_root='work_dirs/attention_vis/')
    detector.save_results('work_dirs/faster_rcnn_RoITrans_r50_fpn_1x_dota_1gpus_mdanet2/detect_result.txt')
| 36.33908 | 107 | 0.591966 |
8a248f7412b4e5841bfb0f9c54b8c6e82ae3813b | 19,715 | py | Python | tradingAPI/low_level.py | federico123579/Trading212-API | 0fab20b71a2348e72bbe76071b81f3692128851f | [
"MIT"
] | 44 | 2017-10-23T19:17:20.000Z | 2021-09-06T17:01:49.000Z | tradingAPI/low_level.py | federico123579/Trading212-API | 0fab20b71a2348e72bbe76071b81f3692128851f | [
"MIT"
] | 7 | 2017-09-05T09:51:16.000Z | 2020-05-17T11:23:27.000Z | tradingAPI/low_level.py | federico123579/Trading212-API | 0fab20b71a2348e72bbe76071b81f3692128851f | [
"MIT"
] | 18 | 2017-11-18T11:55:58.000Z | 2021-04-11T14:23:12.000Z | # -*- coding: utf-8 -*-
"""
tradingAPI.low_level
~~~~~~~~~~~~~~
This module provides the low level functions with the service.
"""
import time
import re
from datetime import datetime
from pyvirtualdisplay import Display
from bs4 import BeautifulSoup
from splinter import Browser
from .glob import Glob
from .links import path
from .utils import num, expect, get_pip
# exceptions
from tradingAPI import exceptions
import selenium.common.exceptions
# logging
import logging
logger = logging.getLogger('tradingAPI.low_level')
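

# Illustrative only: a minimal sketch of how the imports above are typically
# combined to start an (optionally headless) browser session. The driver name
# and display size are assumptions, not values taken from this module.
def _example_start_browser(headless=True):
    display = None
    if headless:
        display = Display(visible=0, size=(1366, 768))
        display.start()
    browser = Browser('chrome')  # the splinter driver name is an assumption
    return browser, display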
| 37.058271 | 79 | 0.526401 |